diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 14a0d5ea90..e4964e8909 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -23,7 +23,7 @@ "dbaeumer.vscode-eslint", "eamodio.gitlens", "editorconfig.editorconfig", - "vue.volar", + "vue.volar@2.2.10", "mrmlnc.vscode-duplicate", "ms-azuretools.vscode-docker", "ms-playwright.playwright", @@ -32,24 +32,16 @@ "mutantdino.resourcemonitor", "oderwat.indent-rainbow", "redhat.vscode-yaml", - "spmeesseman.vscode-taskexplorer", - "visualstudioexptteam.vscodeintellicode" + "ms-python.pylint", + "charliermarsh.ruff" ], "settings": { "terminal.integrated.defaultProfile.linux": "zsh", "python.pythonPath": "/usr/local/bin/python", "python.languageServer": "Default", - "python.linting.enabled": true, - "python.linting.pylintEnabled": true, "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8", "python.formatting.blackPath": "/usr/local/py-utils/bin/black", "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf", - "python.linting.banditPath": "/usr/local/py-utils/bin/bandit", - "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", - "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy", - "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle", - "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle", - "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint", "python.testing.pytestArgs": [ "ietf" ], diff --git a/.devcontainer/docker-compose.extend.yml b/.devcontainer/docker-compose.extend.yml index fa9a412cf2..ce1ce259fd 100644 --- a/.devcontainer/docker-compose.extend.yml +++ b/.devcontainer/docker-compose.extend.yml @@ -1,5 +1,3 @@ -version: '3.8' - services: app: environment: @@ -14,6 +12,10 @@ services: # - datatracker-vscode-ext:/root/.vscode-server/extensions # Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function. 
network_mode: service:db + blobstore: + ports: + - '9000:9000' + - '9001:9001' volumes: datatracker-vscode-ext: diff --git a/.editorconfig b/.editorconfig index d6eafe8d8f..7e5ce6236a 100644 --- a/.editorconfig +++ b/.editorconfig @@ -50,3 +50,9 @@ indent_size = 2 [ietf/**.html] insert_final_newline = false + +# Settings for Kubernetes yaml +# --------------------------------------------------------- +# Use 2-space indents +[k8s/**.yaml] +indent_size = 2 diff --git a/.gitattributes b/.gitattributes index 937c0eb379..62f4aae432 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1,280 @@ -/.yarn/releases/** binary -/.yarn/plugins/** binary +# Auto detect text files and perform LF normalization +* text=auto + +# --------------------------------------------------- +# Python Projects +# --------------------------------------------------- + +# Source files +*.pxd text diff=python +*.py text diff=python +*.py3 text diff=python +*.pyw text diff=python +*.pyx text diff=python +*.pyz text diff=python +*.pyi text diff=python + +# Binary files +*.db binary +*.p binary +*.pkl binary +*.pickle binary +*.pyc binary export-ignore +*.pyo binary export-ignore +*.pyd binary + +# Jupyter notebook +*.ipynb text eol=lf + +# --------------------------------------------------- +# Web Projects +# --------------------------------------------------- + +# Source code +*.bash text eol=lf +*.bat text eol=crlf +*.cmd text eol=crlf +*.coffee text +*.css text diff=css +*.htm text diff=html +*.html text diff=html +*.inc text +*.ini text +*.js text +*.mjs text +*.cjs text +*.json text +*.jsx text +*.less text +*.ls text +*.map text -diff +*.od text +*.onlydata text +*.php text diff=php +*.pl text +*.ps1 text eol=crlf +*.py text diff=python +*.rb text diff=ruby +*.sass text +*.scm text +*.scss text diff=css +*.sh text eol=lf +.husky/* text eol=lf +*.sql text +*.styl text +*.tag text +*.ts text +*.tsx text +*.xml text +*.xhtml text diff=html + +# Docker +Dockerfile text + +# Documentation +*.ipynb text eol=lf +*.markdown text diff=markdown +*.md text diff=markdown +*.mdwn text diff=markdown +*.mdown text diff=markdown +*.mkd text diff=markdown +*.mkdn text diff=markdown +*.mdtxt text +*.mdtext text +*.txt text +AUTHORS text +CHANGELOG text +CHANGES text +CONTRIBUTING text +COPYING text +copyright text +*COPYRIGHT* text +INSTALL text +license text +LICENSE text +NEWS text +readme text +*README* text +TODO text + +# Templates +*.dot text +*.ejs text +*.erb text +*.haml text +*.handlebars text +*.hbs text +*.hbt text +*.jade text +*.latte text +*.mustache text +*.njk text +*.phtml text +*.pug text +*.svelte text +*.tmpl text +*.tpl text +*.twig text +*.vue text + +# Configs +*.cnf text +*.conf text +*.config text +.editorconfig text +.env text +.gitattributes text +.gitconfig text +.htaccess text +*.lock text -diff +package.json text eol=lf +package-lock.json text eol=lf -diff +pnpm-lock.yaml text eol=lf -diff +.prettierrc text +yarn.lock text -diff +*.toml text +*.yaml text +*.yml text +browserslist text +Makefile text +makefile text +# Fixes syntax highlighting on GitHub to allow comments +tsconfig.json linguist-language=JSON-with-Comments + +# Heroku +Procfile text + +# Graphics +*.ai binary +*.bmp binary +*.eps binary +*.gif binary +*.gifv binary +*.ico binary +*.jng binary +*.jp2 binary +*.jpg binary +*.jpeg binary +*.jpx binary +*.jxr binary +*.pdf binary +*.png binary +*.psb binary +*.psd binary +*.svg text +*.svgz binary +*.tif binary +*.tiff binary +*.wbmp binary +*.webp binary + +# Audio +*.kar binary +*.m4a 
binary +*.mid binary +*.midi binary +*.mp3 binary +*.ogg binary +*.ra binary + +# Video +*.3gpp binary +*.3gp binary +*.as binary +*.asf binary +*.asx binary +*.avi binary +*.fla binary +*.flv binary +*.m4v binary +*.mng binary +*.mov binary +*.mp4 binary +*.mpeg binary +*.mpg binary +*.ogv binary +*.swc binary +*.swf binary +*.webm binary + +# Archives +*.7z binary +*.gz binary +*.jar binary +*.rar binary +*.tar binary +*.zip binary + +# Fonts +*.ttf binary +*.eot binary +*.otf binary +*.woff binary +*.woff2 binary + +# Executables +*.exe binary +*.pyc binary +# Prevents massive diffs caused by vendored, minified files +**/.yarn/releases/** binary +**/.yarn/plugins/** binary + +# RC files (like .babelrc or .eslintrc) +*.*rc text + +# Ignore files (like .npmignore or .gitignore) +*.*ignore text + +# Prevents massive diffs from built files +dist/* binary + +# --------------------------------------------------- +# Common +# --------------------------------------------------- + +# Documents +*.bibtex text diff=bibtex +*.doc diff=astextplain +*.DOC diff=astextplain +*.docx diff=astextplain +*.DOCX diff=astextplain +*.dot diff=astextplain +*.DOT diff=astextplain +*.pdf diff=astextplain +*.PDF diff=astextplain +*.rtf diff=astextplain +*.RTF diff=astextplain +*.md text diff=markdown +*.mdx text diff=markdown +*.tex text diff=tex +*.adoc text +*.textile text +*.mustache text +*.csv text eol=crlf +*.tab text +*.tsv text +*.txt text +*.sql text +*.epub diff=astextplain + +# Text files where line endings should be preserved +*.patch -text + +# --------------------------------------------------- +# Vzic specific +# --------------------------------------------------- + +*.pl text diff=perl +*.pm text diff=perl + +# C/C++ +*.c text diff=cpp +*.cc text diff=cpp +*.cxx text diff=cpp +*.cpp text diff=cpp +*.cpi text diff=cpp +*.c++ text diff=cpp +*.hpp text diff=cpp +*.h text diff=cpp +*.h++ text diff=cpp +*.hh text diff=cpp \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 5e27af9fed..320614b17e 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,8 +1,8 @@ -blank_issues_enabled: false -contact_links: - - name: Help and questions - url: https://github.com/ietf-tools/datatracker/discussions/categories/help-questions - about: Need help? Have a question on setting up the project or its usage? - - name: Discuss new ideas - url: https://github.com/ietf-tools/datatracker/discussions/categories/ideas - about: Submit ideas for new features or improvements to be discussed. +blank_issues_enabled: false +contact_links: + - name: Help and questions + url: https://github.com/ietf-tools/datatracker/discussions/categories/help-questions + about: Need help? Have a question on setting up the project or its usage? + - name: Discuss new ideas + url: https://github.com/ietf-tools/datatracker/discussions/categories/ideas + about: Submit ideas for new features or improvements to be discussed. diff --git a/.github/ISSUE_TEMPLATE/new-feature.yml b/.github/ISSUE_TEMPLATE/new-feature.yml index ddf0b575e8..285081e1c8 100644 --- a/.github/ISSUE_TEMPLATE/new-feature.yml +++ b/.github/ISSUE_TEMPLATE/new-feature.yml @@ -1,6 +1,7 @@ name: Suggest new feature or enhancement description: Propose a new idea to be implemented. 
labels: ["enhancement"] +type: Feature body: - type: markdown attributes: diff --git a/.github/ISSUE_TEMPLATE/report-a-bug.yml b/.github/ISSUE_TEMPLATE/report-a-bug.yml index d5b725d721..47fa1185b4 100644 --- a/.github/ISSUE_TEMPLATE/report-a-bug.yml +++ b/.github/ISSUE_TEMPLATE/report-a-bug.yml @@ -1,6 +1,7 @@ name: Report a Datatracker bug description: Something in the datatracker's behavior isn't right? File a bug report. Don't use this to report RFC errata or issues with the content of Internet-Drafts. labels: ["bug"] +type: Bug body: - type: markdown attributes: diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 4687d04aa9..17d89f1aab 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -17,3 +17,45 @@ updates: directory: "/" schedule: interval: "weekly" + reviewers: + - "rjsparks" + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "weekly" + groups: + yarn: + patterns: + - "*" + - package-ecosystem: "npm" + directory: "/playwright" + schedule: + interval: "weekly" + groups: + npm: + patterns: + - "*" + - package-ecosystem: "npm" + directory: "/dev/coverage-action" + schedule: + interval: "weekly" + groups: + npm: + patterns: + - "*" + - package-ecosystem: "npm" + directory: "/dev/deploy-to-container" + schedule: + interval: "weekly" + groups: + npm: + patterns: + - "*" + - package-ecosystem: "npm" + directory: "/dev/diff" + schedule: + interval: "weekly" + groups: + npm: + patterns: + - "*" diff --git a/.github/workflows/build-base-app.yml b/.github/workflows/build-base-app.yml index 85842d9dcf..35172aa299 100644 --- a/.github/workflows/build-base-app.yml +++ b/.github/workflows/build-base-app.yml @@ -1,43 +1,67 @@ -name: Build Base App Docker Image - -on: - push: - branches: - - 'main' - paths: - - 'docker/base.Dockerfile' - - 'requirements.txt' - - workflow_dispatch: - -jobs: - publish: - runs-on: ubuntu-latest - permissions: - contents: read - packages: write - - steps: - - uses: actions/checkout@v4 - - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Login to GitHub Container Registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Docker Build & Push - uses: docker/build-push-action@v5 - with: - context: . - file: docker/base.Dockerfile - platforms: linux/amd64,linux/arm64 - push: true - tags: ghcr.io/ietf-tools/datatracker-app-base:latest +name: Build Base App Docker Image + +on: + push: + branches: + - 'main' + paths: + - 'docker/base.Dockerfile' + - 'requirements.txt' + + workflow_dispatch: + +jobs: + publish: + runs-on: ubuntu-latest + permissions: + contents: write + packages: write + + steps: + - uses: actions/checkout@v6 + with: + token: ${{ secrets.GH_COMMON_TOKEN }} + + - name: Set Version + run: | + printf -v CURDATE '%(%Y%m%dT%H%M)T' -1 + echo "IMGVERSION=$CURDATE" >> $GITHUB_ENV + + - name: Set up QEMU + uses: docker/setup-qemu-action@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v4 + + - name: Login to GitHub Container Registry + uses: docker/login-action@v4 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Docker Build & Push + uses: docker/build-push-action@v7 + env: + DOCKER_BUILD_SUMMARY: false + with: + context: . 
+ file: docker/base.Dockerfile + platforms: linux/amd64,linux/arm64 + push: true + tags: | + ghcr.io/ietf-tools/datatracker-app-base:${{ env.IMGVERSION }} + ghcr.io/ietf-tools/datatracker-app-base:py312 + ${{ github.ref == 'refs/heads/main' && 'ghcr.io/ietf-tools/datatracker-app-base:latest' || '' }} + + - name: Update version references + run: | + sed -i "1s/.*/FROM ghcr.io\/ietf-tools\/datatracker-app-base:${{ env.IMGVERSION }}/" dev/build/Dockerfile + echo "${{ env.IMGVERSION }}" > dev/build/TARGET_BASE + + - name: Commit CHANGELOG.md + uses: stefanzweifel/git-auto-commit-action@v7 + with: + branch: ${{ github.ref_name }} + commit_message: 'ci: update base image target version to ${{ env.IMGVERSION }}' + file_pattern: dev/build/Dockerfile dev/build/TARGET_BASE diff --git a/.github/workflows/build-celery-worker.yml b/.github/workflows/build-celery-worker.yml deleted file mode 100644 index 9c37d02ce8..0000000000 --- a/.github/workflows/build-celery-worker.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: Build Celery Worker Docker Image - -on: - push: - branches: - - 'main' - paths: - - 'requirements.txt' - - 'dev/celery/**' - - '.github/workflows/build-celery-worker.yml' - - workflow_dispatch: - -jobs: - publish: - runs-on: ubuntu-latest - permissions: - contents: read - packages: write - - steps: - - uses: actions/checkout@v4 - - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Login to GitHub Container Registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Docker Build & Push - uses: docker/build-push-action@v5 - with: - context: . - file: dev/celery/Dockerfile - platforms: linux/amd64,linux/arm64 - push: true - tags: ghcr.io/ietf-tools/datatracker-celery:latest - diff --git a/.github/workflows/build-devblobstore.yml b/.github/workflows/build-devblobstore.yml new file mode 100644 index 0000000000..14c4b1a135 --- /dev/null +++ b/.github/workflows/build-devblobstore.yml @@ -0,0 +1,47 @@ +name: Build Dev/Test Blobstore Docker Image + +on: + push: + branches: + - 'main' + paths: + - '.github/workflows/build-devblobstore.yml' + + workflow_dispatch: + +env: + MINIO_VERSION: latest + +jobs: + publish: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - uses: actions/checkout@v6 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v4 + + - name: Login to GitHub Container Registry + uses: docker/login-action@v4 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Docker Build & Push + uses: docker/build-push-action@v7 + env: + DOCKER_BUILD_SUMMARY: false + with: + context: . 
+ file: docker/devblobstore.Dockerfile + platforms: linux/amd64,linux/arm64 + push: true + build-args: MINIO_VERSION=${{ env.MINIO_VERSION }} + tags: | + ghcr.io/ietf-tools/datatracker-devblobstore:${{ env.MINIO_VERSION }} + ghcr.io/ietf-tools/datatracker-devblobstore:latest diff --git a/.github/workflows/build-mq-broker.yml b/.github/workflows/build-mq-broker.yml index ba935405f9..b297e34b47 100644 --- a/.github/workflows/build-mq-broker.yml +++ b/.github/workflows/build-mq-broker.yml @@ -8,10 +8,13 @@ on: - 'dev/mq/**' - '.github/workflows/build-mq-broker.yml' - workflow_dispatch: - -env: - RABBITMQ_VERSION: 3.12-alpine + workflow_dispatch: + inputs: + rabbitmq_version: + description: 'RabbitMQ Version' + default: '3.13-alpine' + required: true + type: string jobs: publish: @@ -21,29 +24,40 @@ jobs: packages: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@v4 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@v4 - name: Login to GitHub Container Registry - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + - name: Set rabbitmq version + id: rabbitmq-version + run: | + if [[ "${{ inputs.rabbitmq_version }}" == "" ]]; then + echo "RABBITMQ_VERSION=3.13-alpine" >> $GITHUB_OUTPUT + else + echo "RABBITMQ_VERSION=${{ inputs.rabbitmq_version }}" >> $GITHUB_OUTPUT + fi + - name: Docker Build & Push - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v7 + env: + DOCKER_BUILD_SUMMARY: false with: context: . file: dev/mq/Dockerfile platforms: linux/amd64,linux/arm64 push: true - build-args: RABBITMQ_VERSION=${{ env.RABBITMQ_VERSION }} + build-args: RABBITMQ_VERSION=${{ steps.rabbitmq-version.outputs.RABBITMQ_VERSION }} tags: | - ghcr.io/ietf-tools/datatracker-mq:${{ env.RABBITMQ_VERSION }} + ghcr.io/ietf-tools/datatracker-mq:${{ steps.rabbitmq-version.outputs.RABBITMQ_VERSION }} ghcr.io/ietf-tools/datatracker-mq:latest diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 251d576c07..ff84408187 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -7,26 +7,35 @@ on: workflow_dispatch: inputs: - summary: - description: 'Release Summary' - required: false - type: string - default: '' - sandbox: - description: 'Deploy to Sandbox' + deploy: + description: 'Deploy to K8S' + default: 'Skip' + required: true + type: choice + options: + - Skip + - Staging Only + - Staging + Prod + dev: + description: 'Deploy to Dev' default: true required: true type: boolean - legacySandbox: - description: 'Deploy to Legacy Sandbox' + devNoDbRefresh: + description: 'Dev Disable Daily DB Refresh' default: false - required: false + required: true type: boolean skiptests: description: 'Skip Tests' default: false required: true type: boolean + skiparm: + description: 'Skip ARM64 Build' + default: false + required: true + type: boolean ignoreLowerCoverage: description: 'Ignore Lower Coverage' default: false @@ -38,6 +47,10 @@ on: required: true type: boolean +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + jobs: # ----------------------------------------------------------------- # PREPARE @@ -50,13 +63,15 @@ jobs: pkg_version: ${{ steps.buildvars.outputs.pkg_version }} from_tag: ${{ steps.semver.outputs.nextStrict }} to_tag: ${{ steps.semver.outputs.current 
}} + base_image_version: ${{ steps.baseimgversion.outputs.base_image_version }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: - fetch-depth: 0 + fetch-depth: 1 + fetch-tags: false - - name: Get Next Version + - name: Get Next Version (Prod) if: ${{ github.ref_name == 'release' }} id: semver uses: ietf-tools/semver-action@v1 @@ -64,39 +79,56 @@ jobs: token: ${{ github.token }} branch: release skipInvalidTags: true - - - name: Set Next Version Env Var + patchList: fix, bugfix, perf, refactor, test, tests, chore + + - name: Get Dev Version + if: ${{ github.ref_name != 'release' }} + id: semverdev + uses: ietf-tools/semver-action@v1 + with: + token: ${{ github.token }} + branch: release + skipInvalidTags: true + noVersionBumpBehavior: 'current' + noNewCommitBehavior: 'current' + + - name: Set Release Flag if: ${{ github.ref_name == 'release' }} run: | - echo "NEXT_VERSION=$nextStrict" >> $GITHUB_ENV + echo "IS_RELEASE=true" >> $GITHUB_ENV - name: Create Draft Release - uses: ncipollo/release-action@v1.13.0 + uses: ncipollo/release-action@v1.21.0 if: ${{ github.ref_name == 'release' }} with: prerelease: true draft: false commit: ${{ github.sha }} - tag: ${{ env.NEXT_VERSION }} - name: ${{ env.NEXT_VERSION }} + tag: ${{ steps.semver.outputs.nextStrict }} + name: ${{ steps.semver.outputs.nextStrict }} body: '*pending*' token: ${{ secrets.GITHUB_TOKEN }} - name: Set Build Variables id: buildvars run: | - if [[ $NEXT_VERSION ]]; then - echo "Using AUTO SEMVER mode: $NEXT_VERSION" + if [[ $IS_RELEASE ]]; then + echo "Using AUTO SEMVER mode: ${{ steps.semver.outputs.nextStrict }}" echo "should_deploy=true" >> $GITHUB_OUTPUT - echo "pkg_version=$NEXT_VERSION" >> $GITHUB_OUTPUT - echo "::notice::Release $NEXT_VERSION created using branch $GITHUB_REF_NAME" + echo "pkg_version=${{ steps.semver.outputs.nextStrict }}" >> $GITHUB_OUTPUT + echo "::notice::Release ${{ steps.semver.outputs.nextStrict }} created using branch $GITHUB_REF_NAME" else - echo "Using TEST mode: 11.0.0-dev.$GITHUB_RUN_NUMBER" + echo "Using TEST mode: ${{ steps.semverdev.outputs.nextMajorStrict }}.0.0-dev.$GITHUB_RUN_NUMBER" echo "should_deploy=false" >> $GITHUB_OUTPUT - echo "pkg_version=11.0.0-dev.$GITHUB_RUN_NUMBER" >> $GITHUB_OUTPUT - echo "::notice::Non-production build 11.0.0-dev.$GITHUB_RUN_NUMBER created using branch $GITHUB_REF_NAME" + echo "pkg_version=${{ steps.semverdev.outputs.nextMajorStrict }}.0.0-dev.$GITHUB_RUN_NUMBER" >> $GITHUB_OUTPUT + echo "::notice::Non-production build ${{ steps.semverdev.outputs.nextMajorStrict }}.0.0-dev.$GITHUB_RUN_NUMBER created using branch $GITHUB_REF_NAME" fi + - name: Get Base Image Target Version + id: baseimgversion + run: | + echo "base_image_version=$(sed -n '1p' dev/build/TARGET_BASE)" >> $GITHUB_OUTPUT + # ----------------------------------------------------------------- # TESTS # ----------------------------------------------------------------- @@ -106,8 +138,11 @@ jobs: uses: ./.github/workflows/tests.yml if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }} needs: [prepare] + secrets: inherit with: ignoreLowerCoverage: ${{ github.event.inputs.ignoreLowerCoverage == 'true' }} + skipSelenium: true + targetBaseVersion: ${{ needs.prepare.outputs.base_image_version }} # ----------------------------------------------------------------- # RELEASE @@ -116,44 +151,59 @@ jobs: name: Make Release if: ${{ !failure() && !cancelled() }} needs: [tests, prepare] - runs-on: ubuntu-latest + runs-on: + group: hperf-8c32r + permissions: + 
contents: write + packages: write env: SHOULD_DEPLOY: ${{needs.prepare.outputs.should_deploy}} PKG_VERSION: ${{needs.prepare.outputs.pkg_version}} FROM_TAG: ${{needs.prepare.outputs.from_tag}} TO_TAG: ${{needs.prepare.outputs.to_tag}} + TARGET_BASE: ${{needs.prepare.outputs.base_image_version}} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: - fetch-depth: 0 - - - name: Setup Node.js - uses: actions/setup-node@v4 + fetch-depth: 1 + fetch-tags: false + + - name: Setup Node.js environment + uses: actions/setup-node@v6 with: - node-version: '16' - + node-version: 18.x + - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v6 with: - python-version: '3.x' - + python-version: "3.x" + + - name: Setup AWS CLI + uses: unfor19/install-aws-cli-action@v1 + with: + version: 2.22.35 + - name: Download a Coverage Results if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }} - uses: actions/download-artifact@v3.0.2 + uses: actions/download-artifact@v8.0.1 with: name: coverage - name: Make Release Build env: DEBIAN_FRONTEND: noninteractive + BROWSERSLIST_IGNORE_OLD_DATA: 1 run: | echo "PKG_VERSION: $PKG_VERSION" echo "GITHUB_SHA: $GITHUB_SHA" echo "GITHUB_REF_NAME: $GITHUB_REF_NAME" - echo "Running build script..." - chmod +x ./dev/deploy/build.sh - sh ./dev/deploy/build.sh + echo "Running frontend build script..." + echo "Compiling native node packages..." + yarn rebuild + echo "Packaging static assets..." + yarn build --base=https://static.ietf.org/dt/$PKG_VERSION/ + yarn legacy:build echo "Setting version $PKG_VERSION..." sed -i -r -e "s|^__version__ += '.*'$|__version__ = '$PKG_VERSION'|" ietf/__init__.py sed -i -r -e "s|^__release_hash__ += '.*'$|__release_hash__ = '$GITHUB_SHA'|" ietf/__init__.py @@ -171,8 +221,64 @@ jobs: run: | echo "Build release tarball..." mkdir -p /home/runner/work/release - tar -czf /home/runner/work/release/release.tar.gz -X dev/deploy/exclude-patterns.txt . - + tar -czf /home/runner/work/release/release.tar.gz -X dev/build/exclude-patterns.txt . + + - name: Collect + Push Statics + env: + DEBIAN_FRONTEND: noninteractive + AWS_ACCESS_KEY_ID: ${{ secrets.CF_R2_STATIC_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_R2_STATIC_KEY_SECRET }} + AWS_DEFAULT_REGION: auto + AWS_ENDPOINT_URL: ${{ secrets.CF_R2_ENDPOINT }} + run: | + echo "Collecting statics..." + echo "Using ghcr.io/ietf-tools/datatracker-app-base:${{ env.TARGET_BASE }}" + docker run --rm --name collectstatics -v $(pwd):/workspace ghcr.io/ietf-tools/datatracker-app-base:${{ env.TARGET_BASE }} sh dev/build/collectstatics.sh + echo "Pushing statics..." + cd static + aws s3 sync . s3://static/dt/$PKG_VERSION --only-show-errors + + - name: Augment dockerignore for docker image build + env: + DEBIAN_FRONTEND: noninteractive + run: | + cat >> .dockerignore <> $GITHUB_ENV + + - name: Build Images + uses: docker/build-push-action@v7 + env: + DOCKER_BUILD_SUMMARY: false + with: + context: . 
+ file: dev/build/Dockerfile + platforms: ${{ github.event.inputs.skiparm == 'true' && 'linux/amd64' || 'linux/amd64,linux/arm64' }} + push: true + tags: | + ghcr.io/ietf-tools/datatracker:${{ env.PKG_VERSION }} + ${{ env.FEATURE_LATEST_TAG && format('ghcr.io/ietf-tools/datatracker:{0}-latest', env.FEATURE_LATEST_TAG) || null }} + - name: Update CHANGELOG id: changelog uses: Requarks/changelog-action@v1 @@ -183,6 +289,12 @@ jobs: toTag: ${{ env.TO_TAG }} writeToFile: false + - name: Download Coverage Results + if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }} + uses: actions/download-artifact@v8.0.1 + with: + name: coverage + - name: Prepare Coverage Action if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }} working-directory: ./dev/coverage-action @@ -198,12 +310,12 @@ jobs: repoCommon: common version: ${{needs.prepare.outputs.pkg_version}} changelog: ${{ steps.changelog.outputs.changes }} - summary: ${{ github.event.inputs.summary }} + summary: '' coverageResultsPath: coverage.json histCoveragePath: historical-coverage.json - name: Create Release - uses: ncipollo/release-action@v1.13.0 + uses: ncipollo/release-action@v1.21.0 if: ${{ env.SHOULD_DEPLOY == 'true' }} with: allowUpdates: true @@ -216,7 +328,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} - name: Update Baseline Coverage - uses: ncipollo/release-action@v1.13.0 + uses: ncipollo/release-action@v1.21.0 if: ${{ github.event.inputs.updateCoverage == 'true' || github.ref_name == 'release' }} with: allowUpdates: true @@ -229,7 +341,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} - name: Upload Build Artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v7 with: name: release-${{ env.PKG_VERSION }} path: /home/runner/work/release/release.tar.gz @@ -248,146 +360,139 @@ jobs: steps: - name: Notify on Slack (Success) if: ${{ !contains(join(needs.*.result, ','), 'failure') }} - uses: slackapi/slack-github-action@v1.24.0 + uses: slackapi/slack-github-action@v3 with: - channel-id: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }} + token: ${{ secrets.SLACK_GH_BOT }} + method: chat.postMessage payload: | - { - "text": "Datatracker Build by ${{ github.triggering_actor }} - <@${{ secrets.SLACK_UID_RJSPARKS }}>", - "attachments": [ - { - "color": "28a745", - "fields": [ - { - "title": "Status", - "short": true, - "value": "Completed" - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_GH_BOT }} + channel: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }} + text: "Datatracker Build by ${{ github.triggering_actor }}" + attachments: + - color: "28a745" + fields: + - title: "Status" + short: true + value: "Completed" - name: Notify on Slack (Failure) if: ${{ contains(join(needs.*.result, ','), 'failure') }} - uses: slackapi/slack-github-action@v1.24.0 + uses: slackapi/slack-github-action@v3 with: - channel-id: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }} + token: ${{ secrets.SLACK_GH_BOT }} + method: chat.postMessage payload: | - { - "text": "Datatracker Build by ${{ github.triggering_actor }} - <@${{ secrets.SLACK_UID_RJSPARKS }}>", - "attachments": [ - { - "color": "a82929", - "fields": [ - { - "title": "Status", - "short": true, - "value": "Failed" - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_GH_BOT }} + channel: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }} + text: "Datatracker Build by ${{ github.triggering_actor }}" + attachments: + - color: "a82929" + fields: + - title: "Status" + short: true + value: "Failed" # 
----------------------------------------------------------------- - # SANDBOX + # DEV # ----------------------------------------------------------------- - sandbox: - name: Deploy to Sandbox - if: ${{ !failure() && !cancelled() && github.event.inputs.sandbox == 'true' }} + dev: + name: Deploy to Dev + if: ${{ !failure() && !cancelled() && github.event.inputs.dev == 'true' }} needs: [prepare, release] - runs-on: [self-hosted, dev-server] + runs-on: ubuntu-latest environment: - name: sandbox + name: dev env: PKG_VERSION: ${{needs.prepare.outputs.pkg_version}} steps: - - uses: actions/checkout@v4 - - - name: Download a Release Artifact - uses: actions/download-artifact@v3.0.2 - with: - name: release-${{ env.PKG_VERSION }} - - - name: Deploy to containers - env: - DEBIAN_FRONTEND: noninteractive - run: | - echo "Reset production flags in settings.py..." - sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = True/' -e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'development'/" ietf/settings.py - echo "Install Deploy to Container CLI dependencies..." - cd dev/deploy-to-container - npm ci - cd ../.. - echo "Start Deploy..." - node ./dev/deploy-to-container/cli.js --branch ${{ github.ref_name }} --domain dev.ietf.org --appversion ${{ env.PKG_VERSION }} --commit ${{ github.sha }} --ghrunid ${{ github.run_id }} - - - name: Cleanup old docker resources - env: - DEBIAN_FRONTEND: noninteractive - run: | - docker image prune -a -f - - legacySandbox: - name: Deploy to Legacy Sandbox - if: ${{ !failure() && !cancelled() && github.event.inputs.legacySandbox == 'true' }} + - uses: actions/checkout@v6 + with: + ref: main + + - name: Get Deploy Name + env: + DEBIAN_FRONTEND: noninteractive + run: | + echo "Install Get Deploy Name CLI dependencies..." + cd dev/k8s-get-deploy-name + npm ci + echo "Get Deploy Name..." 
+ echo "DEPLOY_NAMESPACE=$(node cli.js --branch ${{ github.ref_name }})" >> "$GITHUB_ENV" + + - name: Deploy to dev + uses: the-actions-org/workflow-dispatch@v4 + with: + workflow: deploy-dev.yml + repo: ietf-tools/infra-k8s + ref: main + token: ${{ secrets.GH_INFRA_K8S_TOKEN }} + inputs: '{ "app":"datatracker", "appVersion":"${{ env.PKG_VERSION }}", "remoteRef":"${{ github.sha }}", "namespace":"${{ env.DEPLOY_NAMESPACE }}", "disableDailyDbRefresh":${{ inputs.devNoDbRefresh }} }' + wait-for-completion: true + wait-for-completion-timeout: 60m + wait-for-completion-interval: 30s + display-workflow-run-url: false + + # ----------------------------------------------------------------- + # STAGING + # ----------------------------------------------------------------- + staging: + name: Deploy to Staging + if: ${{ !failure() && !cancelled() && (github.event.inputs.deploy == 'Staging Only' || github.event.inputs.deploy == 'Staging + Prod' || github.ref_name == 'release') }} needs: [prepare, release] - runs-on: [self-hosted, legacy-sandbox-server] + runs-on: ubuntu-latest environment: - name: legacy-sandbox - url: "https://sandbox.ietf.org" + name: staging env: PKG_VERSION: ${{needs.prepare.outputs.pkg_version}} steps: - - name: Download a Release Artifact - uses: actions/download-artifact@v3.0.2 - with: - name: release-${{ env.PKG_VERSION }} - path: /a/www/ietf-datatracker/main.dev.${{ github.run_number }} + - name: Refresh Staging DB + uses: the-actions-org/workflow-dispatch@v4 + with: + workflow: deploy-db.yml + repo: ietf-tools/infra-k8s + ref: main + token: ${{ secrets.GH_INFRA_K8S_TOKEN }} + inputs: '{ "environment":"${{ secrets.GHA_K8S_CLUSTER }}", "app":"datatracker", "manifest":"postgres", "forceRecreate":true, "restoreToLastFullSnapshot":true, "waitClusterReady":true }' + wait-for-completion: true + wait-for-completion-timeout: 120m + wait-for-completion-interval: 20s + display-workflow-run-url: false - - name: Extract Release - env: - DEBIAN_FRONTEND: noninteractive - working-directory: /a/www/ietf-datatracker/main.dev.${{ github.run_number }} - run: | - echo "Extracting release tarball..." - tar xzf release.tar.gz - echo "Deleting release tarball..." - rm -rf release.tar.gz - - - name: Setup Environment - env: - DEBIAN_FRONTEND: noninteractive - working-directory: /a/www/ietf-datatracker/main.dev.${{ github.run_number }} - run: | - echo "Copying settings from previous deploy..." - cp ../web/ietf/settings_local.py ietf/ - rsync -a ../web/test/ test/ - echo "Installing Python dependencies..." - python3.9 -mvenv env - source env/bin/activate - pip install -r requirements.txt - pip freeze > frozen-requirements.txt - echo "Collecting static..." - ietf/manage.py collectstatic - echo "Running checks..." - ietf/manage.py check - - - name: Update Docker Containers - env: - DEBIAN_FRONTEND: noninteractive - working-directory: /a/docker/datatracker - run: | - echo "Pulling latest docker images..." - docker image tag ghcr.io/ietf-tools/datatracker-celery:latest datatracker-celery-fallback - docker image tag ghcr.io/ietf-tools/datatracker-mq:latest datatracker-mq-fallback - docker-compose pull - # echo "Shutting down containers..." 
- # docker-compose down -t 300 - + - name: Deploy to staging + uses: the-actions-org/workflow-dispatch@v4 + with: + workflow: deploy.yml + repo: ietf-tools/infra-k8s + ref: main + token: ${{ secrets.GH_INFRA_K8S_TOKEN }} + inputs: '{ "environment":"${{ secrets.GHA_K8S_CLUSTER }}", "app":"datatracker", "appVersion":"${{ env.PKG_VERSION }}", "remoteRef":"${{ github.sha }}" }' + wait-for-completion: true + wait-for-completion-timeout: 30m + wait-for-completion-interval: 30s + display-workflow-run-url: false + + # ----------------------------------------------------------------- + # PROD + # ----------------------------------------------------------------- + prod: + name: Deploy to Production + if: ${{ !failure() && !cancelled() && (github.event.inputs.deploy == 'Staging + Prod' || github.ref_name == 'release') }} + needs: [prepare, staging] + runs-on: ubuntu-latest + environment: + name: production + env: + PKG_VERSION: ${{needs.prepare.outputs.pkg_version}} + + steps: + - name: Deploy to production + uses: the-actions-org/workflow-dispatch@v4 + with: + workflow: deploy.yml + repo: ietf-tools/infra-k8s + ref: main + token: ${{ secrets.GH_INFRA_K8S_TOKEN }} + inputs: '{ "environment":"${{ secrets.GHA_K8S_CLUSTER }}", "app":"datatracker", "appVersion":"${{ env.PKG_VERSION }}", "remoteRef":"${{ github.sha }}" }' + wait-for-completion: true + wait-for-completion-timeout: 30m + wait-for-completion-interval: 30s + display-workflow-run-url: false diff --git a/.github/workflows/ci-run-tests.yml b/.github/workflows/ci-run-tests.yml index 346dd97b43..5349f1ac7a 100644 --- a/.github/workflows/ci-run-tests.yml +++ b/.github/workflows/ci-run-tests.yml @@ -4,6 +4,7 @@ on: pull_request: branches: - 'main' + - 'feat/rfc' paths: - 'client/**' - 'ietf/**' @@ -12,7 +13,34 @@ on: - 'package.json' jobs: + # ----------------------------------------------------------------- + # PREPARE + # ----------------------------------------------------------------- + prepare: + name: Prepare + runs-on: ubuntu-latest + outputs: + base_image_version: ${{ steps.baseimgversion.outputs.base_image_version }} + + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 1 + fetch-tags: false + + - name: Get Base Image Target Version + id: baseimgversion + run: | + echo "base_image_version=$(sed -n '1p' dev/build/TARGET_BASE)" >> $GITHUB_OUTPUT + + # ----------------------------------------------------------------- + # TESTS + # ----------------------------------------------------------------- tests: + name: Run Tests uses: ./.github/workflows/tests.yml + needs: [prepare] with: - ignoreLowerCoverage: false \ No newline at end of file + ignoreLowerCoverage: false + skipSelenium: true + targetBaseVersion: ${{ needs.prepare.outputs.base_image_version }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 89350b6411..bc20779ae6 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -26,12 +26,12 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v4 with: languages: ${{ matrix.language }} - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v4 diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 4e75197790..e255b270ff 100644 --- a/.github/workflows/dependency-review.yml +++ 
b/.github/workflows/dependency-review.yml @@ -15,6 +15,8 @@ jobs: runs-on: ubuntu-latest steps: - name: 'Checkout Repository' - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: 'Dependency Review' - uses: actions/dependency-review-action@v3 + uses: actions/dependency-review-action@v4 + with: + vulnerability-check: false diff --git a/.github/workflows/dev-assets-sync-nightly.yml b/.github/workflows/dev-assets-sync-nightly.yml index bfb50bd41e..cd986f06f3 100644 --- a/.github/workflows/dev-assets-sync-nightly.yml +++ b/.github/workflows/dev-assets-sync-nightly.yml @@ -29,33 +29,21 @@ jobs: contents: read packages: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Login to GitHub Container Registry - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Docker Build & Push - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v7 + env: + DOCKER_BUILD_SUMMARY: false with: context: . file: dev/shared-assets-sync/Dockerfile push: true tags: ghcr.io/ietf-tools/datatracker-rsync-assets:latest - - sync: - name: Run assets rsync - if: ${{ always() }} - runs-on: [self-hosted, dev-server] - needs: [build] - steps: - - name: Run rsync - env: - DEBIAN_FRONTEND: noninteractive - run: | - docker pull ghcr.io/ietf-tools/datatracker-rsync-assets:latest - docker run --rm -v dt-assets:/assets ghcr.io/ietf-tools/datatracker-rsync-assets:latest - docker image prune -a -f diff --git a/.github/workflows/sandbox-refresh.yml b/.github/workflows/sandbox-refresh.yml deleted file mode 100644 index 3ddb119e4f..0000000000 --- a/.github/workflows/sandbox-refresh.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: Sandbox Refresh - -on: - # Run every night - schedule: - - cron: '0 9 * * *' - - workflow_dispatch: - -jobs: - main: - name: Refresh DBs - runs-on: [self-hosted, dev-server] - permissions: - contents: read - - steps: - - uses: actions/checkout@v4 - - - name: Refresh DBs - env: - DEBIAN_FRONTEND: noninteractive - run: | - echo "Install Deploy to Container CLI dependencies..." - cd dev/deploy-to-container - npm ci - cd ../.. - echo "Start Refresh..." 
- node ./dev/deploy-to-container/refresh.js - - - name: Cleanup old docker resources - env: - DEBIAN_FRONTEND: noninteractive - run: | - docker image prune -a -f diff --git a/.github/workflows/tests-az.yml b/.github/workflows/tests-az.yml index 6964f355a3..833ca89bef 100644 --- a/.github/workflows/tests-az.yml +++ b/.github/workflows/tests-az.yml @@ -38,7 +38,7 @@ jobs: ssh-keyscan -t rsa $vminfo >> ~/.ssh/known_hosts - name: Remote SSH into VM - uses: appleboy/ssh-action@55dabf81b49d4120609345970c91507e2d734799 + uses: appleboy/ssh-action@0ff4204d59e8e51228ff73bce53f80d53301dee2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5884b724fc..ad2e35408d 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,164 +1,190 @@ -name: Reusable Tests Workflow - -on: - workflow_call: - inputs: - ignoreLowerCoverage: - description: 'Ignore Lower Coverage' - default: false - required: true - type: boolean - -jobs: - tests-python: - name: Python Tests - runs-on: ubuntu-latest - container: ghcr.io/ietf-tools/datatracker-app-base:latest - - services: - db: - image: ghcr.io/ietf-tools/datatracker-db:latest - - steps: - - uses: actions/checkout@v4 - - - name: Prepare for tests - run: | - chmod +x ./dev/tests/prepare.sh - sh ./dev/tests/prepare.sh - - - name: Ensure DB is ready - run: | - /usr/local/bin/wait-for db:5432 -- echo "DB ready" - - - name: Run all tests - shell: bash - run: | - echo "Running checks..." - ./ietf/manage.py check - ./ietf/manage.py migrate --fake-initial - echo "Validating migrations..." - if ! ( ietf/manage.py makemigrations --dry-run --check --verbosity 3 ) ; then - echo "Model changes without migrations found." - exit 1 - fi - echo "Running tests..." - if [[ "x${{ github.event.inputs.ignoreLowerCoverage }}" == "xtrue" ]]; then - echo "Lower coverage failures will be ignored." - ./ietf/manage.py test -v2 --validate-html-harder --settings=settings_test --ignore-lower-coverage - else - ./ietf/manage.py test -v2 --validate-html-harder --settings=settings_test - fi - coverage xml - - - name: Upload Coverage Results to Codecov - uses: codecov/codecov-action@v3.1.4 - with: - files: coverage.xml - - - name: Convert Coverage Results - if: ${{ always() }} - run: | - mv latest-coverage.json coverage.json - - - name: Upload Coverage Results as Build Artifact - uses: actions/upload-artifact@v3 - if: ${{ always() }} - with: - name: coverage - path: coverage.json - - tests-playwright: - name: Playwright Tests - runs-on: macos-latest - strategy: - fail-fast: false - matrix: - project: [chromium, firefox] - - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-node@v4 - with: - node-version: '18' - - - name: Run all tests - run: | - echo "Installing dependencies..." - yarn - echo "Installing Playwright..." - cd playwright - mkdir test-results - npm ci - npx playwright install --with-deps ${{ matrix.project }} - echo "Running tests..." 
- npx playwright test --project=${{ matrix.project }} - - - name: Upload Report - uses: actions/upload-artifact@v3 - if: ${{ always() }} - continue-on-error: true - with: - name: playwright-results-${{ matrix.project }} - path: playwright/test-results/ - if-no-files-found: ignore - - tests-playwright-legacy: - name: Playwright Legacy Tests - runs-on: ubuntu-latest - container: ghcr.io/ietf-tools/datatracker-app-base:latest - strategy: - fail-fast: false - matrix: - project: [chromium, firefox] - - services: - db: - image: ghcr.io/ietf-tools/datatracker-db:latest - - steps: - - uses: actions/checkout@v4 - - - name: Prepare for tests - run: | - chmod +x ./dev/tests/prepare.sh - sh ./dev/tests/prepare.sh - - - name: Ensure DB is ready - run: | - /usr/local/bin/wait-for db:5432 -- echo "DB ready" - - - name: Start Datatracker - run: | - echo "Running checks..." - ./ietf/manage.py check - ./ietf/manage.py migrate --fake-initial - echo "Starting datatracker..." - ./ietf/manage.py runserver 0.0.0.0:8000 --settings=settings_local & - echo "Waiting for datatracker to be ready..." - /usr/local/bin/wait-for localhost:8000 -- echo "Datatracker ready" - - - name: Run all tests - env: - # Required to get firefox to run as root: - HOME: "" - run: | - echo "Installing dependencies..." - yarn - echo "Installing Playwright..." - cd playwright - mkdir test-results - npm ci - npx playwright install --with-deps ${{ matrix.project }} - echo "Running tests..." - npx playwright test --project=${{ matrix.project }} -c playwright-legacy.config.js - - - name: Upload Report - uses: actions/upload-artifact@v3 - if: ${{ always() }} - continue-on-error: true - with: - name: playwright-legacy-results-${{ matrix.project }} - path: playwright/test-results/ - if-no-files-found: ignore \ No newline at end of file +name: Reusable Tests Workflow + +on: + workflow_call: + inputs: + ignoreLowerCoverage: + description: 'Ignore Lower Coverage' + default: false + required: true + type: boolean + skipSelenium: + description: 'Skip Selenium Tests' + default: false + required: false + type: boolean + targetBaseVersion: + description: 'Target Base Image Version' + default: latest + required: false + type: string + +jobs: + tests-python: + name: Python Tests + runs-on: ubuntu-latest + container: ghcr.io/ietf-tools/datatracker-app-base:${{ inputs.targetBaseVersion }} + + services: + db: + image: ghcr.io/ietf-tools/datatracker-db:latest + blobstore: + image: ghcr.io/ietf-tools/datatracker-devblobstore:latest + + steps: + - uses: actions/checkout@v6 + + - name: Prepare for tests + run: | + chmod +x ./dev/tests/prepare.sh + sh ./dev/tests/prepare.sh + + - name: Ensure DB is ready + run: | + /usr/local/bin/wait-for db:5432 -- echo "DB ready" + + - name: Run all tests + shell: bash + run: | + echo "Running checks..." + ./ietf/manage.py check + ./ietf/manage.py migrate --fake-initial + echo "Validating migrations..." + if ! ( ietf/manage.py makemigrations --dry-run --check --verbosity 3 ) ; then + echo "Model changes without migrations found." + exit 1 + fi + if [[ "x${{ inputs.skipSelenium }}" == "xtrue" ]]; then + echo "Disable selenium tests..." + rm /usr/bin/geckodriver + fi + echo "Running tests..." + if [[ "x${{ inputs.ignoreLowerCoverage }}" == "xtrue" ]]; then + echo "Lower coverage failures will be ignored." 
+ HOME=/root ./ietf/manage.py test -v2 --validate-html-harder --settings=settings_test --ignore-lower-coverage + else + HOME=/root ./ietf/manage.py test -v2 --validate-html-harder --settings=settings_test + fi + coverage xml + + - name: Upload geckodriver.log + uses: actions/upload-artifact@v7 + if: ${{ failure() }} + with: + name: geckodriverlog + path: geckodriver.log + + - name: Upload Coverage Results to Codecov + uses: codecov/codecov-action@v6 + with: + disable_search: true + files: coverage.xml + token: ${{ secrets.CODECOV_TOKEN }} + + - name: Convert Coverage Results + if: ${{ always() }} + run: | + mv latest-coverage.json coverage.json + + - name: Upload Coverage Results as Build Artifact + uses: actions/upload-artifact@v7 + if: ${{ always() }} + with: + name: coverage + path: coverage.json + + tests-playwright: + name: Playwright Tests + runs-on: macos-latest + strategy: + fail-fast: false + matrix: + project: [chromium, firefox] + + steps: + - uses: actions/checkout@v6 + + - uses: actions/setup-node@v6 + with: + node-version: '18' + + - name: Run all tests + run: | + echo "Installing dependencies..." + yarn + echo "Installing Playwright..." + cd playwright + mkdir test-results + npm ci + npx playwright install --with-deps ${{ matrix.project }} + echo "Running tests..." + npx playwright test --project=${{ matrix.project }} + + - name: Upload Report + uses: actions/upload-artifact@v7 + if: ${{ always() }} + continue-on-error: true + with: + name: playwright-results-${{ matrix.project }} + path: playwright/test-results/ + if-no-files-found: ignore + + tests-playwright-legacy: + if: ${{ false }} # disable until we sort out suspected test runner issue + name: Playwright Legacy Tests + runs-on: ubuntu-latest + container: ghcr.io/ietf-tools/datatracker-app-base:${{ inputs.targetBaseVersion }} + strategy: + fail-fast: false + matrix: + project: [chromium, firefox] + + services: + db: + image: ghcr.io/ietf-tools/datatracker-db:latest + + steps: + - uses: actions/checkout@v6 + + - name: Prepare for tests + run: | + chmod +x ./dev/tests/prepare.sh + sh ./dev/tests/prepare.sh + + - name: Ensure DB is ready + run: | + /usr/local/bin/wait-for db:5432 -- echo "DB ready" + + - name: Start Datatracker + run: | + echo "Running checks..." + ./ietf/manage.py check + ./ietf/manage.py migrate --fake-initial + echo "Starting datatracker..." + ./ietf/manage.py runserver 0.0.0.0:8000 --settings=settings_local & + echo "Waiting for datatracker to be ready..." + /usr/local/bin/wait-for localhost:8000 -- echo "Datatracker ready" + + - name: Run all tests + env: + # Required to get firefox to run as root: + HOME: "" + run: | + echo "Installing dependencies..." + yarn + echo "Installing Playwright..." + cd playwright + mkdir test-results + npm ci + npx playwright install --with-deps ${{ matrix.project }} + echo "Running tests..." 
+ npx playwright test --project=${{ matrix.project }} -c playwright-legacy.config.js + + - name: Upload Report + uses: actions/upload-artifact@v7 + if: ${{ always() }} + continue-on-error: true + with: + name: playwright-legacy-results-${{ matrix.project }} + path: playwright/test-results/ + if-no-files-found: ignore diff --git a/.gitignore b/.gitignore index 80e5f0228b..ccc7a46b08 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ .DS_store datatracker.sublime-project datatracker.sublime-workspace +/.claude /.coverage /.factoryboy_random_state /.mypy_cache @@ -17,14 +18,17 @@ datatracker.sublime-workspace /docker/docker-compose.extend-custom.yml /env /ghostdriver.log +/geckodriver.log /htmlcov /ietf/static/dist-neue /latest-coverage.json /media /node_modules /release-coverage.json +/static /tmp-* /.testresult +*.swp *.pyc __pycache__ .yarn/* diff --git a/.pnp.cjs b/.pnp.cjs index 364c066720..6c76263c7e 100644 --- a/.pnp.cjs +++ b/.pnp.cjs @@ -33,40 +33,45 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { [null, {\ "packageLocation": "./",\ "packageDependencies": [\ - ["@fullcalendar/bootstrap5", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/core", "npm:6.1.9"],\ - ["@fullcalendar/daygrid", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/icalendar", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/interaction", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/list", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/luxon3", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/timegrid", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/vue3", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@parcel/optimizer-data-url", "npm:2.10.0"],\ - ["@parcel/transformer-inline-string", "npm:2.10.0"],\ - ["@parcel/transformer-sass", "npm:2.10.0"],\ + ["@fullcalendar/bootstrap5", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/core", "npm:6.1.11"],\ + ["@fullcalendar/daygrid", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/icalendar", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/interaction", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/list", 
"virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/luxon3", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/timegrid", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/vue3", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@kurkle/color", "npm:0.3.1"],\ + ["@parcel/optimizer-data-url", "npm:2.12.0"],\ + ["@parcel/transformer-inline-string", "npm:2.12.0"],\ + ["@parcel/transformer-sass", "npm:2.12.0"],\ ["@popperjs/core", "npm:2.11.8"],\ - ["@rollup/pluginutils", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.0.5"],\ + ["@rollup/pluginutils", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.1.0"],\ ["@twuni/emojify", "npm:1.0.2"],\ - ["@vitejs/plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.4.0"],\ - ["bootstrap", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.2"],\ - ["bootstrap-icons", "npm:1.11.1"],\ + ["@vitejs/plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.6.2"],\ + ["@vue/language-plugin-pug", "npm:2.0.7"],\ + ["bootstrap", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.3"],\ + ["bootstrap-icons", "npm:1.11.3"],\ ["browser-fs-access", "npm:0.35.0"],\ ["browserlist", "npm:1.0.1"],\ - ["c8", "npm:8.0.1"],\ - ["caniuse-lite", "npm:1.0.30001538"],\ - ["d3", "npm:7.8.5"],\ - ["eslint", "npm:8.51.0"],\ + ["c8", "npm:9.1.0"],\ + ["caniuse-lite", "npm:1.0.30001603"],\ + ["chart.js", "npm:4.5.1"],\ + ["chartjs-plugin-autocolors", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:0.3.1"],\ + ["chartjs-plugin-zoom", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.2.0"],\ + ["d3", "npm:7.9.0"],\ + ["eslint", "npm:8.57.0"],\ ["eslint-config-standard", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:17.1.0"],\ ["eslint-plugin-cypress", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.15.1"],\ - ["eslint-plugin-import", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.28.1"],\ - ["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.2.0"],\ + ["eslint-plugin-import", 
"virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.29.1"],\ + ["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.6.2"],\ ["eslint-plugin-node", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:11.1.0"],\ ["eslint-plugin-promise", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.1"],\ - ["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.17.0"],\ + ["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.24.0"],\ ["file-saver", "npm:2.0.5"],\ - ["highcharts", "npm:11.1.0"],\ - ["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.5.0"],\ + ["highcharts", "npm:11.4.0"],\ + ["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.18.1"],\ ["ical.js", "npm:1.5.0"],\ ["jquery", "npm:3.7.1"],\ ["jquery-migrate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.1"],\ @@ -74,28 +79,28 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["list.js", "npm:2.3.1"],\ ["lodash", "npm:4.17.21"],\ ["lodash-es", "npm:4.17.21"],\ - ["luxon", "npm:3.4.3"],\ - ["moment", "npm:2.29.4"],\ - ["moment-timezone", "npm:0.5.43"],\ + ["luxon", "npm:3.4.4"],\ + ["moment", "npm:2.30.1"],\ + ["moment-timezone", "npm:0.5.45"],\ ["ms", "npm:2.1.3"],\ ["murmurhash-js", "npm:1.0.0"],\ - ["naive-ui", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.35.0"],\ - ["parcel", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.10.0"],\ + ["naive-ui", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.38.1"],\ + ["parcel", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.12.0"],\ ["pinia", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.1.7"],\ ["pinia-plugin-persist", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:1.0.0"],\ ["pug", "npm:3.0.2"],\ - ["sass", "npm:1.69.4"],\ + ["sass", "npm:1.72.0"],\ ["seedrandom", "npm:3.0.5"],\ ["select2", "npm:4.1.0-rc.0"],\ ["select2-bootstrap-5-theme", "npm:1.3.0"],\ ["send", "npm:0.18.0"],\ ["shepherd.js", "npm:11.2.0"],\ ["slugify", "npm:1.6.6"],\ - ["sortablejs", "npm:1.15.0"],\ + ["sortablejs", "npm:1.15.2"],\ ["vanillajs-datepicker", "npm:1.3.4"],\ - ["vite", 
"virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.4.11"],\ - ["vue", "npm:3.3.4"],\ - ["vue-router", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.2.5"],\ + ["vite", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.5.3"],\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"],\ + ["vue-router", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.3.0"],\ ["zxcvbn", "npm:4.4.2"]\ ],\ "linkType": "SOFT"\ @@ -150,10 +155,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "HARD"\ }],\ - ["npm:7.22.4", {\ - "packageLocation": "./.yarn/cache/@babel-parser-npm-7.22.4-32183c89ee-0ca6d3a2d9.zip/node_modules/@babel/parser/",\ + ["npm:7.23.9", {\ + "packageLocation": "./.yarn/cache/@babel-parser-npm-7.23.9-720a0b56cb-e7cd4960ac.zip/node_modules/@babel/parser/",\ "packageDependencies": [\ - ["@babel/parser", "npm:7.22.4"],\ + ["@babel/parser", "npm:7.23.9"],\ ["@babel/types", "npm:7.18.4"]\ ],\ "linkType": "HARD"\ @@ -197,10 +202,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - ["virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.15.12", {\ - "packageLocation": "./.yarn/__virtual__/@css-render-plugin-bem-virtual-3ee8479233/0/cache/@css-render-plugin-bem-npm-0.15.12-bf8b43dc1f-9fa7ddd62b.zip/node_modules/@css-render/plugin-bem/",\ + ["virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.15.12", {\ + "packageLocation": "./.yarn/__virtual__/@css-render-plugin-bem-virtual-105b1b654b/0/cache/@css-render-plugin-bem-npm-0.15.12-bf8b43dc1f-9fa7ddd62b.zip/node_modules/@css-render/plugin-bem/",\ "packageDependencies": [\ - ["@css-render/plugin-bem", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.15.12"],\ + ["@css-render/plugin-bem", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.15.12"],\ ["@types/css-render", null],\ ["css-render", "npm:0.15.12"]\ ],\ @@ -226,12 +231,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - ["virtual:07229bbf54bc488d21e48f65df3fcd2cdabd1e401dfffccce7403d04695be90e478a0d508694f896481602b0f9db804b9f384dfa051fe08e896fd18fd1fe0b6b#npm:0.15.10", {\ - "packageLocation": "./.yarn/__virtual__/@css-render-vue3-ssr-virtual-5eb3a62c1f/0/cache/@css-render-vue3-ssr-npm-0.15.10-b8526cc313-7977e0c440.zip/node_modules/@css-render/vue3-ssr/",\ + ["virtual:2366be83ef58a728ebb5a5e9ed4600f4465f98b2a844262fcfbe89415361d5d5f9e964ec3b9a72d6a5004f37c1024d017c65e67473dd9cc39cd61f51768c65e6#npm:0.15.10", {\ + "packageLocation": "./.yarn/__virtual__/@css-render-vue3-ssr-virtual-8cb63dbe2e/0/cache/@css-render-vue3-ssr-npm-0.15.10-b8526cc313-7977e0c440.zip/node_modules/@css-render/vue3-ssr/",\ "packageDependencies": [\ - ["@css-render/vue3-ssr", 
"virtual:07229bbf54bc488d21e48f65df3fcd2cdabd1e401dfffccce7403d04695be90e478a0d508694f896481602b0f9db804b9f384dfa051fe08e896fd18fd1fe0b6b#npm:0.15.10"],\ + ["@css-render/vue3-ssr", "virtual:2366be83ef58a728ebb5a5e9ed4600f4465f98b2a844262fcfbe89415361d5d5f9e964ec3b9a72d6a5004f37c1024d017c65e67473dd9cc39cd61f51768c65e6#npm:0.15.10"],\ ["@types/vue", null],\ - ["vue", "npm:3.3.4"]\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"]\ ],\ "packagePeers": [\ "@types/vue",\ @@ -239,12 +244,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "HARD"\ }],\ - ["virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.15.12", {\ - "packageLocation": "./.yarn/__virtual__/@css-render-vue3-ssr-virtual-f9b68b2e9d/0/cache/@css-render-vue3-ssr-npm-0.15.12-a130f4db3a-a5505ae161.zip/node_modules/@css-render/vue3-ssr/",\ + ["virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.15.12", {\ + "packageLocation": "./.yarn/__virtual__/@css-render-vue3-ssr-virtual-18db73fb22/0/cache/@css-render-vue3-ssr-npm-0.15.12-a130f4db3a-a5505ae161.zip/node_modules/@css-render/vue3-ssr/",\ "packageDependencies": [\ - ["@css-render/vue3-ssr", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.15.12"],\ + ["@css-render/vue3-ssr", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.15.12"],\ ["@types/vue", null],\ - ["vue", "npm:3.3.4"]\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"]\ ],\ "packagePeers": [\ "@types/vue",\ @@ -468,12 +473,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - ["virtual:77fce3ec74d55c7e6791631c329cf3adde374e21e618e865127f72e63efeb3376dcf7fc8217de80f5a310e81c791a72e9d099b00fd3252d5653ff68dff50c2fa#npm:4.4.0", {\ - "packageLocation": "./.yarn/__virtual__/@eslint-community-eslint-utils-virtual-3663415ec2/0/cache/@eslint-community-eslint-utils-npm-4.4.0-d1791bd5a3-cdfe3ae42b.zip/node_modules/@eslint-community/eslint-utils/",\ + ["virtual:4286e12a3a0f74af013bc8f16c6d8fdde823cfbf6389660266b171e551f576c805b0a7a8eb2a7087a5cee7dfe6ebb6e1ea3808d93daf915edc95656907a381bb#npm:4.4.0", {\ + "packageLocation": "./.yarn/__virtual__/@eslint-community-eslint-utils-virtual-1c7da85a1a/0/cache/@eslint-community-eslint-utils-npm-4.4.0-d1791bd5a3-cdfe3ae42b.zip/node_modules/@eslint-community/eslint-utils/",\ "packageDependencies": [\ - ["@eslint-community/eslint-utils", "virtual:77fce3ec74d55c7e6791631c329cf3adde374e21e618e865127f72e63efeb3376dcf7fc8217de80f5a310e81c791a72e9d099b00fd3252d5653ff68dff50c2fa#npm:4.4.0"],\ + ["@eslint-community/eslint-utils", "virtual:4286e12a3a0f74af013bc8f16c6d8fdde823cfbf6389660266b171e551f576c805b0a7a8eb2a7087a5cee7dfe6ebb6e1ea3808d93daf915edc95656907a381bb#npm:4.4.0"],\ ["@types/eslint", null],\ - ["eslint", "npm:8.51.0"],\ + ["eslint", "npm:8.57.0"],\ ["eslint-visitor-keys", "npm:3.3.0"]\ ],\ "packagePeers": [\ @@ -484,10 +489,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@eslint-community/regexpp", [\ - ["npm:4.5.1", {\ - 
"packageLocation": "./.yarn/cache/@eslint-community-regexpp-npm-4.5.1-bf72922237-6d901166d6.zip/node_modules/@eslint-community/regexpp/",\ + ["npm:4.10.0", {\ + "packageLocation": "./.yarn/cache/@eslint-community-regexpp-npm-4.10.0-6bfb984c81-2a6e345429.zip/node_modules/@eslint-community/regexpp/",\ "packageDependencies": [\ - ["@eslint-community/regexpp", "npm:4.5.1"]\ + ["@eslint-community/regexpp", "npm:4.10.0"]\ ],\ "linkType": "HARD"\ }],\ @@ -500,10 +505,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@eslint/eslintrc", [\ - ["npm:2.1.2", {\ - "packageLocation": "./.yarn/cache/@eslint-eslintrc-npm-2.1.2-feb0771c9f-bc742a1e3b.zip/node_modules/@eslint/eslintrc/",\ + ["npm:2.1.4", {\ + "packageLocation": "./.yarn/cache/@eslint-eslintrc-npm-2.1.4-1ff4b5f908-10957c7592.zip/node_modules/@eslint/eslintrc/",\ "packageDependencies": [\ - ["@eslint/eslintrc", "npm:2.1.2"],\ + ["@eslint/eslintrc", "npm:2.1.4"],\ ["ajv", "npm:6.12.6"],\ ["debug", "virtual:b86a9fb34323a98c6519528ed55faa0d9b44ca8879307c0b29aa384bde47ff59a7d0c9051b31246f14521dfb71ba3c5d6d0b35c29fffc17bf875aa6ad977d9e8#npm:4.3.4"],\ ["espree", "npm:9.6.1"],\ @@ -518,10 +523,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@eslint/js", [\ - ["npm:8.51.0", {\ - "packageLocation": "./.yarn/cache/@eslint-js-npm-8.51.0-51f088b88b-0228bf1e1e.zip/node_modules/@eslint/js/",\ + ["npm:8.57.0", {\ + "packageLocation": "./.yarn/cache/@eslint-js-npm-8.57.0-00ead3710a-315dc65b0e.zip/node_modules/@eslint/js/",\ "packageDependencies": [\ - ["@eslint/js", "npm:8.51.0"]\ + ["@eslint/js", "npm:8.57.0"]\ ],\ "linkType": "HARD"\ }]\ @@ -557,18 +562,18 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@fullcalendar/bootstrap5", [\ - ["npm:6.1.9", {\ - "packageLocation": "./.yarn/cache/@fullcalendar-bootstrap5-npm-6.1.9-ef68c3c094-1d6168fafc.zip/node_modules/@fullcalendar/bootstrap5/",\ + ["npm:6.1.11", {\ + "packageLocation": "./.yarn/cache/@fullcalendar-bootstrap5-npm-6.1.11-6e0fbf281a-a0c3b94346.zip/node_modules/@fullcalendar/bootstrap5/",\ "packageDependencies": [\ - ["@fullcalendar/bootstrap5", "npm:6.1.9"]\ + ["@fullcalendar/bootstrap5", "npm:6.1.11"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9", {\ - "packageLocation": "./.yarn/__virtual__/@fullcalendar-bootstrap5-virtual-32a9d3c1d6/0/cache/@fullcalendar-bootstrap5-npm-6.1.9-ef68c3c094-1d6168fafc.zip/node_modules/@fullcalendar/bootstrap5/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11", {\ + "packageLocation": "./.yarn/__virtual__/@fullcalendar-bootstrap5-virtual-50942c1c6f/0/cache/@fullcalendar-bootstrap5-npm-6.1.11-6e0fbf281a-a0c3b94346.zip/node_modules/@fullcalendar/bootstrap5/",\ "packageDependencies": [\ - ["@fullcalendar/bootstrap5", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/core", "npm:6.1.9"],\ + ["@fullcalendar/bootstrap5", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/core", "npm:6.1.11"],\ ["@types/fullcalendar__core", null]\ ],\ "packagePeers": [\ @@ -579,28 +584,28 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ 
]],\ ["@fullcalendar/core", [\ - ["npm:6.1.9", {\ - "packageLocation": "./.yarn/cache/@fullcalendar-core-npm-6.1.9-b4da84d4b8-836db3e40c.zip/node_modules/@fullcalendar/core/",\ + ["npm:6.1.11", {\ + "packageLocation": "./.yarn/cache/@fullcalendar-core-npm-6.1.11-ae049c8ace-0078a6f96b.zip/node_modules/@fullcalendar/core/",\ "packageDependencies": [\ - ["@fullcalendar/core", "npm:6.1.9"],\ + ["@fullcalendar/core", "npm:6.1.11"],\ ["preact", "npm:10.12.1"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@fullcalendar/daygrid", [\ - ["npm:6.1.9", {\ - "packageLocation": "./.yarn/cache/@fullcalendar-daygrid-npm-6.1.9-4c0da59f84-3db55247c4.zip/node_modules/@fullcalendar/daygrid/",\ + ["npm:6.1.11", {\ + "packageLocation": "./.yarn/cache/@fullcalendar-daygrid-npm-6.1.11-2187ca1b8f-6eb5606de5.zip/node_modules/@fullcalendar/daygrid/",\ "packageDependencies": [\ - ["@fullcalendar/daygrid", "npm:6.1.9"]\ + ["@fullcalendar/daygrid", "npm:6.1.11"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9", {\ - "packageLocation": "./.yarn/__virtual__/@fullcalendar-daygrid-virtual-8030f0f5bf/0/cache/@fullcalendar-daygrid-npm-6.1.9-4c0da59f84-3db55247c4.zip/node_modules/@fullcalendar/daygrid/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11", {\ + "packageLocation": "./.yarn/__virtual__/@fullcalendar-daygrid-virtual-b91d1ffe14/0/cache/@fullcalendar-daygrid-npm-6.1.11-2187ca1b8f-6eb5606de5.zip/node_modules/@fullcalendar/daygrid/",\ "packageDependencies": [\ - ["@fullcalendar/daygrid", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/core", "npm:6.1.9"],\ + ["@fullcalendar/daygrid", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/core", "npm:6.1.11"],\ ["@types/fullcalendar__core", null]\ ],\ "packagePeers": [\ @@ -611,18 +616,18 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@fullcalendar/icalendar", [\ - ["npm:6.1.9", {\ - "packageLocation": "./.yarn/cache/@fullcalendar-icalendar-npm-6.1.9-92e390eda8-d47daf4ae0.zip/node_modules/@fullcalendar/icalendar/",\ + ["npm:6.1.11", {\ + "packageLocation": "./.yarn/cache/@fullcalendar-icalendar-npm-6.1.11-73807e790d-4e6eff15a8.zip/node_modules/@fullcalendar/icalendar/",\ "packageDependencies": [\ - ["@fullcalendar/icalendar", "npm:6.1.9"]\ + ["@fullcalendar/icalendar", "npm:6.1.11"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9", {\ - "packageLocation": "./.yarn/__virtual__/@fullcalendar-icalendar-virtual-2edf12646d/0/cache/@fullcalendar-icalendar-npm-6.1.9-92e390eda8-d47daf4ae0.zip/node_modules/@fullcalendar/icalendar/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11", {\ + "packageLocation": "./.yarn/__virtual__/@fullcalendar-icalendar-virtual-636a290006/0/cache/@fullcalendar-icalendar-npm-6.1.11-73807e790d-4e6eff15a8.zip/node_modules/@fullcalendar/icalendar/",\ "packageDependencies": [\ - ["@fullcalendar/icalendar", 
"virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/core", "npm:6.1.9"],\ + ["@fullcalendar/icalendar", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/core", "npm:6.1.11"],\ ["@types/fullcalendar__core", null],\ ["@types/ical.js", null],\ ["ical.js", "npm:1.5.0"]\ @@ -637,18 +642,18 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@fullcalendar/interaction", [\ - ["npm:6.1.9", {\ - "packageLocation": "./.yarn/cache/@fullcalendar-interaction-npm-6.1.9-f729b81a3d-787111ea6f.zip/node_modules/@fullcalendar/interaction/",\ + ["npm:6.1.11", {\ + "packageLocation": "./.yarn/cache/@fullcalendar-interaction-npm-6.1.11-39630596c7-c67d4cfa0b.zip/node_modules/@fullcalendar/interaction/",\ "packageDependencies": [\ - ["@fullcalendar/interaction", "npm:6.1.9"]\ + ["@fullcalendar/interaction", "npm:6.1.11"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9", {\ - "packageLocation": "./.yarn/__virtual__/@fullcalendar-interaction-virtual-45406e4d3a/0/cache/@fullcalendar-interaction-npm-6.1.9-f729b81a3d-787111ea6f.zip/node_modules/@fullcalendar/interaction/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11", {\ + "packageLocation": "./.yarn/__virtual__/@fullcalendar-interaction-virtual-3ebf8b0646/0/cache/@fullcalendar-interaction-npm-6.1.11-39630596c7-c67d4cfa0b.zip/node_modules/@fullcalendar/interaction/",\ "packageDependencies": [\ - ["@fullcalendar/interaction", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/core", "npm:6.1.9"],\ + ["@fullcalendar/interaction", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/core", "npm:6.1.11"],\ ["@types/fullcalendar__core", null]\ ],\ "packagePeers": [\ @@ -659,18 +664,18 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@fullcalendar/list", [\ - ["npm:6.1.9", {\ - "packageLocation": "./.yarn/cache/@fullcalendar-list-npm-6.1.9-f76695c5ab-978dd54b71.zip/node_modules/@fullcalendar/list/",\ + ["npm:6.1.11", {\ + "packageLocation": "./.yarn/cache/@fullcalendar-list-npm-6.1.11-8f1846f302-84a8cd6e63.zip/node_modules/@fullcalendar/list/",\ "packageDependencies": [\ - ["@fullcalendar/list", "npm:6.1.9"]\ + ["@fullcalendar/list", "npm:6.1.11"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9", {\ - "packageLocation": "./.yarn/__virtual__/@fullcalendar-list-virtual-95391a7d25/0/cache/@fullcalendar-list-npm-6.1.9-f76695c5ab-978dd54b71.zip/node_modules/@fullcalendar/list/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11", {\ + "packageLocation": 
"./.yarn/__virtual__/@fullcalendar-list-virtual-1c555df506/0/cache/@fullcalendar-list-npm-6.1.11-8f1846f302-84a8cd6e63.zip/node_modules/@fullcalendar/list/",\ "packageDependencies": [\ - ["@fullcalendar/list", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/core", "npm:6.1.9"],\ + ["@fullcalendar/list", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/core", "npm:6.1.11"],\ ["@types/fullcalendar__core", null]\ ],\ "packagePeers": [\ @@ -681,21 +686,21 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@fullcalendar/luxon3", [\ - ["npm:6.1.9", {\ - "packageLocation": "./.yarn/cache/@fullcalendar-luxon3-npm-6.1.9-d79fc8f961-25122126e2.zip/node_modules/@fullcalendar/luxon3/",\ + ["npm:6.1.11", {\ + "packageLocation": "./.yarn/cache/@fullcalendar-luxon3-npm-6.1.11-3e90656a71-8e7f45aab2.zip/node_modules/@fullcalendar/luxon3/",\ "packageDependencies": [\ - ["@fullcalendar/luxon3", "npm:6.1.9"]\ + ["@fullcalendar/luxon3", "npm:6.1.11"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9", {\ - "packageLocation": "./.yarn/__virtual__/@fullcalendar-luxon3-virtual-2026214153/0/cache/@fullcalendar-luxon3-npm-6.1.9-d79fc8f961-25122126e2.zip/node_modules/@fullcalendar/luxon3/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11", {\ + "packageLocation": "./.yarn/__virtual__/@fullcalendar-luxon3-virtual-38643019c2/0/cache/@fullcalendar-luxon3-npm-6.1.11-3e90656a71-8e7f45aab2.zip/node_modules/@fullcalendar/luxon3/",\ "packageDependencies": [\ - ["@fullcalendar/luxon3", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/core", "npm:6.1.9"],\ + ["@fullcalendar/luxon3", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/core", "npm:6.1.11"],\ ["@types/fullcalendar__core", null],\ ["@types/luxon", null],\ - ["luxon", "npm:3.4.3"]\ + ["luxon", "npm:3.4.4"]\ ],\ "packagePeers": [\ "@fullcalendar/core",\ @@ -707,19 +712,19 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@fullcalendar/timegrid", [\ - ["npm:6.1.9", {\ - "packageLocation": "./.yarn/cache/@fullcalendar-timegrid-npm-6.1.9-b227fefa80-8c12a508f7.zip/node_modules/@fullcalendar/timegrid/",\ + ["npm:6.1.11", {\ + "packageLocation": "./.yarn/cache/@fullcalendar-timegrid-npm-6.1.11-1d43455bfd-4a11e6dd90.zip/node_modules/@fullcalendar/timegrid/",\ "packageDependencies": [\ - ["@fullcalendar/timegrid", "npm:6.1.9"]\ + ["@fullcalendar/timegrid", "npm:6.1.11"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9", {\ - "packageLocation": "./.yarn/__virtual__/@fullcalendar-timegrid-virtual-6658ed7986/0/cache/@fullcalendar-timegrid-npm-6.1.9-b227fefa80-8c12a508f7.zip/node_modules/@fullcalendar/timegrid/",\ + 
["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11", {\ + "packageLocation": "./.yarn/__virtual__/@fullcalendar-timegrid-virtual-5e951d78a6/0/cache/@fullcalendar-timegrid-npm-6.1.11-1d43455bfd-4a11e6dd90.zip/node_modules/@fullcalendar/timegrid/",\ "packageDependencies": [\ - ["@fullcalendar/timegrid", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/core", "npm:6.1.9"],\ - ["@fullcalendar/daygrid", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ + ["@fullcalendar/timegrid", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/core", "npm:6.1.11"],\ + ["@fullcalendar/daygrid", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ ["@types/fullcalendar__core", null]\ ],\ "packagePeers": [\ @@ -730,21 +735,21 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@fullcalendar/vue3", [\ - ["npm:6.1.9", {\ - "packageLocation": "./.yarn/cache/@fullcalendar-vue3-npm-6.1.9-3c150e259d-2c1c0fbe72.zip/node_modules/@fullcalendar/vue3/",\ + ["npm:6.1.11", {\ + "packageLocation": "./.yarn/cache/@fullcalendar-vue3-npm-6.1.11-f6b8b48da4-5891a596e9.zip/node_modules/@fullcalendar/vue3/",\ "packageDependencies": [\ - ["@fullcalendar/vue3", "npm:6.1.9"]\ + ["@fullcalendar/vue3", "npm:6.1.11"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9", {\ - "packageLocation": "./.yarn/__virtual__/@fullcalendar-vue3-virtual-00c58bdbde/0/cache/@fullcalendar-vue3-npm-6.1.9-3c150e259d-2c1c0fbe72.zip/node_modules/@fullcalendar/vue3/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11", {\ + "packageLocation": "./.yarn/__virtual__/@fullcalendar-vue3-virtual-cb317bc2d1/0/cache/@fullcalendar-vue3-npm-6.1.11-f6b8b48da4-5891a596e9.zip/node_modules/@fullcalendar/vue3/",\ "packageDependencies": [\ - ["@fullcalendar/vue3", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/core", "npm:6.1.9"],\ + ["@fullcalendar/vue3", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/core", "npm:6.1.11"],\ ["@types/fullcalendar__core", null],\ ["@types/vue", null],\ - ["vue", "npm:3.3.4"]\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"]\ ],\ "packagePeers": [\ "@fullcalendar/core",\ @@ -775,11 +780,11 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@humanwhocodes/config-array", [\ - ["npm:0.11.11", {\ - "packageLocation": "./.yarn/cache/@humanwhocodes-config-array-npm-0.11.11-e3582554ee-db84507375.zip/node_modules/@humanwhocodes/config-array/",\ + ["npm:0.11.14", {\ + "packageLocation": 
"./.yarn/cache/@humanwhocodes-config-array-npm-0.11.14-94a02fcc87-861ccce9ea.zip/node_modules/@humanwhocodes/config-array/",\ "packageDependencies": [\ - ["@humanwhocodes/config-array", "npm:0.11.11"],\ - ["@humanwhocodes/object-schema", "npm:1.2.1"],\ + ["@humanwhocodes/config-array", "npm:0.11.14"],\ + ["@humanwhocodes/object-schema", "npm:2.0.2"],\ ["debug", "virtual:b86a9fb34323a98c6519528ed55faa0d9b44ca8879307c0b29aa384bde47ff59a7d0c9051b31246f14521dfb71ba3c5d6d0b35c29fffc17bf875aa6ad977d9e8#npm:4.3.4"],\ ["minimatch", "npm:3.1.2"]\ ],\ @@ -796,10 +801,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@humanwhocodes/object-schema", [\ - ["npm:1.2.1", {\ - "packageLocation": "./.yarn/cache/@humanwhocodes-object-schema-npm-1.2.1-eb622b5d0e-a824a1ec31.zip/node_modules/@humanwhocodes/object-schema/",\ + ["npm:2.0.2", {\ + "packageLocation": "./.yarn/cache/@humanwhocodes-object-schema-npm-2.0.2-77b42018f9-2fc1150336.zip/node_modules/@humanwhocodes/object-schema/",\ "packageDependencies": [\ - ["@humanwhocodes/object-schema", "npm:1.2.1"]\ + ["@humanwhocodes/object-schema", "npm:2.0.2"]\ ],\ "linkType": "HARD"\ }]\ @@ -882,6 +887,22 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["@kurkle/color", [\ + ["npm:0.3.1", {\ + "packageLocation": "./.yarn/cache/@kurkle-color-npm-0.3.1-174f3d038c-e6be5c081b.zip/node_modules/@kurkle/color/",\ + "packageDependencies": [\ + ["@kurkle/color", "npm:0.3.1"]\ + ],\ + "linkType": "HARD"\ + }],\ + ["npm:0.3.4", {\ + "packageLocation": "./.yarn/cache/@kurkle-color-npm-0.3.4-fbd637031f-b95c6abe02.zip/node_modules/@kurkle/color/",\ + "packageDependencies": [\ + ["@kurkle/color", "npm:0.3.4"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["@lezer/common", [\ ["npm:0.15.12", {\ "packageLocation": "./.yarn/cache/@lezer-common-npm-0.15.12-62017272b0-dae6581618.zip/node_modules/@lezer/common/",\ @@ -1159,25 +1180,25 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/bundler-default", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-bundler-default-npm-2.10.0-bf1aa01515-58d3619928.zip/node_modules/@parcel/bundler-default/",\ - "packageDependencies": [\ - ["@parcel/bundler-default", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/graph", "npm:3.0.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/rust", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-bundler-default-npm-2.12.0-9ba57d919c-f211a76f55.zip/node_modules/@parcel/bundler-default/",\ + "packageDependencies": [\ + ["@parcel/bundler-default", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/graph", "npm:3.2.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/rust", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["nullthrows", "npm:1.1.1"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/cache", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-cache-npm-2.10.0-37f1f83d32-209d474abd.zip/node_modules/@parcel/cache/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-cache-npm-2.12.0-3389909f2c-a45e799809.zip/node_modules/@parcel/cache/",\ "packageDependencies": [\ - ["@parcel/cache", "npm:2.10.0"]\ + ["@parcel/cache", "npm:2.12.0"]\ ],\ "linkType": "SOFT"\ }],\ @@ -1188,30 +1209,14 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - 
["virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0", {\ - "packageLocation": "./.yarn/__virtual__/@parcel-cache-virtual-ef6118146d/0/cache/@parcel-cache-npm-2.10.0-37f1f83d32-209d474abd.zip/node_modules/@parcel/cache/",\ - "packageDependencies": [\ - ["@parcel/cache", "virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0"],\ - ["@parcel/core", "npm:2.6.2"],\ - ["@parcel/fs", "virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0"],\ - ["@parcel/logger", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ - ["@types/parcel__core", null],\ - ["lmdb", "npm:2.8.5"]\ - ],\ - "packagePeers": [\ - "@types/parcel__core"\ - ],\ - "linkType": "HARD"\ - }],\ - ["virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0", {\ - "packageLocation": "./.yarn/__virtual__/@parcel-cache-virtual-e54076624a/0/cache/@parcel-cache-npm-2.10.0-37f1f83d32-209d474abd.zip/node_modules/@parcel/cache/",\ - "packageDependencies": [\ - ["@parcel/cache", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ - ["@parcel/core", "npm:2.10.0"],\ - ["@parcel/fs", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ - ["@parcel/logger", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0", {\ + "packageLocation": "./.yarn/__virtual__/@parcel-cache-virtual-a2e9499dbb/0/cache/@parcel-cache-npm-2.12.0-3389909f2c-a45e799809.zip/node_modules/@parcel/cache/",\ + "packageDependencies": [\ + ["@parcel/cache", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ + ["@parcel/core", "npm:2.12.0"],\ + ["@parcel/fs", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ + ["@parcel/logger", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["@types/parcel__core", null],\ ["lmdb", "npm:2.8.5"]\ ],\ @@ -1236,13 +1241,29 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "@types/parcel__core"\ ],\ "linkType": "HARD"\ + }],\ + ["virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0", {\ + "packageLocation": "./.yarn/__virtual__/@parcel-cache-virtual-6f5cc88243/0/cache/@parcel-cache-npm-2.12.0-3389909f2c-a45e799809.zip/node_modules/@parcel/cache/",\ + "packageDependencies": [\ + ["@parcel/cache", "virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0"],\ + ["@parcel/core", "npm:2.6.2"],\ + ["@parcel/fs", "virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0"],\ + ["@parcel/logger", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ + 
["@types/parcel__core", null],\ + ["lmdb", "npm:2.8.5"]\ + ],\ + "packagePeers": [\ + "@types/parcel__core"\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["@parcel/codeframe", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-codeframe-npm-2.10.0-e8aa1b4ecc-d87b17d3ce.zip/node_modules/@parcel/codeframe/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-codeframe-npm-2.12.0-aa8027940e-265c4d7ebe.zip/node_modules/@parcel/codeframe/",\ "packageDependencies": [\ - ["@parcel/codeframe", "npm:2.10.0"],\ + ["@parcel/codeframe", "npm:2.12.0"],\ ["chalk", "npm:4.1.2"]\ ],\ "linkType": "HARD"\ @@ -1257,59 +1278,59 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/compressor-raw", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-compressor-raw-npm-2.10.0-961e5d9fe0-043fca0ecb.zip/node_modules/@parcel/compressor-raw/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-compressor-raw-npm-2.12.0-19f313c172-16c56704f3.zip/node_modules/@parcel/compressor-raw/",\ "packageDependencies": [\ - ["@parcel/compressor-raw", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"]\ + ["@parcel/compressor-raw", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/config-default", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-config-default-npm-2.10.0-2a1fbdf24b-d780d05021.zip/node_modules/@parcel/config-default/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-config-default-npm-2.12.0-aefd3c699e-72877c5dc4.zip/node_modules/@parcel/config-default/",\ "packageDependencies": [\ - ["@parcel/config-default", "npm:2.10.0"]\ + ["@parcel/config-default", "npm:2.12.0"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:71592776e81a3a98123fea990d2adcb9a2eb4cc84ca35ac4be3a6f331fe8d1f764a124c4f9a2dad3afd35076e01667fb0ef9ccd5629fbe405b31f0d1b14a14fd#npm:2.10.0", {\ - "packageLocation": "./.yarn/__virtual__/@parcel-config-default-virtual-61dcbb3314/0/cache/@parcel-config-default-npm-2.10.0-2a1fbdf24b-d780d05021.zip/node_modules/@parcel/config-default/",\ - "packageDependencies": [\ - ["@parcel/config-default", "virtual:71592776e81a3a98123fea990d2adcb9a2eb4cc84ca35ac4be3a6f331fe8d1f764a124c4f9a2dad3afd35076e01667fb0ef9ccd5629fbe405b31f0d1b14a14fd#npm:2.10.0"],\ - ["@parcel/bundler-default", "npm:2.10.0"],\ - ["@parcel/compressor-raw", "npm:2.10.0"],\ - ["@parcel/core", "npm:2.10.0"],\ - ["@parcel/namer-default", "npm:2.10.0"],\ - ["@parcel/optimizer-css", "npm:2.10.0"],\ - ["@parcel/optimizer-htmlnano", "npm:2.10.0"],\ - ["@parcel/optimizer-image", "virtual:61dcbb3314ed0db4613bbccf7f920606ba09571991ce6dbf378d0f819338fde0afdb3e35306b4f7d24155236814007855a39ab7d05afaeadead37df42309ed7e#npm:2.10.0"],\ - ["@parcel/optimizer-svgo", "npm:2.10.0"],\ - ["@parcel/optimizer-swc", "npm:2.10.0"],\ - ["@parcel/packager-css", "npm:2.10.0"],\ - ["@parcel/packager-html", "npm:2.10.0"],\ - ["@parcel/packager-js", "npm:2.10.0"],\ - ["@parcel/packager-raw", "npm:2.10.0"],\ - ["@parcel/packager-svg", "npm:2.10.0"],\ - ["@parcel/packager-wasm", "npm:2.10.0"],\ - ["@parcel/reporter-dev-server", "npm:2.10.0"],\ - ["@parcel/resolver-default", "npm:2.10.0"],\ - ["@parcel/runtime-browser-hmr", "npm:2.10.0"],\ - ["@parcel/runtime-js", "npm:2.10.0"],\ - ["@parcel/runtime-react-refresh", "npm:2.10.0"],\ - ["@parcel/runtime-service-worker", "npm:2.10.0"],\ - ["@parcel/transformer-babel", "npm:2.10.0"],\ - ["@parcel/transformer-css", "npm:2.10.0"],\ - ["@parcel/transformer-html", "npm:2.10.0"],\ - 
["@parcel/transformer-image", "virtual:61dcbb3314ed0db4613bbccf7f920606ba09571991ce6dbf378d0f819338fde0afdb3e35306b4f7d24155236814007855a39ab7d05afaeadead37df42309ed7e#npm:2.10.0"],\ - ["@parcel/transformer-js", "virtual:61dcbb3314ed0db4613bbccf7f920606ba09571991ce6dbf378d0f819338fde0afdb3e35306b4f7d24155236814007855a39ab7d05afaeadead37df42309ed7e#npm:2.10.0"],\ - ["@parcel/transformer-json", "npm:2.10.0"],\ - ["@parcel/transformer-postcss", "npm:2.10.0"],\ - ["@parcel/transformer-posthtml", "npm:2.10.0"],\ - ["@parcel/transformer-raw", "npm:2.10.0"],\ - ["@parcel/transformer-react-refresh-wrap", "npm:2.10.0"],\ - ["@parcel/transformer-svg", "npm:2.10.0"],\ + ["virtual:fdd74b573cf769bcde15fb47c39fbe0d73f59838182900fd59d3d43b2214ea01b1d45084fb49d0c192fc3e8a49adea5782afcb7fe14e09c63bedaf09f4939e35#npm:2.12.0", {\ + "packageLocation": "./.yarn/__virtual__/@parcel-config-default-virtual-284acdc258/0/cache/@parcel-config-default-npm-2.12.0-aefd3c699e-72877c5dc4.zip/node_modules/@parcel/config-default/",\ + "packageDependencies": [\ + ["@parcel/config-default", "virtual:fdd74b573cf769bcde15fb47c39fbe0d73f59838182900fd59d3d43b2214ea01b1d45084fb49d0c192fc3e8a49adea5782afcb7fe14e09c63bedaf09f4939e35#npm:2.12.0"],\ + ["@parcel/bundler-default", "npm:2.12.0"],\ + ["@parcel/compressor-raw", "npm:2.12.0"],\ + ["@parcel/core", "npm:2.12.0"],\ + ["@parcel/namer-default", "npm:2.12.0"],\ + ["@parcel/optimizer-css", "npm:2.12.0"],\ + ["@parcel/optimizer-htmlnano", "npm:2.12.0"],\ + ["@parcel/optimizer-image", "virtual:284acdc258f2328e304855ff98dec9e5e8952a2bd7797a2e11c082f6cad2e0d3068e07fb498d46b810d8efae36becee510ac53186a75e438e809dc472f832ab2#npm:2.12.0"],\ + ["@parcel/optimizer-svgo", "npm:2.12.0"],\ + ["@parcel/optimizer-swc", "npm:2.12.0"],\ + ["@parcel/packager-css", "npm:2.12.0"],\ + ["@parcel/packager-html", "npm:2.12.0"],\ + ["@parcel/packager-js", "npm:2.12.0"],\ + ["@parcel/packager-raw", "npm:2.12.0"],\ + ["@parcel/packager-svg", "npm:2.12.0"],\ + ["@parcel/packager-wasm", "npm:2.12.0"],\ + ["@parcel/reporter-dev-server", "npm:2.12.0"],\ + ["@parcel/resolver-default", "npm:2.12.0"],\ + ["@parcel/runtime-browser-hmr", "npm:2.12.0"],\ + ["@parcel/runtime-js", "npm:2.12.0"],\ + ["@parcel/runtime-react-refresh", "npm:2.12.0"],\ + ["@parcel/runtime-service-worker", "npm:2.12.0"],\ + ["@parcel/transformer-babel", "npm:2.12.0"],\ + ["@parcel/transformer-css", "npm:2.12.0"],\ + ["@parcel/transformer-html", "npm:2.12.0"],\ + ["@parcel/transformer-image", "virtual:284acdc258f2328e304855ff98dec9e5e8952a2bd7797a2e11c082f6cad2e0d3068e07fb498d46b810d8efae36becee510ac53186a75e438e809dc472f832ab2#npm:2.12.0"],\ + ["@parcel/transformer-js", "virtual:284acdc258f2328e304855ff98dec9e5e8952a2bd7797a2e11c082f6cad2e0d3068e07fb498d46b810d8efae36becee510ac53186a75e438e809dc472f832ab2#npm:2.12.0"],\ + ["@parcel/transformer-json", "npm:2.12.0"],\ + ["@parcel/transformer-postcss", "npm:2.12.0"],\ + ["@parcel/transformer-posthtml", "npm:2.12.0"],\ + ["@parcel/transformer-raw", "npm:2.12.0"],\ + ["@parcel/transformer-react-refresh-wrap", "npm:2.12.0"],\ + ["@parcel/transformer-svg", "npm:2.12.0"],\ ["@types/parcel__core", null]\ ],\ "packagePeers": [\ @@ -1320,25 +1341,25 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/core", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-core-npm-2.10.0-59eaeeba7a-c59c2971ea.zip/node_modules/@parcel/core/",\ + ["npm:2.12.0", {\ + "packageLocation": 
"./.yarn/cache/@parcel-core-npm-2.12.0-8f08b883d4-5bf6746308.zip/node_modules/@parcel/core/",\ "packageDependencies": [\ - ["@parcel/core", "npm:2.10.0"],\ + ["@parcel/core", "npm:2.12.0"],\ ["@mischnic/json-sourcemap", "npm:0.1.0"],\ - ["@parcel/cache", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/events", "npm:2.10.0"],\ - ["@parcel/fs", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ - ["@parcel/graph", "npm:3.0.0"],\ - ["@parcel/logger", "npm:2.10.0"],\ - ["@parcel/package-manager", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/profiler", "npm:2.10.0"],\ - ["@parcel/rust", "npm:2.10.0"],\ + ["@parcel/cache", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/events", "npm:2.12.0"],\ + ["@parcel/fs", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ + ["@parcel/graph", "npm:3.2.0"],\ + ["@parcel/logger", "npm:2.12.0"],\ + ["@parcel/package-manager", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/profiler", "npm:2.12.0"],\ + ["@parcel/rust", "npm:2.12.0"],\ ["@parcel/source-map", "npm:2.1.1"],\ - ["@parcel/types", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ - ["@parcel/workers", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ + ["@parcel/types", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ + ["@parcel/workers", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ ["abortcontroller-polyfill", "npm:1.7.3"],\ ["base-x", "npm:3.0.9"],\ ["browserslist", "npm:4.20.3"],\ @@ -1346,7 +1367,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["dotenv", "npm:7.0.0"],\ ["dotenv-expand", "npm:5.1.0"],\ ["json5", "npm:2.2.1"],\ - ["msgpackr", "npm:1.6.0"],\ + ["msgpackr", "npm:1.10.1"],\ ["nullthrows", "npm:1.1.1"],\ ["semver", "npm:7.5.4"]\ ],\ @@ -1385,10 +1406,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/diagnostic", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-diagnostic-npm-2.10.0-1e389b369e-45c606ca52.zip/node_modules/@parcel/diagnostic/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-diagnostic-npm-2.12.0-6e89ddad28-a4b918c1a0.zip/node_modules/@parcel/diagnostic/",\ "packageDependencies": [\ - ["@parcel/diagnostic", "npm:2.10.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ ["@mischnic/json-sourcemap", "npm:0.1.0"],\ ["nullthrows", "npm:1.1.1"]\ ],\ @@ -1405,10 +1426,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/events", [\ - ["npm:2.10.0", {\ - "packageLocation": 
"./.yarn/cache/@parcel-events-npm-2.10.0-da42a4afa6-1d21cd4186.zip/node_modules/@parcel/events/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-events-npm-2.12.0-e6eff18c8c-136a8a2921.zip/node_modules/@parcel/events/",\ "packageDependencies": [\ - ["@parcel/events", "npm:2.10.0"]\ + ["@parcel/events", "npm:2.12.0"]\ ],\ "linkType": "HARD"\ }],\ @@ -1421,10 +1442,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/fs", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-fs-npm-2.10.0-c959567f0f-10faae481c.zip/node_modules/@parcel/fs/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-fs-npm-2.12.0-3c46842e62-43d454d55d.zip/node_modules/@parcel/fs/",\ "packageDependencies": [\ - ["@parcel/fs", "npm:2.10.0"]\ + ["@parcel/fs", "npm:2.12.0"]\ ],\ "linkType": "SOFT"\ }],\ @@ -1435,33 +1456,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - ["virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0", {\ - "packageLocation": "./.yarn/__virtual__/@parcel-fs-virtual-eac6fac48b/0/cache/@parcel-fs-npm-2.10.0-c959567f0f-10faae481c.zip/node_modules/@parcel/fs/",\ - "packageDependencies": [\ - ["@parcel/fs", "virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0"],\ - ["@parcel/core", "npm:2.6.2"],\ - ["@parcel/rust", "npm:2.10.0"],\ - ["@parcel/types", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0", {\ + "packageLocation": "./.yarn/__virtual__/@parcel-fs-virtual-762e5c5add/0/cache/@parcel-fs-npm-2.12.0-3c46842e62-43d454d55d.zip/node_modules/@parcel/fs/",\ + "packageDependencies": [\ + ["@parcel/fs", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ + ["@parcel/core", "npm:2.12.0"],\ + ["@parcel/rust", "npm:2.12.0"],\ + ["@parcel/types", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["@parcel/watcher", "npm:2.0.7"],\ - ["@parcel/workers", "virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0"],\ - ["@types/parcel__core", null]\ - ],\ - "packagePeers": [\ - "@types/parcel__core"\ - ],\ - "linkType": "HARD"\ - }],\ - ["virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0", {\ - "packageLocation": "./.yarn/__virtual__/@parcel-fs-virtual-e5737ffb6b/0/cache/@parcel-fs-npm-2.10.0-c959567f0f-10faae481c.zip/node_modules/@parcel/fs/",\ - "packageDependencies": [\ - ["@parcel/fs", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ - ["@parcel/core", "npm:2.10.0"],\ - ["@parcel/rust", "npm:2.10.0"],\ - ["@parcel/types", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ - ["@parcel/watcher", "npm:2.0.7"],\ - ["@parcel/workers", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ + ["@parcel/workers", 
"virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ ["@types/parcel__core", null]\ ],\ "packagePeers": [\ @@ -1486,6 +1490,23 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "@types/parcel__core"\ ],\ "linkType": "HARD"\ + }],\ + ["virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0", {\ + "packageLocation": "./.yarn/__virtual__/@parcel-fs-virtual-ae7dde1116/0/cache/@parcel-fs-npm-2.12.0-3c46842e62-43d454d55d.zip/node_modules/@parcel/fs/",\ + "packageDependencies": [\ + ["@parcel/fs", "virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0"],\ + ["@parcel/core", "npm:2.6.2"],\ + ["@parcel/rust", "npm:2.12.0"],\ + ["@parcel/types", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ + ["@parcel/watcher", "npm:2.0.7"],\ + ["@parcel/workers", "virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0"],\ + ["@types/parcel__core", null]\ + ],\ + "packagePeers": [\ + "@types/parcel__core"\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["@parcel/fs-search", [\ @@ -1508,10 +1529,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "HARD"\ }],\ - ["npm:3.0.0", {\ - "packageLocation": "./.yarn/cache/@parcel-graph-npm-3.0.0-9001abfefc-0a9d5017f6.zip/node_modules/@parcel/graph/",\ + ["npm:3.2.0", {\ + "packageLocation": "./.yarn/cache/@parcel-graph-npm-3.2.0-92821d4289-b4d31624fc.zip/node_modules/@parcel/graph/",\ "packageDependencies": [\ - ["@parcel/graph", "npm:3.0.0"],\ + ["@parcel/graph", "npm:3.2.0"],\ ["nullthrows", "npm:1.1.1"]\ ],\ "linkType": "HARD"\ @@ -1529,12 +1550,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/logger", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-logger-npm-2.10.0-41ac90e34c-52d0b5331d.zip/node_modules/@parcel/logger/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-logger-npm-2.12.0-7d2f85a906-be3fe9d9ea.zip/node_modules/@parcel/logger/",\ "packageDependencies": [\ - ["@parcel/logger", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/events", "npm:2.10.0"]\ + ["@parcel/logger", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/events", "npm:2.12.0"]\ ],\ "linkType": "HARD"\ }],\ @@ -1549,10 +1570,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/markdown-ansi", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-markdown-ansi-npm-2.10.0-4dd4da44f3-35e2d07ec8.zip/node_modules/@parcel/markdown-ansi/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-markdown-ansi-npm-2.12.0-6b0fe453df-850ee665d9.zip/node_modules/@parcel/markdown-ansi/",\ "packageDependencies": [\ - ["@parcel/markdown-ansi", "npm:2.10.0"],\ + ["@parcel/markdown-ansi", "npm:2.12.0"],\ ["chalk", "npm:4.1.2"]\ ],\ "linkType": "HARD"\ @@ -1567,27 +1588,27 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/namer-default", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-namer-default-npm-2.10.0-4b82db40fd-f2a32096d1.zip/node_modules/@parcel/namer-default/",\ + ["npm:2.12.0", {\ + "packageLocation": 
"./.yarn/cache/@parcel-namer-default-npm-2.12.0-28980cfd47-dc92ec0945.zip/node_modules/@parcel/namer-default/",\ "packageDependencies": [\ - ["@parcel/namer-default", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ + ["@parcel/namer-default", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ ["nullthrows", "npm:1.1.1"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/node-resolver-core", [\ - ["npm:3.1.0", {\ - "packageLocation": "./.yarn/cache/@parcel-node-resolver-core-npm-3.1.0-9c9ff3ab8b-dcdd39bc6a.zip/node_modules/@parcel/node-resolver-core/",\ + ["npm:3.3.0", {\ + "packageLocation": "./.yarn/cache/@parcel-node-resolver-core-npm-3.3.0-53804df663-acc3721678.zip/node_modules/@parcel/node-resolver-core/",\ "packageDependencies": [\ - ["@parcel/node-resolver-core", "npm:3.1.0"],\ + ["@parcel/node-resolver-core", "npm:3.3.0"],\ ["@mischnic/json-sourcemap", "npm:0.1.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/fs", "virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0"],\ - ["@parcel/rust", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/fs", "virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0"],\ + ["@parcel/rust", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["nullthrows", "npm:1.1.1"],\ ["semver", "npm:7.5.4"]\ ],\ @@ -1595,14 +1616,14 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/optimizer-css", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-optimizer-css-npm-2.10.0-dbd5825b4e-ea15989512.zip/node_modules/@parcel/optimizer-css/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-optimizer-css-npm-2.12.0-f95bd4d060-abcdf58c29.zip/node_modules/@parcel/optimizer-css/",\ "packageDependencies": [\ - ["@parcel/optimizer-css", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ + ["@parcel/optimizer-css", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ ["@parcel/source-map", "npm:2.1.1"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["browserslist", "npm:4.20.3"],\ ["lightningcss", "npm:1.17.1"],\ ["nullthrows", "npm:1.1.1"]\ @@ -1611,12 +1632,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/optimizer-data-url", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-optimizer-data-url-npm-2.10.0-700cb5aab6-ec9530be83.zip/node_modules/@parcel/optimizer-data-url/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-optimizer-data-url-npm-2.12.0-dad3731170-0397293961.zip/node_modules/@parcel/optimizer-data-url/",\ "packageDependencies": [\ - ["@parcel/optimizer-data-url", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/optimizer-data-url", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["isbinaryfile", "npm:4.0.10"],\ ["mime", "npm:2.6.0"]\ ],\ @@ -1624,12 +1645,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/optimizer-htmlnano", [\ - ["npm:2.10.0", {\ - "packageLocation": 
"./.yarn/cache/@parcel-optimizer-htmlnano-npm-2.10.0-ee0243765c-1f6de13022.zip/node_modules/@parcel/optimizer-htmlnano/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-optimizer-htmlnano-npm-2.12.0-cdd2835c12-64e571f56f.zip/node_modules/@parcel/optimizer-htmlnano/",\ "packageDependencies": [\ - ["@parcel/optimizer-htmlnano", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["htmlnano", "virtual:ee0243765cbdf501388f259b4f1148af5bb4df5c2fa392d4cf1f1d61d3475a9c15a5729ae4be6dd2e258041e618368d112e36aa7b208b01a51861aaaf92fa944#npm:2.0.2"],\ + ["@parcel/optimizer-htmlnano", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["htmlnano", "virtual:cdd2835c1202e86fad55b2266578ff3755267672440481af37bdfff670fd205f561469a10385c20d1ff403af7fad49006bc71ffff21d12592a8ebd0c8be79c0c#npm:2.0.2"],\ ["nullthrows", "npm:1.1.1"],\ ["posthtml", "npm:0.16.6"],\ ["svgo", "npm:2.8.0"]\ @@ -1638,23 +1659,23 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/optimizer-image", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-optimizer-image-npm-2.10.0-a581b60cbd-94d5db2837.zip/node_modules/@parcel/optimizer-image/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-optimizer-image-npm-2.12.0-4cbc56f72d-7d28379bf1.zip/node_modules/@parcel/optimizer-image/",\ "packageDependencies": [\ - ["@parcel/optimizer-image", "npm:2.10.0"]\ + ["@parcel/optimizer-image", "npm:2.12.0"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:61dcbb3314ed0db4613bbccf7f920606ba09571991ce6dbf378d0f819338fde0afdb3e35306b4f7d24155236814007855a39ab7d05afaeadead37df42309ed7e#npm:2.10.0", {\ - "packageLocation": "./.yarn/__virtual__/@parcel-optimizer-image-virtual-9b7a1cafe7/0/cache/@parcel-optimizer-image-npm-2.10.0-a581b60cbd-94d5db2837.zip/node_modules/@parcel/optimizer-image/",\ - "packageDependencies": [\ - ["@parcel/optimizer-image", "virtual:61dcbb3314ed0db4613bbccf7f920606ba09571991ce6dbf378d0f819338fde0afdb3e35306b4f7d24155236814007855a39ab7d05afaeadead37df42309ed7e#npm:2.10.0"],\ - ["@parcel/core", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/rust", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ - ["@parcel/workers", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ + ["virtual:284acdc258f2328e304855ff98dec9e5e8952a2bd7797a2e11c082f6cad2e0d3068e07fb498d46b810d8efae36becee510ac53186a75e438e809dc472f832ab2#npm:2.12.0", {\ + "packageLocation": "./.yarn/__virtual__/@parcel-optimizer-image-virtual-8c3b1760b5/0/cache/@parcel-optimizer-image-npm-2.12.0-4cbc56f72d-7d28379bf1.zip/node_modules/@parcel/optimizer-image/",\ + "packageDependencies": [\ + ["@parcel/optimizer-image", "virtual:284acdc258f2328e304855ff98dec9e5e8952a2bd7797a2e11c082f6cad2e0d3068e07fb498d46b810d8efae36becee510ac53186a75e438e809dc472f832ab2#npm:2.12.0"],\ + ["@parcel/core", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/rust", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ + ["@parcel/workers", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ ["@types/parcel__core", null]\ ],\ "packagePeers": [\ @@ -1665,38 +1686,38 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/optimizer-svgo", [\ - ["npm:2.10.0", {\ - "packageLocation": 
"./.yarn/cache/@parcel-optimizer-svgo-npm-2.10.0-154d938969-7201c63222.zip/node_modules/@parcel/optimizer-svgo/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-optimizer-svgo-npm-2.12.0-08c0f1b17f-d3a4d2de9f.zip/node_modules/@parcel/optimizer-svgo/",\ "packageDependencies": [\ - ["@parcel/optimizer-svgo", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/optimizer-svgo", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["svgo", "npm:2.8.0"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/optimizer-swc", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-optimizer-swc-npm-2.10.0-caf3bb9c02-1fe68ee6ff.zip/node_modules/@parcel/optimizer-swc/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-optimizer-swc-npm-2.12.0-fb535e4283-0b7fdf3df1.zip/node_modules/@parcel/optimizer-swc/",\ "packageDependencies": [\ - ["@parcel/optimizer-swc", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ + ["@parcel/optimizer-swc", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ ["@parcel/source-map", "npm:2.1.1"],\ - ["@parcel/utils", "npm:2.10.0"],\ - ["@swc/core", "virtual:caf3bb9c02ae9f768ff8cb8f830dcff0d7f38e60f1817c3f155faf0af46cd208a17e673fb908c23a477f907e553fbf9eef21af5f078ed79b4c34aca3fefc5224#npm:1.3.62"],\ + ["@parcel/utils", "npm:2.12.0"],\ + ["@swc/core", "virtual:5f8211ac5fe0096c8679c8fc747f0917af84ce168460ce1b592cb42613ababf55139691f5b329cd10e1e2b99af39861401c7b9633ed396447c506b02a80144b0#npm:1.3.62"],\ ["nullthrows", "npm:1.1.1"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/package-manager", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-package-manager-npm-2.10.0-4f4a39adee-7c4a95d9df.zip/node_modules/@parcel/package-manager/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-package-manager-npm-2.12.0-fc90aacf70-a517e9efe1.zip/node_modules/@parcel/package-manager/",\ "packageDependencies": [\ - ["@parcel/package-manager", "npm:2.10.0"]\ + ["@parcel/package-manager", "npm:2.12.0"]\ ],\ "linkType": "SOFT"\ }],\ @@ -1707,38 +1728,19 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - ["virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0", {\ - "packageLocation": "./.yarn/__virtual__/@parcel-package-manager-virtual-2dbcea4ef2/0/cache/@parcel-package-manager-npm-2.10.0-4f4a39adee-7c4a95d9df.zip/node_modules/@parcel/package-manager/",\ - "packageDependencies": [\ - ["@parcel/package-manager", "virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0"],\ - ["@parcel/core", "npm:2.6.2"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/fs", "virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0"],\ - ["@parcel/logger", "npm:2.10.0"],\ - ["@parcel/node-resolver-core", "npm:3.1.0"],\ - ["@parcel/types", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ - ["@parcel/workers", "virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0"],\ - ["@types/parcel__core", 
null],\ - ["semver", "npm:7.5.4"]\ - ],\ - "packagePeers": [\ - "@types/parcel__core"\ - ],\ - "linkType": "HARD"\ - }],\ - ["virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0", {\ - "packageLocation": "./.yarn/__virtual__/@parcel-package-manager-virtual-07edc7e62f/0/cache/@parcel-package-manager-npm-2.10.0-4f4a39adee-7c4a95d9df.zip/node_modules/@parcel/package-manager/",\ - "packageDependencies": [\ - ["@parcel/package-manager", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ - ["@parcel/core", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/fs", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ - ["@parcel/logger", "npm:2.10.0"],\ - ["@parcel/node-resolver-core", "npm:3.1.0"],\ - ["@parcel/types", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ - ["@parcel/workers", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ + ["virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0", {\ + "packageLocation": "./.yarn/__virtual__/@parcel-package-manager-virtual-8612c9adea/0/cache/@parcel-package-manager-npm-2.12.0-fc90aacf70-a517e9efe1.zip/node_modules/@parcel/package-manager/",\ + "packageDependencies": [\ + ["@parcel/package-manager", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ + ["@parcel/core", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/fs", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ + ["@parcel/logger", "npm:2.12.0"],\ + ["@parcel/node-resolver-core", "npm:3.3.0"],\ + ["@parcel/types", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ + ["@parcel/workers", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ + ["@swc/core", "virtual:5f8211ac5fe0096c8679c8fc747f0917af84ce168460ce1b592cb42613ababf55139691f5b329cd10e1e2b99af39861401c7b9633ed396447c506b02a80144b0#npm:1.3.62"],\ ["@types/parcel__core", null],\ ["semver", "npm:7.5.4"]\ ],\ @@ -1766,30 +1768,52 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "@types/parcel__core"\ ],\ "linkType": "HARD"\ + }],\ + ["virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0", {\ + "packageLocation": "./.yarn/__virtual__/@parcel-package-manager-virtual-5f8211ac5f/0/cache/@parcel-package-manager-npm-2.12.0-fc90aacf70-a517e9efe1.zip/node_modules/@parcel/package-manager/",\ + "packageDependencies": [\ + ["@parcel/package-manager", "virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0"],\ + ["@parcel/core", "npm:2.6.2"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/fs", 
"virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0"],\ + ["@parcel/logger", "npm:2.12.0"],\ + ["@parcel/node-resolver-core", "npm:3.3.0"],\ + ["@parcel/types", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ + ["@parcel/workers", "virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0"],\ + ["@swc/core", "virtual:5f8211ac5fe0096c8679c8fc747f0917af84ce168460ce1b592cb42613ababf55139691f5b329cd10e1e2b99af39861401c7b9633ed396447c506b02a80144b0#npm:1.3.62"],\ + ["@types/parcel__core", null],\ + ["semver", "npm:7.5.4"]\ + ],\ + "packagePeers": [\ + "@types/parcel__core"\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["@parcel/packager-css", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-packager-css-npm-2.10.0-cb31a968a8-11bf4cae4c.zip/node_modules/@parcel/packager-css/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-packager-css-npm-2.12.0-b1c27a8323-684aaa1d85.zip/node_modules/@parcel/packager-css/",\ "packageDependencies": [\ - ["@parcel/packager-css", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ + ["@parcel/packager-css", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ ["@parcel/source-map", "npm:2.1.1"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ + ["lightningcss", "npm:1.17.1"],\ ["nullthrows", "npm:1.1.1"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/packager-html", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-packager-html-npm-2.10.0-d6f71e7e36-8dfd86e7d6.zip/node_modules/@parcel/packager-html/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-packager-html-npm-2.12.0-ad361b1265-ee558ad616.zip/node_modules/@parcel/packager-html/",\ "packageDependencies": [\ - ["@parcel/packager-html", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/types", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/packager-html", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/types", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["nullthrows", "npm:1.1.1"],\ ["posthtml", "npm:0.16.6"]\ ],\ @@ -1797,16 +1821,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/packager-js", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-packager-js-npm-2.10.0-f84ec4cc7b-9b62598864.zip/node_modules/@parcel/packager-js/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-packager-js-npm-2.12.0-093e3200cd-2189b7ff15.zip/node_modules/@parcel/packager-js/",\ "packageDependencies": [\ - ["@parcel/packager-js", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/rust", "npm:2.10.0"],\ + ["@parcel/packager-js", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/rust", "npm:2.12.0"],\ ["@parcel/source-map", "npm:2.1.1"],\ - ["@parcel/types", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/types", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["globals", "npm:13.15.0"],\ ["nullthrows", "npm:1.1.1"]\ ],\ @@ -1814,44 +1838,44 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/packager-raw", [\ - ["npm:2.10.0", {\ - "packageLocation": 
"./.yarn/cache/@parcel-packager-raw-npm-2.10.0-01ef1b8e3e-492fe07ae5.zip/node_modules/@parcel/packager-raw/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-packager-raw-npm-2.12.0-b7f15635f8-39ce2fc7ae.zip/node_modules/@parcel/packager-raw/",\ "packageDependencies": [\ - ["@parcel/packager-raw", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"]\ + ["@parcel/packager-raw", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/packager-svg", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-packager-svg-npm-2.10.0-22326715bd-f49d7f3b88.zip/node_modules/@parcel/packager-svg/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-packager-svg-npm-2.12.0-fa921ce522-436ac9ea39.zip/node_modules/@parcel/packager-svg/",\ "packageDependencies": [\ - ["@parcel/packager-svg", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/types", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/packager-svg", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/types", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["posthtml", "npm:0.16.6"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/packager-wasm", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-packager-wasm-npm-2.10.0-b1d2cd8f88-d9a13eb838.zip/node_modules/@parcel/packager-wasm/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-packager-wasm-npm-2.12.0-ec551a9e29-a10e1cd988.zip/node_modules/@parcel/packager-wasm/",\ "packageDependencies": [\ - ["@parcel/packager-wasm", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"]\ + ["@parcel/packager-wasm", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/plugin", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-plugin-npm-2.10.0-efbc58a209-e13ba6e7e5.zip/node_modules/@parcel/plugin/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-plugin-npm-2.12.0-947dec85d3-0b52f1dd06.zip/node_modules/@parcel/plugin/",\ "packageDependencies": [\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/types", "npm:2.10.0"]\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/types", "npm:2.12.0"]\ ],\ "linkType": "HARD"\ }],\ @@ -1865,25 +1889,25 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/profiler", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-profiler-npm-2.10.0-b1ba499bc1-78d545edb7.zip/node_modules/@parcel/profiler/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-profiler-npm-2.12.0-69720a23ab-b683b74e10.zip/node_modules/@parcel/profiler/",\ "packageDependencies": [\ - ["@parcel/profiler", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/events", "npm:2.10.0"],\ + ["@parcel/profiler", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/events", "npm:2.12.0"],\ ["chrome-trace-event", "npm:1.0.3"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/reporter-cli", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-reporter-cli-npm-2.10.0-083fc2f2d6-0137a91e45.zip/node_modules/@parcel/reporter-cli/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-reporter-cli-npm-2.12.0-b3e4c5fe19-8cc524fa15.zip/node_modules/@parcel/reporter-cli/",\ "packageDependencies": [\ - ["@parcel/reporter-cli", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/types", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/reporter-cli", "npm:2.12.0"],\ + 
["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/types", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["chalk", "npm:4.1.2"],\ ["term-size", "npm:2.2.1"]\ ],\ @@ -1891,23 +1915,23 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/reporter-dev-server", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-reporter-dev-server-npm-2.10.0-2f19cb846e-e72fd6ec09.zip/node_modules/@parcel/reporter-dev-server/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-reporter-dev-server-npm-2.12.0-aed1d2c68c-43957b4656.zip/node_modules/@parcel/reporter-dev-server/",\ "packageDependencies": [\ - ["@parcel/reporter-dev-server", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"]\ + ["@parcel/reporter-dev-server", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/reporter-tracer", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-reporter-tracer-npm-2.10.0-184a89e262-0f8249b998.zip/node_modules/@parcel/reporter-tracer/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-reporter-tracer-npm-2.12.0-5cec9ab2d5-24cddacd19.zip/node_modules/@parcel/reporter-tracer/",\ "packageDependencies": [\ - ["@parcel/reporter-tracer", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/reporter-tracer", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["chrome-trace-event", "npm:1.0.3"],\ ["nullthrows", "npm:1.1.1"]\ ],\ @@ -1915,47 +1939,47 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/resolver-default", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-resolver-default-npm-2.10.0-ca49f01a75-c82e2d3c4b.zip/node_modules/@parcel/resolver-default/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-resolver-default-npm-2.12.0-8da790891c-f3652eea09.zip/node_modules/@parcel/resolver-default/",\ "packageDependencies": [\ - ["@parcel/resolver-default", "npm:2.10.0"],\ - ["@parcel/node-resolver-core", "npm:3.1.0"],\ - ["@parcel/plugin", "npm:2.10.0"]\ + ["@parcel/resolver-default", "npm:2.12.0"],\ + ["@parcel/node-resolver-core", "npm:3.3.0"],\ + ["@parcel/plugin", "npm:2.12.0"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/runtime-browser-hmr", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-runtime-browser-hmr-npm-2.10.0-c6b7773a09-12928462c8.zip/node_modules/@parcel/runtime-browser-hmr/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-runtime-browser-hmr-npm-2.12.0-6f0da66673-bbba57ecee.zip/node_modules/@parcel/runtime-browser-hmr/",\ "packageDependencies": [\ - ["@parcel/runtime-browser-hmr", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"]\ + ["@parcel/runtime-browser-hmr", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/runtime-js", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-runtime-js-npm-2.10.0-6b4cf1576c-3bbd64c5b9.zip/node_modules/@parcel/runtime-js/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-runtime-js-npm-2.12.0-e21acc0f42-6afa3e7eb2.zip/node_modules/@parcel/runtime-js/",\ "packageDependencies": [\ - ["@parcel/runtime-js", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + 
["@parcel/runtime-js", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["nullthrows", "npm:1.1.1"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/runtime-react-refresh", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-runtime-react-refresh-npm-2.10.0-b1f6c62bdf-dc567474a1.zip/node_modules/@parcel/runtime-react-refresh/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-runtime-react-refresh-npm-2.12.0-2b09615691-41aee9a874.zip/node_modules/@parcel/runtime-react-refresh/",\ "packageDependencies": [\ - ["@parcel/runtime-react-refresh", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/runtime-react-refresh", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["react-error-overlay", "npm:6.0.9"],\ ["react-refresh", "npm:0.9.0"]\ ],\ @@ -1963,22 +1987,22 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/runtime-service-worker", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-runtime-service-worker-npm-2.10.0-3ca99a5366-d0bfd113b9.zip/node_modules/@parcel/runtime-service-worker/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-runtime-service-worker-npm-2.12.0-7d227ff0bf-c71246428e.zip/node_modules/@parcel/runtime-service-worker/",\ "packageDependencies": [\ - ["@parcel/runtime-service-worker", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/runtime-service-worker", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["nullthrows", "npm:1.1.1"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/rust", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/unplugged/@parcel-rust-npm-2.10.0-99038406b0/node_modules/@parcel/rust/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/unplugged/@parcel-rust-npm-2.12.0-0cf943f3e5/node_modules/@parcel/rust/",\ "packageDependencies": [\ - ["@parcel/rust", "npm:2.10.0"]\ + ["@parcel/rust", "npm:2.12.0"]\ ],\ "linkType": "HARD"\ }]\ @@ -2002,14 +2026,14 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/transformer-babel", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-transformer-babel-npm-2.10.0-fb74ad8c73-fd64092c9c.zip/node_modules/@parcel/transformer-babel/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-transformer-babel-npm-2.12.0-953de52432-b8c457c0be.zip/node_modules/@parcel/transformer-babel/",\ "packageDependencies": [\ - ["@parcel/transformer-babel", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ + ["@parcel/transformer-babel", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ ["@parcel/source-map", "npm:2.1.1"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["browserslist", "npm:4.20.3"],\ ["json5", "npm:2.2.1"],\ ["nullthrows", "npm:1.1.1"],\ @@ -2019,14 +2043,14 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/transformer-css", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-transformer-css-npm-2.10.0-4fc35c8005-acc26e9b3d.zip/node_modules/@parcel/transformer-css/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-transformer-css-npm-2.12.0-24ddc31ae3-3a6f16321d.zip/node_modules/@parcel/transformer-css/",\ "packageDependencies": [\ - 
["@parcel/transformer-css", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ + ["@parcel/transformer-css", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ ["@parcel/source-map", "npm:2.1.1"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["browserslist", "npm:4.20.3"],\ ["lightningcss", "npm:1.17.1"],\ ["nullthrows", "npm:1.1.1"]\ @@ -2035,13 +2059,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/transformer-html", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-transformer-html-npm-2.10.0-b6d2228044-f28e0d3606.zip/node_modules/@parcel/transformer-html/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-transformer-html-npm-2.12.0-be2b9ee40c-7fcfac62ca.zip/node_modules/@parcel/transformer-html/",\ "packageDependencies": [\ - ["@parcel/transformer-html", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/rust", "npm:2.10.0"],\ + ["@parcel/transformer-html", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/rust", "npm:2.12.0"],\ ["nullthrows", "npm:1.1.1"],\ ["posthtml", "npm:0.16.6"],\ ["posthtml-parser", "npm:0.10.2"],\ @@ -2053,21 +2077,21 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/transformer-image", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-transformer-image-npm-2.10.0-e63bd526ed-61a47d7d8e.zip/node_modules/@parcel/transformer-image/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-transformer-image-npm-2.12.0-53f04e21c0-0a1581eacc.zip/node_modules/@parcel/transformer-image/",\ "packageDependencies": [\ - ["@parcel/transformer-image", "npm:2.10.0"]\ + ["@parcel/transformer-image", "npm:2.12.0"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:61dcbb3314ed0db4613bbccf7f920606ba09571991ce6dbf378d0f819338fde0afdb3e35306b4f7d24155236814007855a39ab7d05afaeadead37df42309ed7e#npm:2.10.0", {\ - "packageLocation": "./.yarn/__virtual__/@parcel-transformer-image-virtual-3e73171071/0/cache/@parcel-transformer-image-npm-2.10.0-e63bd526ed-61a47d7d8e.zip/node_modules/@parcel/transformer-image/",\ - "packageDependencies": [\ - ["@parcel/transformer-image", "virtual:61dcbb3314ed0db4613bbccf7f920606ba09571991ce6dbf378d0f819338fde0afdb3e35306b4f7d24155236814007855a39ab7d05afaeadead37df42309ed7e#npm:2.10.0"],\ - ["@parcel/core", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ - ["@parcel/workers", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ + ["virtual:284acdc258f2328e304855ff98dec9e5e8952a2bd7797a2e11c082f6cad2e0d3068e07fb498d46b810d8efae36becee510ac53186a75e438e809dc472f832ab2#npm:2.12.0", {\ + "packageLocation": "./.yarn/__virtual__/@parcel-transformer-image-virtual-acc9c20c9c/0/cache/@parcel-transformer-image-npm-2.12.0-53f04e21c0-0a1581eacc.zip/node_modules/@parcel/transformer-image/",\ + "packageDependencies": [\ + ["@parcel/transformer-image", "virtual:284acdc258f2328e304855ff98dec9e5e8952a2bd7797a2e11c082f6cad2e0d3068e07fb498d46b810d8efae36becee510ac53186a75e438e809dc472f832ab2#npm:2.12.0"],\ + ["@parcel/core", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ + ["@parcel/workers", 
"virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ ["@types/parcel__core", null],\ ["nullthrows", "npm:1.1.1"]\ ],\ @@ -2079,34 +2103,34 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/transformer-inline-string", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-transformer-inline-string-npm-2.10.0-44c9b349db-618c919108.zip/node_modules/@parcel/transformer-inline-string/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-transformer-inline-string-npm-2.12.0-a33f10bafa-5f63c08695.zip/node_modules/@parcel/transformer-inline-string/",\ "packageDependencies": [\ - ["@parcel/transformer-inline-string", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"]\ + ["@parcel/transformer-inline-string", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/transformer-js", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-transformer-js-npm-2.10.0-132e460926-e9944ce77c.zip/node_modules/@parcel/transformer-js/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-transformer-js-npm-2.12.0-404d54db18-b9fe4c887b.zip/node_modules/@parcel/transformer-js/",\ "packageDependencies": [\ - ["@parcel/transformer-js", "npm:2.10.0"]\ + ["@parcel/transformer-js", "npm:2.12.0"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:61dcbb3314ed0db4613bbccf7f920606ba09571991ce6dbf378d0f819338fde0afdb3e35306b4f7d24155236814007855a39ab7d05afaeadead37df42309ed7e#npm:2.10.0", {\ - "packageLocation": "./.yarn/__virtual__/@parcel-transformer-js-virtual-aab7779f34/0/cache/@parcel-transformer-js-npm-2.10.0-132e460926-e9944ce77c.zip/node_modules/@parcel/transformer-js/",\ - "packageDependencies": [\ - ["@parcel/transformer-js", "virtual:61dcbb3314ed0db4613bbccf7f920606ba09571991ce6dbf378d0f819338fde0afdb3e35306b4f7d24155236814007855a39ab7d05afaeadead37df42309ed7e#npm:2.10.0"],\ - ["@parcel/core", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/rust", "npm:2.10.0"],\ + ["virtual:284acdc258f2328e304855ff98dec9e5e8952a2bd7797a2e11c082f6cad2e0d3068e07fb498d46b810d8efae36becee510ac53186a75e438e809dc472f832ab2#npm:2.12.0", {\ + "packageLocation": "./.yarn/__virtual__/@parcel-transformer-js-virtual-567f83ac24/0/cache/@parcel-transformer-js-npm-2.12.0-404d54db18-b9fe4c887b.zip/node_modules/@parcel/transformer-js/",\ + "packageDependencies": [\ + ["@parcel/transformer-js", "virtual:284acdc258f2328e304855ff98dec9e5e8952a2bd7797a2e11c082f6cad2e0d3068e07fb498d46b810d8efae36becee510ac53186a75e438e809dc472f832ab2#npm:2.12.0"],\ + ["@parcel/core", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/rust", "npm:2.12.0"],\ ["@parcel/source-map", "npm:2.1.1"],\ - ["@parcel/utils", "npm:2.10.0"],\ - ["@parcel/workers", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ + ["@parcel/workers", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ ["@swc/helpers", "npm:0.5.1"],\ ["@types/parcel__core", null],\ ["browserslist", "npm:4.20.3"],\ @@ -2122,25 +2146,25 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/transformer-json", [\ - ["npm:2.10.0", {\ - 
"packageLocation": "./.yarn/cache/@parcel-transformer-json-npm-2.10.0-5525143f86-9c7aceb8e6.zip/node_modules/@parcel/transformer-json/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-transformer-json-npm-2.12.0-652d8d99d2-a711cb65a8.zip/node_modules/@parcel/transformer-json/",\ "packageDependencies": [\ - ["@parcel/transformer-json", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ + ["@parcel/transformer-json", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ ["json5", "npm:2.2.1"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/transformer-postcss", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-transformer-postcss-npm-2.10.0-c1f60c708a-2e524bd513.zip/node_modules/@parcel/transformer-postcss/",\ - "packageDependencies": [\ - ["@parcel/transformer-postcss", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/rust", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-transformer-postcss-npm-2.12.0-f0cfb95fac-b210044a7f.zip/node_modules/@parcel/transformer-postcss/",\ + "packageDependencies": [\ + ["@parcel/transformer-postcss", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/rust", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["clone", "npm:2.1.2"],\ ["nullthrows", "npm:1.1.1"],\ ["postcss-value-parser", "npm:4.2.0"],\ @@ -2150,12 +2174,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/transformer-posthtml", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-transformer-posthtml-npm-2.10.0-31d54ed3f0-7de343f0f9.zip/node_modules/@parcel/transformer-posthtml/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-transformer-posthtml-npm-2.12.0-41c570db12-b62582ae7e.zip/node_modules/@parcel/transformer-posthtml/",\ "packageDependencies": [\ - ["@parcel/transformer-posthtml", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["@parcel/transformer-posthtml", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["nullthrows", "npm:1.1.1"],\ ["posthtml", "npm:0.16.6"],\ ["posthtml-parser", "npm:0.10.2"],\ @@ -2166,33 +2190,33 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/transformer-raw", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-transformer-raw-npm-2.10.0-d7cd50f767-c7b1b9c6f7.zip/node_modules/@parcel/transformer-raw/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-transformer-raw-npm-2.12.0-bd2cb66ddf-de6681e2e7.zip/node_modules/@parcel/transformer-raw/",\ "packageDependencies": [\ - ["@parcel/transformer-raw", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"]\ + ["@parcel/transformer-raw", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/transformer-react-refresh-wrap", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-transformer-react-refresh-wrap-npm-2.10.0-4c3ddcc095-fc3163bcb0.zip/node_modules/@parcel/transformer-react-refresh-wrap/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-transformer-react-refresh-wrap-npm-2.12.0-59ed68910f-9aba8c1ab0.zip/node_modules/@parcel/transformer-react-refresh-wrap/",\ "packageDependencies": [\ - ["@parcel/transformer-react-refresh-wrap", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + 
["@parcel/transformer-react-refresh-wrap", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["react-refresh", "npm:0.9.0"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@parcel/transformer-sass", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-transformer-sass-npm-2.10.0-6c5f188bcc-2d697077ac.zip/node_modules/@parcel/transformer-sass/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-transformer-sass-npm-2.12.0-ef787eef35-ce6b4d329b.zip/node_modules/@parcel/transformer-sass/",\ "packageDependencies": [\ - ["@parcel/transformer-sass", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ + ["@parcel/transformer-sass", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ ["@parcel/source-map", "npm:2.1.1"],\ ["sass", "npm:1.52.1"]\ ],\ @@ -2200,13 +2224,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/transformer-svg", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-transformer-svg-npm-2.10.0-881c72cd1f-d5f55f6eee.zip/node_modules/@parcel/transformer-svg/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-transformer-svg-npm-2.12.0-f41b181676-92b7c65894.zip/node_modules/@parcel/transformer-svg/",\ "packageDependencies": [\ - ["@parcel/transformer-svg", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/plugin", "npm:2.10.0"],\ - ["@parcel/rust", "npm:2.10.0"],\ + ["@parcel/transformer-svg", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/plugin", "npm:2.12.0"],\ + ["@parcel/rust", "npm:2.12.0"],\ ["nullthrows", "npm:1.1.1"],\ ["posthtml", "npm:0.16.6"],\ ["posthtml-parser", "npm:0.10.2"],\ @@ -2217,16 +2241,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/types", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-types-npm-2.10.0-270e786ba1-387aa07902.zip/node_modules/@parcel/types/",\ - "packageDependencies": [\ - ["@parcel/types", "npm:2.10.0"],\ - ["@parcel/cache", "virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/fs", "virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0"],\ - ["@parcel/package-manager", "virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0"],\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-types-npm-2.12.0-ffe47febbf-250f95580c.zip/node_modules/@parcel/types/",\ + "packageDependencies": [\ + ["@parcel/types", "npm:2.12.0"],\ + ["@parcel/cache", "virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/fs", "virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0"],\ + ["@parcel/package-manager", "virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0"],\ ["@parcel/source-map", "npm:2.1.1"],\ - ["@parcel/workers", "virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0"],\ + 
["@parcel/workers", "virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0"],\ ["utility-types", "npm:3.10.0"]\ ],\ "linkType": "HARD"\ @@ -2247,15 +2271,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/utils", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-utils-npm-2.10.0-1f25fbc366-9f4953ff9a.zip/node_modules/@parcel/utils/",\ - "packageDependencies": [\ - ["@parcel/utils", "npm:2.10.0"],\ - ["@parcel/codeframe", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/logger", "npm:2.10.0"],\ - ["@parcel/markdown-ansi", "npm:2.10.0"],\ - ["@parcel/rust", "npm:2.10.0"],\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-utils-npm-2.12.0-d8a9a48a66-ba80a60fed.zip/node_modules/@parcel/utils/",\ + "packageDependencies": [\ + ["@parcel/utils", "npm:2.12.0"],\ + ["@parcel/codeframe", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/logger", "npm:2.12.0"],\ + ["@parcel/markdown-ansi", "npm:2.12.0"],\ + ["@parcel/rust", "npm:2.12.0"],\ ["@parcel/source-map", "npm:2.1.1"],\ ["chalk", "npm:4.1.2"],\ ["nullthrows", "npm:1.1.1"]\ @@ -2300,10 +2324,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@parcel/workers", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/@parcel-workers-npm-2.10.0-7f8aa5ad5a-e8b1701b53.zip/node_modules/@parcel/workers/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/@parcel-workers-npm-2.12.0-3ddd4664bc-e19c3c0a66.zip/node_modules/@parcel/workers/",\ "packageDependencies": [\ - ["@parcel/workers", "npm:2.10.0"]\ + ["@parcel/workers", "npm:2.12.0"]\ ],\ "linkType": "SOFT"\ }],\ @@ -2314,34 +2338,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - ["virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0", {\ - "packageLocation": "./.yarn/__virtual__/@parcel-workers-virtual-2fd2d815f8/0/cache/@parcel-workers-npm-2.10.0-7f8aa5ad5a-e8b1701b53.zip/node_modules/@parcel/workers/",\ - "packageDependencies": [\ - ["@parcel/workers", "virtual:270e786ba124f493b75e4cd9a08f7491010f97327e0fcf0c93872db7e85ab335c548e71e39c548e3ecd0ddd319719697b172c5c43cd0b75c1948a8e82873b962#npm:2.10.0"],\ - ["@parcel/core", "npm:2.6.2"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/logger", "npm:2.10.0"],\ - ["@parcel/profiler", "npm:2.10.0"],\ - ["@parcel/types", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ - ["@types/parcel__core", null],\ - ["nullthrows", "npm:1.1.1"]\ - ],\ - "packagePeers": [\ - "@types/parcel__core"\ - ],\ - "linkType": "HARD"\ - }],\ - ["virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0", {\ - "packageLocation": "./.yarn/__virtual__/@parcel-workers-virtual-5aa58b2681/0/cache/@parcel-workers-npm-2.10.0-7f8aa5ad5a-e8b1701b53.zip/node_modules/@parcel/workers/",\ - "packageDependencies": [\ - ["@parcel/workers", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ - ["@parcel/core", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/logger", "npm:2.10.0"],\ - ["@parcel/profiler", "npm:2.10.0"],\ - ["@parcel/types", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + 
["virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0", {\ + "packageLocation": "./.yarn/__virtual__/@parcel-workers-virtual-fbd6240557/0/cache/@parcel-workers-npm-2.12.0-3ddd4664bc-e19c3c0a66.zip/node_modules/@parcel/workers/",\ + "packageDependencies": [\ + ["@parcel/workers", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ + ["@parcel/core", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/logger", "npm:2.12.0"],\ + ["@parcel/profiler", "npm:2.12.0"],\ + ["@parcel/types", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["@types/parcel__core", null],\ ["nullthrows", "npm:1.1.1"]\ ],\ @@ -2368,6 +2374,24 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "@types/parcel__core"\ ],\ "linkType": "HARD"\ + }],\ + ["virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0", {\ + "packageLocation": "./.yarn/__virtual__/@parcel-workers-virtual-0f6ac1cb6e/0/cache/@parcel-workers-npm-2.12.0-3ddd4664bc-e19c3c0a66.zip/node_modules/@parcel/workers/",\ + "packageDependencies": [\ + ["@parcel/workers", "virtual:ffe47febbf7847f9b64454e506be514f3cbd8bbd1821ba64e8e762685b5100c3f7867a926c2aa7f5349f2a1370184e7d2f8f70428bcab9b21701f56d9632c378#npm:2.12.0"],\ + ["@parcel/core", "npm:2.6.2"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/logger", "npm:2.12.0"],\ + ["@parcel/profiler", "npm:2.12.0"],\ + ["@parcel/types", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ + ["@types/parcel__core", null],\ + ["nullthrows", "npm:1.1.1"]\ + ],\ + "packagePeers": [\ + "@types/parcel__core"\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["@pkgjs/parseargs", [\ @@ -2396,17 +2420,17 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@rollup/pluginutils", [\ - ["npm:5.0.5", {\ - "packageLocation": "./.yarn/cache/@rollup-pluginutils-npm-5.0.5-cfa8fafc53-dcd4d6e3cb.zip/node_modules/@rollup/pluginutils/",\ + ["npm:5.1.0", {\ + "packageLocation": "./.yarn/cache/@rollup-pluginutils-npm-5.1.0-6939820ef8-3cc5a6d914.zip/node_modules/@rollup/pluginutils/",\ "packageDependencies": [\ - ["@rollup/pluginutils", "npm:5.0.5"]\ + ["@rollup/pluginutils", "npm:5.1.0"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.0.5", {\ - "packageLocation": "./.yarn/__virtual__/@rollup-pluginutils-virtual-9173e115a7/0/cache/@rollup-pluginutils-npm-5.0.5-cfa8fafc53-dcd4d6e3cb.zip/node_modules/@rollup/pluginutils/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.1.0", {\ + "packageLocation": "./.yarn/__virtual__/@rollup-pluginutils-virtual-e968017249/0/cache/@rollup-pluginutils-npm-5.1.0-6939820ef8-3cc5a6d914.zip/node_modules/@rollup/pluginutils/",\ "packageDependencies": [\ - ["@rollup/pluginutils", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.0.5"],\ + ["@rollup/pluginutils", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.1.0"],\ ["@types/estree", "npm:1.0.0"],\ ["@types/rollup", null],\ 
["estree-walker", "npm:2.0.2"],\ @@ -2421,17 +2445,17 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@sidvind/better-ajv-errors", [\ - ["npm:2.0.0", {\ - "packageLocation": "./.yarn/cache/@sidvind-better-ajv-errors-npm-2.0.0-3531bddef9-12b0d87855.zip/node_modules/@sidvind/better-ajv-errors/",\ + ["npm:2.1.3", {\ + "packageLocation": "./.yarn/cache/@sidvind-better-ajv-errors-npm-2.1.3-e3d1c524a8-949cb805a1.zip/node_modules/@sidvind/better-ajv-errors/",\ "packageDependencies": [\ - ["@sidvind/better-ajv-errors", "npm:2.0.0"]\ + ["@sidvind/better-ajv-errors", "npm:2.1.3"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:c902a8d1d11dd34ee7f7648077b48b959e6723fe684e99968d7e8c7b195a007900ac0290074641f9d2ed5797dad844cdb897c4c6de1b44897449ec92f70b817f#npm:2.0.0", {\ - "packageLocation": "./.yarn/__virtual__/@sidvind-better-ajv-errors-virtual-b3857f99f1/0/cache/@sidvind-better-ajv-errors-npm-2.0.0-3531bddef9-12b0d87855.zip/node_modules/@sidvind/better-ajv-errors/",\ + ["virtual:640261ed3b7a9880a388cc504caacf8ea790dd52f1cb31fbc3be445cb2adc6e73fc87097de620863105eb917510145ef2457d30000c7361456ab67ec0b895136#npm:2.1.3", {\ + "packageLocation": "./.yarn/__virtual__/@sidvind-better-ajv-errors-virtual-ff98ba00e3/0/cache/@sidvind-better-ajv-errors-npm-2.1.3-e3d1c524a8-949cb805a1.zip/node_modules/@sidvind/better-ajv-errors/",\ "packageDependencies": [\ - ["@sidvind/better-ajv-errors", "virtual:c902a8d1d11dd34ee7f7648077b48b959e6723fe684e99968d7e8c7b195a007900ac0290074641f9d2ed5797dad844cdb897c4c6de1b44897449ec92f70b817f#npm:2.0.0"],\ + ["@sidvind/better-ajv-errors", "virtual:640261ed3b7a9880a388cc504caacf8ea790dd52f1cb31fbc3be445cb2adc6e73fc87097de620863105eb917510145ef2457d30000c7361456ab67ec0b895136#npm:2.1.3"],\ ["@babel/code-frame", "npm:7.16.7"],\ ["@types/ajv", null],\ ["ajv", "npm:8.11.0"],\ @@ -2446,16 +2470,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ]],\ ["@swc/core", [\ ["npm:1.3.62", {\ - "packageLocation": "./.yarn/unplugged/@swc-core-virtual-fa54a417e9/node_modules/@swc/core/",\ + "packageLocation": "./.yarn/unplugged/@swc-core-virtual-8fda1c3f9b/node_modules/@swc/core/",\ "packageDependencies": [\ ["@swc/core", "npm:1.3.62"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:caf3bb9c02ae9f768ff8cb8f830dcff0d7f38e60f1817c3f155faf0af46cd208a17e673fb908c23a477f907e553fbf9eef21af5f078ed79b4c34aca3fefc5224#npm:1.3.62", {\ - "packageLocation": "./.yarn/unplugged/@swc-core-virtual-fa54a417e9/node_modules/@swc/core/",\ + ["virtual:5f8211ac5fe0096c8679c8fc747f0917af84ce168460ce1b592cb42613ababf55139691f5b329cd10e1e2b99af39861401c7b9633ed396447c506b02a80144b0#npm:1.3.62", {\ + "packageLocation": "./.yarn/unplugged/@swc-core-virtual-8fda1c3f9b/node_modules/@swc/core/",\ "packageDependencies": [\ - ["@swc/core", "virtual:caf3bb9c02ae9f768ff8cb8f830dcff0d7f38e60f1817c3f155faf0af46cd208a17e673fb908c23a477f907e553fbf9eef21af5f078ed79b4c34aca3fefc5224#npm:1.3.62"],\ + ["@swc/core", "virtual:5f8211ac5fe0096c8679c8fc747f0917af84ce168460ce1b592cb42613ababf55139691f5b329cd10e1e2b99af39861401c7b9633ed396447c506b02a80144b0#npm:1.3.62"],\ ["@swc/core-darwin-arm64", "npm:1.3.62"],\ ["@swc/core-darwin-x64", "npm:1.3.62"],\ ["@swc/core-linux-arm-gnueabihf", "npm:1.3.62"],\ @@ -2612,6 +2636,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["@types/hammerjs", [\ + ["npm:2.0.46", {\ + "packageLocation": "./.yarn/cache/@types-hammerjs-npm-2.0.46-de99d4d9d1-caba6ec788.zip/node_modules/@types/hammerjs/",\ + "packageDependencies": [\ + ["@types/hammerjs", 
"npm:2.0.46"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["@types/istanbul-lib-coverage", [\ ["npm:2.0.4", {\ "packageLocation": "./.yarn/cache/@types-istanbul-lib-coverage-npm-2.0.4-734954bb56-a25d7589ee.zip/node_modules/@types/istanbul-lib-coverage/",\ @@ -2683,22 +2716,31 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["@ungap/structured-clone", [\ + ["npm:1.2.0", {\ + "packageLocation": "./.yarn/cache/@ungap-structured-clone-npm-1.2.0-648f0b82e0-4f656b7b46.zip/node_modules/@ungap/structured-clone/",\ + "packageDependencies": [\ + ["@ungap/structured-clone", "npm:1.2.0"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["@vitejs/plugin-vue", [\ - ["npm:4.4.0", {\ - "packageLocation": "./.yarn/cache/@vitejs-plugin-vue-npm-4.4.0-c33d65c6f6-37b6987951.zip/node_modules/@vitejs/plugin-vue/",\ + ["npm:4.6.2", {\ + "packageLocation": "./.yarn/cache/@vitejs-plugin-vue-npm-4.6.2-d7ace53203-01bc4ed643.zip/node_modules/@vitejs/plugin-vue/",\ "packageDependencies": [\ - ["@vitejs/plugin-vue", "npm:4.4.0"]\ + ["@vitejs/plugin-vue", "npm:4.6.2"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.4.0", {\ - "packageLocation": "./.yarn/__virtual__/@vitejs-plugin-vue-virtual-6c0c604c2e/0/cache/@vitejs-plugin-vue-npm-4.4.0-c33d65c6f6-37b6987951.zip/node_modules/@vitejs/plugin-vue/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.6.2", {\ + "packageLocation": "./.yarn/__virtual__/@vitejs-plugin-vue-virtual-090b584a9c/0/cache/@vitejs-plugin-vue-npm-4.6.2-d7ace53203-01bc4ed643.zip/node_modules/@vitejs/plugin-vue/",\ "packageDependencies": [\ - ["@vitejs/plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.4.0"],\ + ["@vitejs/plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.6.2"],\ ["@types/vite", null],\ ["@types/vue", null],\ - ["vite", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.4.11"],\ - ["vue", "npm:3.3.4"]\ + ["vite", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.5.3"],\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"]\ ],\ "packagePeers": [\ "@types/vite",\ @@ -2709,13 +2751,56 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["@volar/language-core", [\ + ["npm:2.1.4", {\ + "packageLocation": "./.yarn/cache/@volar-language-core-npm-2.1.4-18ee1a037d-7430f65143.zip/node_modules/@volar/language-core/",\ + "packageDependencies": [\ + ["@volar/language-core", "npm:2.1.4"],\ + ["@volar/source-map", "npm:2.1.4"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ + ["@volar/language-service", [\ + ["npm:2.1.4", {\ + "packageLocation": "./.yarn/cache/@volar-language-service-npm-2.1.4-2d34cb628f-06cdcfacf0.zip/node_modules/@volar/language-service/",\ + "packageDependencies": [\ + ["@volar/language-service", "npm:2.1.4"],\ + ["@volar/language-core", "npm:2.1.4"],\ + ["vscode-languageserver-protocol", 
"npm:3.17.5"],\ + ["vscode-languageserver-textdocument", "npm:1.0.11"],\ + ["vscode-uri", "npm:3.0.8"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ + ["@volar/source-map", [\ + ["npm:2.1.4", {\ + "packageLocation": "./.yarn/cache/@volar-source-map-npm-2.1.4-5963b1701f-e2f65bcfd6.zip/node_modules/@volar/source-map/",\ + "packageDependencies": [\ + ["@volar/source-map", "npm:2.1.4"],\ + ["muggle-string", "npm:0.4.1"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ + ["@vscode/l10n", [\ + ["npm:0.0.18", {\ + "packageLocation": "./.yarn/cache/@vscode-l10n-npm-0.0.18-8a12efe4b5-c33876cebd.zip/node_modules/@vscode/l10n/",\ + "packageDependencies": [\ + ["@vscode/l10n", "npm:0.0.18"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["@vue/compiler-core", [\ - ["npm:3.3.4", {\ - "packageLocation": "./.yarn/cache/@vue-compiler-core-npm-3.3.4-e514bded25-5437942ea6.zip/node_modules/@vue/compiler-core/",\ + ["npm:3.4.21", {\ + "packageLocation": "./.yarn/cache/@vue-compiler-core-npm-3.4.21-ec7f24d7f5-0d6b7732bc.zip/node_modules/@vue/compiler-core/",\ "packageDependencies": [\ - ["@vue/compiler-core", "npm:3.3.4"],\ - ["@babel/parser", "npm:7.22.4"],\ - ["@vue/shared", "npm:3.3.4"],\ + ["@vue/compiler-core", "npm:3.4.21"],\ + ["@babel/parser", "npm:7.23.9"],\ + ["@vue/shared", "npm:3.4.21"],\ + ["entities", "npm:4.5.0"],\ ["estree-walker", "npm:2.0.2"],\ ["source-map-js", "npm:1.0.2"]\ ],\ @@ -2723,42 +2808,41 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@vue/compiler-dom", [\ - ["npm:3.3.4", {\ - "packageLocation": "./.yarn/cache/@vue-compiler-dom-npm-3.3.4-029250af79-1c2ac0c89d.zip/node_modules/@vue/compiler-dom/",\ + ["npm:3.4.21", {\ + "packageLocation": "./.yarn/cache/@vue-compiler-dom-npm-3.4.21-3d49f99020-f53e4f4e0a.zip/node_modules/@vue/compiler-dom/",\ "packageDependencies": [\ - ["@vue/compiler-dom", "npm:3.3.4"],\ - ["@vue/compiler-core", "npm:3.3.4"],\ - ["@vue/shared", "npm:3.3.4"]\ + ["@vue/compiler-dom", "npm:3.4.21"],\ + ["@vue/compiler-core", "npm:3.4.21"],\ + ["@vue/shared", "npm:3.4.21"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@vue/compiler-sfc", [\ - ["npm:3.3.4", {\ - "packageLocation": "./.yarn/cache/@vue-compiler-sfc-npm-3.3.4-783aff746b-0a0adfdd3e.zip/node_modules/@vue/compiler-sfc/",\ - "packageDependencies": [\ - ["@vue/compiler-sfc", "npm:3.3.4"],\ - ["@babel/parser", "npm:7.22.4"],\ - ["@vue/compiler-core", "npm:3.3.4"],\ - ["@vue/compiler-dom", "npm:3.3.4"],\ - ["@vue/compiler-ssr", "npm:3.3.4"],\ - ["@vue/reactivity-transform", "npm:3.3.4"],\ - ["@vue/shared", "npm:3.3.4"],\ + ["npm:3.4.21", {\ + "packageLocation": "./.yarn/cache/@vue-compiler-sfc-npm-3.4.21-c2b76ee1ff-226dc404be.zip/node_modules/@vue/compiler-sfc/",\ + "packageDependencies": [\ + ["@vue/compiler-sfc", "npm:3.4.21"],\ + ["@babel/parser", "npm:7.23.9"],\ + ["@vue/compiler-core", "npm:3.4.21"],\ + ["@vue/compiler-dom", "npm:3.4.21"],\ + ["@vue/compiler-ssr", "npm:3.4.21"],\ + ["@vue/shared", "npm:3.4.21"],\ ["estree-walker", "npm:2.0.2"],\ - ["magic-string", "npm:0.30.0"],\ - ["postcss", "npm:8.4.12"],\ + ["magic-string", "npm:0.30.7"],\ + ["postcss", "npm:8.4.35"],\ ["source-map-js", "npm:1.0.2"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@vue/compiler-ssr", [\ - ["npm:3.3.4", {\ - "packageLocation": "./.yarn/cache/@vue-compiler-ssr-npm-3.3.4-9c5036c29f-5d1875d55e.zip/node_modules/@vue/compiler-ssr/",\ + ["npm:3.4.21", {\ + "packageLocation": "./.yarn/cache/@vue-compiler-ssr-npm-3.4.21-e6f043341e-c510bee68b.zip/node_modules/@vue/compiler-ssr/",\ "packageDependencies": [\ - ["@vue/compiler-ssr", "npm:3.3.4"],\ 
- ["@vue/compiler-dom", "npm:3.3.4"],\ - ["@vue/shared", "npm:3.3.4"]\ + ["@vue/compiler-ssr", "npm:3.4.21"],\ + ["@vue/compiler-dom", "npm:3.4.21"],\ + ["@vue/shared", "npm:3.4.21"]\ ],\ "linkType": "HARD"\ }]\ @@ -2770,71 +2854,75 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["@vue/devtools-api", "npm:6.5.0"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:6.6.1", {\ + "packageLocation": "./.yarn/cache/@vue-devtools-api-npm-6.6.1-ef3c82703e-cf12b5ebcc.zip/node_modules/@vue/devtools-api/",\ + "packageDependencies": [\ + ["@vue/devtools-api", "npm:6.6.1"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ - ["@vue/reactivity", [\ - ["npm:3.3.4", {\ - "packageLocation": "./.yarn/cache/@vue-reactivity-npm-3.3.4-4bb841d3a9-81c3d0c587.zip/node_modules/@vue/reactivity/",\ + ["@vue/language-plugin-pug", [\ + ["npm:2.0.7", {\ + "packageLocation": "./.yarn/cache/@vue-language-plugin-pug-npm-2.0.7-547300c7e0-11cc96eb5f.zip/node_modules/@vue/language-plugin-pug/",\ "packageDependencies": [\ - ["@vue/reactivity", "npm:3.3.4"],\ - ["@vue/shared", "npm:3.3.4"]\ + ["@vue/language-plugin-pug", "npm:2.0.7"],\ + ["@volar/source-map", "npm:2.1.4"],\ + ["volar-service-pug", "npm:0.0.34"]\ ],\ "linkType": "HARD"\ }]\ ]],\ - ["@vue/reactivity-transform", [\ - ["npm:3.3.4", {\ - "packageLocation": "./.yarn/cache/@vue-reactivity-transform-npm-3.3.4-bfbf394bf7-b425e78b20.zip/node_modules/@vue/reactivity-transform/",\ + ["@vue/reactivity", [\ + ["npm:3.4.21", {\ + "packageLocation": "./.yarn/cache/@vue-reactivity-npm-3.4.21-fd3e254d08-79c7ebe3ec.zip/node_modules/@vue/reactivity/",\ "packageDependencies": [\ - ["@vue/reactivity-transform", "npm:3.3.4"],\ - ["@babel/parser", "npm:7.22.4"],\ - ["@vue/compiler-core", "npm:3.3.4"],\ - ["@vue/shared", "npm:3.3.4"],\ - ["estree-walker", "npm:2.0.2"],\ - ["magic-string", "npm:0.30.0"]\ + ["@vue/reactivity", "npm:3.4.21"],\ + ["@vue/shared", "npm:3.4.21"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@vue/runtime-core", [\ - ["npm:3.3.4", {\ - "packageLocation": "./.yarn/cache/@vue-runtime-core-npm-3.3.4-4a56fcce5e-d402da5126.zip/node_modules/@vue/runtime-core/",\ + ["npm:3.4.21", {\ + "packageLocation": "./.yarn/cache/@vue-runtime-core-npm-3.4.21-7bf985040b-4eb9b5d91f.zip/node_modules/@vue/runtime-core/",\ "packageDependencies": [\ - ["@vue/runtime-core", "npm:3.3.4"],\ - ["@vue/reactivity", "npm:3.3.4"],\ - ["@vue/shared", "npm:3.3.4"]\ + ["@vue/runtime-core", "npm:3.4.21"],\ + ["@vue/reactivity", "npm:3.4.21"],\ + ["@vue/shared", "npm:3.4.21"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@vue/runtime-dom", [\ - ["npm:3.3.4", {\ - "packageLocation": "./.yarn/cache/@vue-runtime-dom-npm-3.3.4-554b8c4277-dac9ada7f6.zip/node_modules/@vue/runtime-dom/",\ + ["npm:3.4.21", {\ + "packageLocation": "./.yarn/cache/@vue-runtime-dom-npm-3.4.21-40f99cf9a2-ebfdaa081f.zip/node_modules/@vue/runtime-dom/",\ "packageDependencies": [\ - ["@vue/runtime-dom", "npm:3.3.4"],\ - ["@vue/runtime-core", "npm:3.3.4"],\ - ["@vue/shared", "npm:3.3.4"],\ - ["csstype", "npm:3.1.2"]\ + ["@vue/runtime-dom", "npm:3.4.21"],\ + ["@vue/runtime-core", "npm:3.4.21"],\ + ["@vue/shared", "npm:3.4.21"],\ + ["csstype", "npm:3.1.3"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["@vue/server-renderer", [\ - ["npm:3.3.4", {\ - "packageLocation": "./.yarn/cache/@vue-server-renderer-npm-3.3.4-75b963f24d-e8598ed1a4.zip/node_modules/@vue/server-renderer/",\ + ["npm:3.4.21", {\ + "packageLocation": "./.yarn/cache/@vue-server-renderer-npm-3.4.21-bf6b2daebb-faa3dc4876.zip/node_modules/@vue/server-renderer/",\ "packageDependencies": [\ - 
["@vue/server-renderer", "npm:3.3.4"]\ + ["@vue/server-renderer", "npm:3.4.21"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:174fadbea44493263729fa2b6f65daab4b25e9b0a7a008d5887cf4635c65a7954c0e996fdf3e5d8529a9ab481440768b691dd3c59aca8db6f1f12ce74ed59685#npm:3.3.4", {\ - "packageLocation": "./.yarn/__virtual__/@vue-server-renderer-virtual-17865bc934/0/cache/@vue-server-renderer-npm-3.3.4-75b963f24d-e8598ed1a4.zip/node_modules/@vue/server-renderer/",\ + ["virtual:b79af6274dddda2b283f42be2b827e30c3e5389bce2938ee73bdb74ee9781811fc079c6836719e57940708d59b3beeb14d9e3c12f37f2d22582a53e6c32e4c97#npm:3.4.21", {\ + "packageLocation": "./.yarn/__virtual__/@vue-server-renderer-virtual-4c61378d94/0/cache/@vue-server-renderer-npm-3.4.21-bf6b2daebb-faa3dc4876.zip/node_modules/@vue/server-renderer/",\ "packageDependencies": [\ - ["@vue/server-renderer", "virtual:174fadbea44493263729fa2b6f65daab4b25e9b0a7a008d5887cf4635c65a7954c0e996fdf3e5d8529a9ab481440768b691dd3c59aca8db6f1f12ce74ed59685#npm:3.3.4"],\ + ["@vue/server-renderer", "virtual:b79af6274dddda2b283f42be2b827e30c3e5389bce2938ee73bdb74ee9781811fc079c6836719e57940708d59b3beeb14d9e3c12f37f2d22582a53e6c32e4c97#npm:3.4.21"],\ ["@types/vue", null],\ - ["@vue/compiler-ssr", "npm:3.3.4"],\ - ["@vue/shared", "npm:3.3.4"],\ - ["vue", "npm:3.3.4"]\ + ["@vue/compiler-ssr", "npm:3.4.21"],\ + ["@vue/shared", "npm:3.4.21"],\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"]\ ],\ "packagePeers": [\ "@types/vue",\ @@ -2844,10 +2932,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@vue/shared", [\ - ["npm:3.3.4", {\ - "packageLocation": "./.yarn/cache/@vue-shared-npm-3.3.4-76d250afa2-12fe53ff81.zip/node_modules/@vue/shared/",\ + ["npm:3.4.21", {\ + "packageLocation": "./.yarn/cache/@vue-shared-npm-3.4.21-2aee4ae0bc-5f30a40891.zip/node_modules/@vue/shared/",\ "packageDependencies": [\ - ["@vue/shared", "npm:3.3.4"]\ + ["@vue/shared", "npm:3.4.21"]\ ],\ "linkType": "HARD"\ }]\ @@ -3078,67 +3166,68 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["array-includes", [\ - ["npm:3.1.6", {\ - "packageLocation": "./.yarn/cache/array-includes-npm-3.1.6-d0ff9d248b-f22f8cd8ba.zip/node_modules/array-includes/",\ + ["npm:3.1.7", {\ + "packageLocation": "./.yarn/cache/array-includes-npm-3.1.7-d32a5ee179-06f9e4598f.zip/node_modules/array-includes/",\ "packageDependencies": [\ - ["array-includes", "npm:3.1.6"],\ + ["array-includes", "npm:3.1.7"],\ ["call-bind", "npm:1.0.2"],\ - ["define-properties", "npm:1.1.4"],\ - ["es-abstract", "npm:1.21.1"],\ - ["get-intrinsic", "npm:1.2.0"],\ + ["define-properties", "npm:1.2.0"],\ + ["es-abstract", "npm:1.22.3"],\ + ["get-intrinsic", "npm:1.2.1"],\ ["is-string", "npm:1.0.7"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["array.prototype.findlastindex", [\ - ["npm:1.2.2", {\ - "packageLocation": "./.yarn/cache/array.prototype.findlastindex-npm-1.2.2-dc5ee7bf67-8a166359f6.zip/node_modules/array.prototype.findlastindex/",\ + ["npm:1.2.3", {\ + "packageLocation": "./.yarn/cache/array.prototype.findlastindex-npm-1.2.3-2a36f4417b-31f35d7b37.zip/node_modules/array.prototype.findlastindex/",\ "packageDependencies": [\ - ["array.prototype.findlastindex", "npm:1.2.2"],\ + ["array.prototype.findlastindex", "npm:1.2.3"],\ ["call-bind", "npm:1.0.2"],\ - ["define-properties", "npm:1.1.4"],\ - ["es-abstract", "npm:1.21.1"],\ + ["define-properties", "npm:1.2.0"],\ + ["es-abstract", "npm:1.22.3"],\ 
["es-shim-unscopables", "npm:1.0.0"],\ - ["get-intrinsic", "npm:1.2.0"]\ + ["get-intrinsic", "npm:1.2.1"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["array.prototype.flat", [\ - ["npm:1.3.1", {\ - "packageLocation": "./.yarn/cache/array.prototype.flat-npm-1.3.1-e9a9e389c0-5a8415949d.zip/node_modules/array.prototype.flat/",\ + ["npm:1.3.2", {\ + "packageLocation": "./.yarn/cache/array.prototype.flat-npm-1.3.2-350729f7f4-5d6b4bf102.zip/node_modules/array.prototype.flat/",\ "packageDependencies": [\ - ["array.prototype.flat", "npm:1.3.1"],\ + ["array.prototype.flat", "npm:1.3.2"],\ ["call-bind", "npm:1.0.2"],\ - ["define-properties", "npm:1.1.4"],\ - ["es-abstract", "npm:1.21.1"],\ + ["define-properties", "npm:1.2.0"],\ + ["es-abstract", "npm:1.22.3"],\ ["es-shim-unscopables", "npm:1.0.0"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["array.prototype.flatmap", [\ - ["npm:1.3.1", {\ - "packageLocation": "./.yarn/cache/array.prototype.flatmap-npm-1.3.1-c65186ca34-8c1c43a499.zip/node_modules/array.prototype.flatmap/",\ + ["npm:1.3.2", {\ + "packageLocation": "./.yarn/cache/array.prototype.flatmap-npm-1.3.2-5c6a4af226-ce09fe21dc.zip/node_modules/array.prototype.flatmap/",\ "packageDependencies": [\ - ["array.prototype.flatmap", "npm:1.3.1"],\ + ["array.prototype.flatmap", "npm:1.3.2"],\ ["call-bind", "npm:1.0.2"],\ - ["define-properties", "npm:1.1.4"],\ - ["es-abstract", "npm:1.21.1"],\ + ["define-properties", "npm:1.2.0"],\ + ["es-abstract", "npm:1.22.3"],\ ["es-shim-unscopables", "npm:1.0.0"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["arraybuffer.prototype.slice", [\ - ["npm:1.0.1", {\ - "packageLocation": "./.yarn/cache/arraybuffer.prototype.slice-npm-1.0.1-d44cb5acc0-e3e9b2a3e9.zip/node_modules/arraybuffer.prototype.slice/",\ + ["npm:1.0.2", {\ + "packageLocation": "./.yarn/cache/arraybuffer.prototype.slice-npm-1.0.2-4eda52ad8c-c200faf437.zip/node_modules/arraybuffer.prototype.slice/",\ "packageDependencies": [\ - ["arraybuffer.prototype.slice", "npm:1.0.1"],\ + ["arraybuffer.prototype.slice", "npm:1.0.2"],\ ["array-buffer-byte-length", "npm:1.0.0"],\ ["call-bind", "npm:1.0.2"],\ ["define-properties", "npm:1.2.0"],\ + ["es-abstract", "npm:1.22.3"],\ ["get-intrinsic", "npm:1.2.1"],\ ["is-array-buffer", "npm:3.0.2"],\ ["is-shared-array-buffer", "npm:1.0.2"]\ @@ -3237,10 +3326,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - ["npm:5.3.2", {\ - "packageLocation": "./.yarn/cache/bootstrap-npm-5.3.2-20b391b636-d5580b253d.zip/node_modules/bootstrap/",\ + ["npm:5.3.3", {\ + "packageLocation": "./.yarn/cache/bootstrap-npm-5.3.3-da08e2f0fe-537b68db30.zip/node_modules/bootstrap/",\ "packageDependencies": [\ - ["bootstrap", "npm:5.3.2"]\ + ["bootstrap", "npm:5.3.3"]\ ],\ "linkType": "SOFT"\ }],\ @@ -3257,10 +3346,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "HARD"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.2", {\ - "packageLocation": "./.yarn/__virtual__/bootstrap-virtual-b366fabcb3/0/cache/bootstrap-npm-5.3.2-20b391b636-d5580b253d.zip/node_modules/bootstrap/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.3", {\ + "packageLocation": "./.yarn/__virtual__/bootstrap-virtual-2c24090b13/0/cache/bootstrap-npm-5.3.3-da08e2f0fe-537b68db30.zip/node_modules/bootstrap/",\ "packageDependencies": [\ - ["bootstrap", 
"virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.2"],\ + ["bootstrap", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.3"],\ ["@popperjs/core", "npm:2.11.8"],\ ["@types/popperjs__core", null]\ ],\ @@ -3272,10 +3361,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["bootstrap-icons", [\ - ["npm:1.11.1", {\ - "packageLocation": "./.yarn/cache/bootstrap-icons-npm-1.11.1-9f55aea76a-d78ff24a83.zip/node_modules/bootstrap-icons/",\ + ["npm:1.11.3", {\ + "packageLocation": "./.yarn/cache/bootstrap-icons-npm-1.11.3-8d5387bef2-d5cdb90fe3.zip/node_modules/bootstrap-icons/",\ "packageDependencies": [\ - ["bootstrap-icons", "npm:1.11.1"]\ + ["bootstrap-icons", "npm:1.11.3"]\ ],\ "linkType": "HARD"\ }]\ @@ -3342,6 +3431,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["builtin-modules", [\ + ["npm:3.3.0", {\ + "packageLocation": "./.yarn/cache/builtin-modules-npm-3.3.0-db4f3d32de-db021755d7.zip/node_modules/builtin-modules/",\ + "packageDependencies": [\ + ["builtin-modules", "npm:3.3.0"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["builtins", [\ ["npm:5.0.1", {\ "packageLocation": "./.yarn/cache/builtins-npm-5.0.1-6d4820dd76-66d204657f.zip/node_modules/builtins/",\ @@ -3353,18 +3451,17 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["c8", [\ - ["npm:8.0.1", {\ - "packageLocation": "./.yarn/cache/c8-npm-8.0.1-6462c8130b-2c47531d21.zip/node_modules/c8/",\ + ["npm:9.1.0", {\ + "packageLocation": "./.yarn/cache/c8-npm-9.1.0-92c3d37f46-c5249bf9c3.zip/node_modules/c8/",\ "packageDependencies": [\ - ["c8", "npm:8.0.1"],\ + ["c8", "npm:9.1.0"],\ ["@bcoe/v8-coverage", "npm:0.2.3"],\ ["@istanbuljs/schema", "npm:0.1.3"],\ ["find-up", "npm:5.0.0"],\ - ["foreground-child", "npm:2.0.0"],\ + ["foreground-child", "npm:3.1.1"],\ ["istanbul-lib-coverage", "npm:3.2.0"],\ ["istanbul-lib-report", "npm:3.0.1"],\ ["istanbul-reports", "npm:3.1.6"],\ - ["rimraf", "npm:3.0.2"],\ ["test-exclude", "npm:6.0.0"],\ ["v8-to-istanbul", "npm:9.0.1"],\ ["yargs", "npm:17.7.2"],\ @@ -3409,6 +3506,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["get-intrinsic", "npm:1.1.1"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:1.0.5", {\ + "packageLocation": "./.yarn/cache/call-bind-npm-1.0.5-65600fae47-449e83ecbd.zip/node_modules/call-bind/",\ + "packageDependencies": [\ + ["call-bind", "npm:1.0.5"],\ + ["function-bind", "npm:1.1.2"],\ + ["get-intrinsic", "npm:1.2.1"],\ + ["set-function-length", "npm:1.1.1"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["callsites", [\ @@ -3428,10 +3535,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "HARD"\ }],\ - ["npm:1.0.30001538", {\ - "packageLocation": "./.yarn/cache/caniuse-lite-npm-1.0.30001538-68bfe8259b-94c5d55757.zip/node_modules/caniuse-lite/",\ + ["npm:1.0.30001603", {\ + "packageLocation": "./.yarn/cache/caniuse-lite-npm-1.0.30001603-77af81f60b-e66e0d24b8.zip/node_modules/caniuse-lite/",\ "packageDependencies": [\ - ["caniuse-lite", "npm:1.0.30001538"]\ + ["caniuse-lite", "npm:1.0.30001603"]\ ],\ "linkType": "HARD"\ }]\ @@ -3467,6 +3574,66 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["chart.js", [\ + ["npm:4.5.1", {\ + "packageLocation": "./.yarn/cache/chart.js-npm-4.5.1-97698d58cc-34b35b3736.zip/node_modules/chart.js/",\ + 
"packageDependencies": [\ + ["chart.js", "npm:4.5.1"],\ + ["@kurkle/color", "npm:0.3.4"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ + ["chartjs-plugin-autocolors", [\ + ["npm:0.3.1", {\ + "packageLocation": "./.yarn/cache/chartjs-plugin-autocolors-npm-0.3.1-7e93d38139-de4f87b5bb.zip/node_modules/chartjs-plugin-autocolors/",\ + "packageDependencies": [\ + ["chartjs-plugin-autocolors", "npm:0.3.1"]\ + ],\ + "linkType": "SOFT"\ + }],\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:0.3.1", {\ + "packageLocation": "./.yarn/__virtual__/chartjs-plugin-autocolors-virtual-6e228c1a1e/0/cache/chartjs-plugin-autocolors-npm-0.3.1-7e93d38139-de4f87b5bb.zip/node_modules/chartjs-plugin-autocolors/",\ + "packageDependencies": [\ + ["chartjs-plugin-autocolors", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:0.3.1"],\ + ["@kurkle/color", "npm:0.3.1"],\ + ["@types/chart.js", null],\ + ["@types/kurkle__color", null],\ + ["chart.js", "npm:4.5.1"]\ + ],\ + "packagePeers": [\ + "@kurkle/color",\ + "@types/chart.js",\ + "@types/kurkle__color",\ + "chart.js"\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ + ["chartjs-plugin-zoom", [\ + ["npm:2.2.0", {\ + "packageLocation": "./.yarn/cache/chartjs-plugin-zoom-npm-2.2.0-85aea0b81e-a540e38340.zip/node_modules/chartjs-plugin-zoom/",\ + "packageDependencies": [\ + ["chartjs-plugin-zoom", "npm:2.2.0"]\ + ],\ + "linkType": "SOFT"\ + }],\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.2.0", {\ + "packageLocation": "./.yarn/__virtual__/chartjs-plugin-zoom-virtual-45332d2c47/0/cache/chartjs-plugin-zoom-npm-2.2.0-85aea0b81e-a540e38340.zip/node_modules/chartjs-plugin-zoom/",\ + "packageDependencies": [\ + ["chartjs-plugin-zoom", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.2.0"],\ + ["@types/chart.js", null],\ + ["@types/hammerjs", "npm:2.0.46"],\ + ["chart.js", "npm:4.5.1"],\ + ["hammerjs", "npm:2.0.8"]\ + ],\ + "packagePeers": [\ + "@types/chart.js",\ + "chart.js"\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["chokidar", [\ ["npm:3.5.3", {\ "packageLocation": "./.yarn/cache/chokidar-npm-3.5.3-c5f9b0a56a-b49fcde401.zip/node_modules/chokidar/",\ @@ -3731,19 +3898,19 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "HARD"\ }],\ - ["npm:3.1.2", {\ - "packageLocation": "./.yarn/cache/csstype-npm-3.1.2-cead7d99b2-e1a52e6c25.zip/node_modules/csstype/",\ + ["npm:3.1.3", {\ + "packageLocation": "./.yarn/cache/csstype-npm-3.1.3-e9a1c85013-8db785cc92.zip/node_modules/csstype/",\ "packageDependencies": [\ - ["csstype", "npm:3.1.2"]\ + ["csstype", "npm:3.1.3"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["d3", [\ - ["npm:7.8.5", {\ - "packageLocation": "./.yarn/cache/d3-npm-7.8.5-5db20a5616-e407e79731.zip/node_modules/d3/",\ + ["npm:7.9.0", {\ + "packageLocation": "./.yarn/cache/d3-npm-7.9.0-d293821ce6-1c0e9135f1.zip/node_modules/d3/",\ "packageDependencies": [\ - ["d3", "npm:7.8.5"],\ + ["d3", "npm:7.9.0"],\ ["d3-array", "npm:3.1.6"],\ ["d3-axis", "npm:3.0.0"],\ ["d3-brush", "npm:3.0.0"],\ @@ -4127,10 +4294,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - 
["virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:2.0.0", {\ - "packageLocation": "./.yarn/__virtual__/date-fns-tz-virtual-b19bed24ca/0/cache/date-fns-tz-npm-2.0.0-9b7996f292-a6553603a9.zip/node_modules/date-fns-tz/",\ + ["virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:2.0.0", {\ + "packageLocation": "./.yarn/__virtual__/date-fns-tz-virtual-6610d5adee/0/cache/date-fns-tz-npm-2.0.0-9b7996f292-a6553603a9.zip/node_modules/date-fns-tz/",\ "packageDependencies": [\ - ["date-fns-tz", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:2.0.0"],\ + ["date-fns-tz", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:2.0.0"],\ ["@types/date-fns", null],\ ["date-fns", "npm:2.30.0"]\ ],\ @@ -4163,10 +4330,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - ["virtual:65bed195431eadffc59e2238eb20cc12d9a1665bc7458ce780a9320ff795091b03cb5c4c2094938315ddd967b5b02c0f1df67b3ed435c69b7457092b7cc06ed8#npm:3.2.7", {\ - "packageLocation": "./.yarn/__virtual__/debug-virtual-2e2daad542/0/cache/debug-npm-3.2.7-754e818c7a-b3d8c59407.zip/node_modules/debug/",\ + ["virtual:2a426afc4b2eef43db12a540d29c2b5476640459bfcd5c24f86bb401cf8cce97e63bd81794d206a5643057e7f662643afd5ce3dfc4d4bfd8e706006c6309c5fa#npm:3.2.7", {\ + "packageLocation": "./.yarn/__virtual__/debug-virtual-d2345003b7/0/cache/debug-npm-3.2.7-754e818c7a-b3d8c59407.zip/node_modules/debug/",\ "packageDependencies": [\ - ["debug", "virtual:65bed195431eadffc59e2238eb20cc12d9a1665bc7458ce780a9320ff795091b03cb5c4c2094938315ddd967b5b02c0f1df67b3ed435c69b7457092b7cc06ed8#npm:3.2.7"],\ + ["debug", "virtual:2a426afc4b2eef43db12a540d29c2b5476640459bfcd5c24f86bb401cf8cce97e63bd81794d206a5643057e7f662643afd5ce3dfc4d4bfd8e706006c6309c5fa#npm:3.2.7"],\ ["@types/supports-color", null],\ ["ms", "npm:2.1.2"],\ ["supports-color", null]\ @@ -4216,13 +4383,6 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["deepmerge", [\ - ["npm:4.2.2", {\ - "packageLocation": "./.yarn/cache/deepmerge-npm-4.2.2-112165ced2-a8c43a1ed8.zip/node_modules/deepmerge/",\ - "packageDependencies": [\ - ["deepmerge", "npm:4.2.2"]\ - ],\ - "linkType": "HARD"\ - }],\ ["npm:4.3.1", {\ "packageLocation": "./.yarn/cache/deepmerge-npm-4.3.1-4f751a0844-2024c6a980.zip/node_modules/deepmerge/",\ "packageDependencies": [\ @@ -4231,6 +4391,18 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["define-data-property", [\ + ["npm:1.1.1", {\ + "packageLocation": "./.yarn/cache/define-data-property-npm-1.1.1-2b5156d112-a29855ad3f.zip/node_modules/define-data-property/",\ + "packageDependencies": [\ + ["define-data-property", "npm:1.1.1"],\ + ["get-intrinsic", "npm:1.2.1"],\ + ["gopd", "npm:1.0.1"],\ + ["has-property-descriptors", "npm:1.0.0"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["define-properties", [\ ["npm:1.1.4", {\ "packageLocation": "./.yarn/cache/define-properties-npm-1.1.4-85ee575655-ce0aef3f9e.zip/node_modules/define-properties/",\ @@ -4475,6 +4647,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["entities", "npm:3.0.1"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:4.5.0", {\ + "packageLocation": 
"./.yarn/cache/entities-npm-4.5.0-7cdb83b832-853f8ebd5b.zip/node_modules/entities/",\ + "packageDependencies": [\ + ["entities", "npm:4.5.0"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["env-paths", [\ @@ -4506,65 +4685,25 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["es-abstract", [\ - ["npm:1.21.1", {\ - "packageLocation": "./.yarn/cache/es-abstract-npm-1.21.1-28d9a4a469-23ff60d42d.zip/node_modules/es-abstract/",\ + ["npm:1.22.3", {\ + "packageLocation": "./.yarn/cache/es-abstract-npm-1.22.3-15a58832e5-b1bdc96285.zip/node_modules/es-abstract/",\ "packageDependencies": [\ - ["es-abstract", "npm:1.21.1"],\ - ["available-typed-arrays", "npm:1.0.5"],\ - ["call-bind", "npm:1.0.2"],\ - ["es-set-tostringtag", "npm:2.0.1"],\ - ["es-to-primitive", "npm:1.2.1"],\ - ["function-bind", "npm:1.1.1"],\ - ["function.prototype.name", "npm:1.1.5"],\ - ["get-intrinsic", "npm:1.2.0"],\ - ["get-symbol-description", "npm:1.0.0"],\ - ["globalthis", "npm:1.0.3"],\ - ["gopd", "npm:1.0.1"],\ - ["has", "npm:1.0.3"],\ - ["has-property-descriptors", "npm:1.0.0"],\ - ["has-proto", "npm:1.0.1"],\ - ["has-symbols", "npm:1.0.3"],\ - ["internal-slot", "npm:1.0.4"],\ - ["is-array-buffer", "npm:3.0.1"],\ - ["is-callable", "npm:1.2.7"],\ - ["is-negative-zero", "npm:2.0.2"],\ - ["is-regex", "npm:1.1.4"],\ - ["is-shared-array-buffer", "npm:1.0.2"],\ - ["is-string", "npm:1.0.7"],\ - ["is-typed-array", "npm:1.1.10"],\ - ["is-weakref", "npm:1.0.2"],\ - ["object-inspect", "npm:1.12.3"],\ - ["object-keys", "npm:1.1.1"],\ - ["object.assign", "npm:4.1.4"],\ - ["regexp.prototype.flags", "npm:1.4.3"],\ - ["safe-regex-test", "npm:1.0.0"],\ - ["string.prototype.trimend", "npm:1.0.6"],\ - ["string.prototype.trimstart", "npm:1.0.6"],\ - ["typed-array-length", "npm:1.0.4"],\ - ["unbox-primitive", "npm:1.0.2"],\ - ["which-typed-array", "npm:1.1.9"]\ - ],\ - "linkType": "HARD"\ - }],\ - ["npm:1.22.1", {\ - "packageLocation": "./.yarn/cache/es-abstract-npm-1.22.1-bfe4c9a3e1-614e2c1c37.zip/node_modules/es-abstract/",\ - "packageDependencies": [\ - ["es-abstract", "npm:1.22.1"],\ + ["es-abstract", "npm:1.22.3"],\ ["array-buffer-byte-length", "npm:1.0.0"],\ - ["arraybuffer.prototype.slice", "npm:1.0.1"],\ + ["arraybuffer.prototype.slice", "npm:1.0.2"],\ ["available-typed-arrays", "npm:1.0.5"],\ - ["call-bind", "npm:1.0.2"],\ + ["call-bind", "npm:1.0.5"],\ ["es-set-tostringtag", "npm:2.0.1"],\ ["es-to-primitive", "npm:1.2.1"],\ - ["function.prototype.name", "npm:1.1.5"],\ - ["get-intrinsic", "npm:1.2.1"],\ + ["function.prototype.name", "npm:1.1.6"],\ + ["get-intrinsic", "npm:1.2.2"],\ ["get-symbol-description", "npm:1.0.0"],\ ["globalthis", "npm:1.0.3"],\ ["gopd", "npm:1.0.1"],\ - ["has", "npm:1.0.3"],\ ["has-property-descriptors", "npm:1.0.0"],\ ["has-proto", "npm:1.0.1"],\ ["has-symbols", "npm:1.0.3"],\ + ["hasown", "npm:2.0.0"],\ ["internal-slot", "npm:1.0.5"],\ ["is-array-buffer", "npm:3.0.2"],\ ["is-callable", "npm:1.2.7"],\ @@ -4572,23 +4711,23 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["is-regex", "npm:1.1.4"],\ ["is-shared-array-buffer", "npm:1.0.2"],\ ["is-string", "npm:1.0.7"],\ - ["is-typed-array", "npm:1.1.10"],\ + ["is-typed-array", "npm:1.1.12"],\ ["is-weakref", "npm:1.0.2"],\ - ["object-inspect", "npm:1.12.3"],\ + ["object-inspect", "npm:1.13.1"],\ ["object-keys", "npm:1.1.1"],\ ["object.assign", "npm:4.1.4"],\ - ["regexp.prototype.flags", "npm:1.5.0"],\ - ["safe-array-concat", "npm:1.0.0"],\ + ["regexp.prototype.flags", "npm:1.5.1"],\ + ["safe-array-concat", "npm:1.0.1"],\ ["safe-regex-test", 
"npm:1.0.0"],\ - ["string.prototype.trim", "npm:1.2.7"],\ - ["string.prototype.trimend", "npm:1.0.6"],\ - ["string.prototype.trimstart", "npm:1.0.6"],\ + ["string.prototype.trim", "npm:1.2.8"],\ + ["string.prototype.trimend", "npm:1.0.7"],\ + ["string.prototype.trimstart", "npm:1.0.7"],\ ["typed-array-buffer", "npm:1.0.0"],\ ["typed-array-byte-length", "npm:1.0.0"],\ ["typed-array-byte-offset", "npm:1.0.0"],\ ["typed-array-length", "npm:1.0.4"],\ ["unbox-primitive", "npm:1.0.2"],\ - ["which-typed-array", "npm:1.1.11"]\ + ["which-typed-array", "npm:1.1.13"]\ ],\ "linkType": "HARD"\ }]\ @@ -4693,17 +4832,18 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["eslint", [\ - ["npm:8.51.0", {\ - "packageLocation": "./.yarn/cache/eslint-npm-8.51.0-77fce3ec74-214fa5d1fc.zip/node_modules/eslint/",\ + ["npm:8.57.0", {\ + "packageLocation": "./.yarn/cache/eslint-npm-8.57.0-4286e12a3a-3a48d7ff85.zip/node_modules/eslint/",\ "packageDependencies": [\ - ["eslint", "npm:8.51.0"],\ - ["@eslint-community/eslint-utils", "virtual:77fce3ec74d55c7e6791631c329cf3adde374e21e618e865127f72e63efeb3376dcf7fc8217de80f5a310e81c791a72e9d099b00fd3252d5653ff68dff50c2fa#npm:4.4.0"],\ + ["eslint", "npm:8.57.0"],\ + ["@eslint-community/eslint-utils", "virtual:4286e12a3a0f74af013bc8f16c6d8fdde823cfbf6389660266b171e551f576c805b0a7a8eb2a7087a5cee7dfe6ebb6e1ea3808d93daf915edc95656907a381bb#npm:4.4.0"],\ ["@eslint-community/regexpp", "npm:4.8.0"],\ - ["@eslint/eslintrc", "npm:2.1.2"],\ - ["@eslint/js", "npm:8.51.0"],\ - ["@humanwhocodes/config-array", "npm:0.11.11"],\ + ["@eslint/eslintrc", "npm:2.1.4"],\ + ["@eslint/js", "npm:8.57.0"],\ + ["@humanwhocodes/config-array", "npm:0.11.14"],\ ["@humanwhocodes/module-importer", "npm:1.0.1"],\ ["@nodelib/fs.walk", "npm:1.2.8"],\ + ["@ungap/structured-clone", "npm:1.2.0"],\ ["ajv", "npm:6.12.6"],\ ["chalk", "npm:4.1.2"],\ ["cross-spawn", "npm:7.0.3"],\ @@ -4738,6 +4878,28 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["eslint-compat-utils", [\ + ["npm:0.1.2", {\ + "packageLocation": "./.yarn/cache/eslint-compat-utils-npm-0.1.2-361c6992b1-2315d9db81.zip/node_modules/eslint-compat-utils/",\ + "packageDependencies": [\ + ["eslint-compat-utils", "npm:0.1.2"]\ + ],\ + "linkType": "SOFT"\ + }],\ + ["virtual:ff64d06f93654b25d9cae47199e62d111efde9ee7d408664ae44397cd2ddf7906aefd54fcc2557f4d5619d92da3af68c7898126469c2a57c381e05b06491f0da#npm:0.1.2", {\ + "packageLocation": "./.yarn/__virtual__/eslint-compat-utils-virtual-a5f7e6147b/0/cache/eslint-compat-utils-npm-0.1.2-361c6992b1-2315d9db81.zip/node_modules/eslint-compat-utils/",\ + "packageDependencies": [\ + ["eslint-compat-utils", "virtual:ff64d06f93654b25d9cae47199e62d111efde9ee7d408664ae44397cd2ddf7906aefd54fcc2557f4d5619d92da3af68c7898126469c2a57c381e05b06491f0da#npm:0.1.2"],\ + ["@types/eslint", null],\ + ["eslint", "npm:8.57.0"]\ + ],\ + "packagePeers": [\ + "@types/eslint",\ + "eslint"\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["eslint-config-standard", [\ ["npm:17.1.0", {\ "packageLocation": "./.yarn/cache/eslint-config-standard-npm-17.1.0-e72fd623cc-8ed14ffe42.zip/node_modules/eslint-config-standard/",\ @@ -4754,9 +4916,9 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["@types/eslint-plugin-import", null],\ ["@types/eslint-plugin-n", null],\ ["@types/eslint-plugin-promise", null],\ - ["eslint", "npm:8.51.0"],\ - ["eslint-plugin-import", 
"virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.28.1"],\ - ["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.2.0"],\ + ["eslint", "npm:8.57.0"],\ + ["eslint-plugin-import", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.29.1"],\ + ["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.6.2"],\ ["eslint-plugin-promise", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.1"]\ ],\ "packagePeers": [\ @@ -4773,13 +4935,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["eslint-import-resolver-node", [\ - ["npm:0.3.7", {\ - "packageLocation": "./.yarn/cache/eslint-import-resolver-node-npm-0.3.7-65bed19543-3379aacf1d.zip/node_modules/eslint-import-resolver-node/",\ + ["npm:0.3.9", {\ + "packageLocation": "./.yarn/cache/eslint-import-resolver-node-npm-0.3.9-2a426afc4b-439b912712.zip/node_modules/eslint-import-resolver-node/",\ "packageDependencies": [\ - ["eslint-import-resolver-node", "npm:0.3.7"],\ - ["debug", "virtual:65bed195431eadffc59e2238eb20cc12d9a1665bc7458ce780a9320ff795091b03cb5c4c2094938315ddd967b5b02c0f1df67b3ed435c69b7457092b7cc06ed8#npm:3.2.7"],\ - ["is-core-module", "npm:2.11.0"],\ - ["resolve", "patch:resolve@npm%3A1.22.1#~builtin::version=1.22.1&hash=07638b"]\ + ["eslint-import-resolver-node", "npm:0.3.9"],\ + ["debug", "virtual:2a426afc4b2eef43db12a540d29c2b5476640459bfcd5c24f86bb401cf8cce97e63bd81794d206a5643057e7f662643afd5ce3dfc4d4bfd8e706006c6309c5fa#npm:3.2.7"],\ + ["is-core-module", "npm:2.13.0"],\ + ["resolve", "patch:resolve@npm%3A1.22.8#~builtin::version=1.22.8&hash=07638b"]\ ],\ "linkType": "HARD"\ }]\ @@ -4792,19 +4954,19 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - ["virtual:ef2ff17f0affe5aeeb05f2e27f2212e975bb78d898c026b74cc62e05a17de36abb35a54f0831f2ff5fced26e6128bfc2c0cf332f7c60149823619b008d0ea480#npm:2.8.0", {\ - "packageLocation": "./.yarn/__virtual__/eslint-module-utils-virtual-0e305f99a7/0/cache/eslint-module-utils-npm-2.8.0-05e42bcab0-74c6dfea76.zip/node_modules/eslint-module-utils/",\ + ["virtual:caddce79266c9767570f5c081ff9adaab1d8b040965749cfca6a3f3f4fbd011bf36f7d755f18ef80e67a5402a33b10c9e1ffc34efb6909461044fc5d60cfbcd0#npm:2.8.0", {\ + "packageLocation": "./.yarn/__virtual__/eslint-module-utils-virtual-d80573de1e/0/cache/eslint-module-utils-npm-2.8.0-05e42bcab0-74c6dfea76.zip/node_modules/eslint-module-utils/",\ "packageDependencies": [\ - ["eslint-module-utils", "virtual:ef2ff17f0affe5aeeb05f2e27f2212e975bb78d898c026b74cc62e05a17de36abb35a54f0831f2ff5fced26e6128bfc2c0cf332f7c60149823619b008d0ea480#npm:2.8.0"],\ + ["eslint-module-utils", "virtual:caddce79266c9767570f5c081ff9adaab1d8b040965749cfca6a3f3f4fbd011bf36f7d755f18ef80e67a5402a33b10c9e1ffc34efb6909461044fc5d60cfbcd0#npm:2.8.0"],\ ["@types/eslint", null],\ ["@types/eslint-import-resolver-node", null],\ ["@types/eslint-import-resolver-typescript", null],\ ["@types/eslint-import-resolver-webpack", null],\ ["@types/typescript-eslint__parser", null],\ ["@typescript-eslint/parser", null],\ - ["debug", 
"virtual:65bed195431eadffc59e2238eb20cc12d9a1665bc7458ce780a9320ff795091b03cb5c4c2094938315ddd967b5b02c0f1df67b3ed435c69b7457092b7cc06ed8#npm:3.2.7"],\ - ["eslint", "npm:8.51.0"],\ - ["eslint-import-resolver-node", "npm:0.3.7"],\ + ["debug", "virtual:2a426afc4b2eef43db12a540d29c2b5476640459bfcd5c24f86bb401cf8cce97e63bd81794d206a5643057e7f662643afd5ce3dfc4d4bfd8e706006c6309c5fa#npm:3.2.7"],\ + ["eslint", "npm:8.57.0"],\ + ["eslint-import-resolver-node", "npm:0.3.9"],\ ["eslint-import-resolver-typescript", null],\ ["eslint-import-resolver-webpack", null]\ ],\ @@ -4836,7 +4998,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "packageDependencies": [\ ["eslint-plugin-cypress", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.15.1"],\ ["@types/eslint", null],\ - ["eslint", "npm:8.51.0"],\ + ["eslint", "npm:8.57.0"],\ ["globals", "npm:13.21.0"]\ ],\ "packagePeers": [\ @@ -4859,7 +5021,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "packageDependencies": [\ ["eslint-plugin-es", "virtual:5cccaf00e87dfff96dbbb5eaf7a3055373358b8114d6a1adfb32f54ed6b40ba06068d3aa1fdd8062899a0cad040f68c17cc6b72bac2cdbe9700f3d6330d112f3#npm:3.0.1"],\ ["@types/eslint", null],\ - ["eslint", "npm:8.51.0"],\ + ["eslint", "npm:8.57.0"],\ ["eslint-utils", "npm:2.1.0"],\ ["regexpp", "npm:3.2.0"]\ ],\ @@ -4871,21 +5033,22 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["eslint-plugin-es-x", [\ - ["npm:7.1.0", {\ - "packageLocation": "./.yarn/cache/eslint-plugin-es-x-npm-7.1.0-35735e8bbc-a19924313c.zip/node_modules/eslint-plugin-es-x/",\ + ["npm:7.5.0", {\ + "packageLocation": "./.yarn/cache/eslint-plugin-es-x-npm-7.5.0-77e84d6e5d-e770e57df7.zip/node_modules/eslint-plugin-es-x/",\ "packageDependencies": [\ - ["eslint-plugin-es-x", "npm:7.1.0"]\ + ["eslint-plugin-es-x", "npm:7.5.0"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:990a558e2898625aeb04d5e4d99c65569b456f1205bf2f887fd96310a4f502e48e83c1c993f5a46d1cb27cf7bf7d2b513e2850b2c2df904e744e7d8325464673#npm:7.1.0", {\ - "packageLocation": "./.yarn/__virtual__/eslint-plugin-es-x-virtual-c166cb743f/0/cache/eslint-plugin-es-x-npm-7.1.0-35735e8bbc-a19924313c.zip/node_modules/eslint-plugin-es-x/",\ + ["virtual:e72a0a9306438b1033938dd0da350cf9f4ec062648c9360382edaa21499b6290430f07b640481cdb3f67c818af79a821eb8f3071ebf7284ab09c47cb982d8502#npm:7.5.0", {\ + "packageLocation": "./.yarn/__virtual__/eslint-plugin-es-x-virtual-ff64d06f93/0/cache/eslint-plugin-es-x-npm-7.5.0-77e84d6e5d-e770e57df7.zip/node_modules/eslint-plugin-es-x/",\ "packageDependencies": [\ - ["eslint-plugin-es-x", "virtual:990a558e2898625aeb04d5e4d99c65569b456f1205bf2f887fd96310a4f502e48e83c1c993f5a46d1cb27cf7bf7d2b513e2850b2c2df904e744e7d8325464673#npm:7.1.0"],\ - ["@eslint-community/eslint-utils", "virtual:77fce3ec74d55c7e6791631c329cf3adde374e21e618e865127f72e63efeb3376dcf7fc8217de80f5a310e81c791a72e9d099b00fd3252d5653ff68dff50c2fa#npm:4.4.0"],\ - ["@eslint-community/regexpp", "npm:4.5.1"],\ + ["eslint-plugin-es-x", "virtual:e72a0a9306438b1033938dd0da350cf9f4ec062648c9360382edaa21499b6290430f07b640481cdb3f67c818af79a821eb8f3071ebf7284ab09c47cb982d8502#npm:7.5.0"],\ + ["@eslint-community/eslint-utils", "virtual:4286e12a3a0f74af013bc8f16c6d8fdde823cfbf6389660266b171e551f576c805b0a7a8eb2a7087a5cee7dfe6ebb6e1ea3808d93daf915edc95656907a381bb#npm:4.4.0"],\ + ["@eslint-community/regexpp", "npm:4.10.0"],\ ["@types/eslint", null],\ - ["eslint", "npm:8.51.0"]\ + ["eslint", "npm:8.57.0"],\ + 
["eslint-compat-utils", "virtual:ff64d06f93654b25d9cae47199e62d111efde9ee7d408664ae44397cd2ddf7906aefd54fcc2557f4d5619d92da3af68c7898126469c2a57c381e05b06491f0da#npm:0.1.2"]\ ],\ "packagePeers": [\ "@types/eslint",\ @@ -4895,38 +5058,38 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["eslint-plugin-import", [\ - ["npm:2.28.1", {\ - "packageLocation": "./.yarn/cache/eslint-plugin-import-npm-2.28.1-2056ddf35c-e8ae6dd8f0.zip/node_modules/eslint-plugin-import/",\ + ["npm:2.29.1", {\ + "packageLocation": "./.yarn/cache/eslint-plugin-import-npm-2.29.1-b94305f7dc-e65159aef8.zip/node_modules/eslint-plugin-import/",\ "packageDependencies": [\ - ["eslint-plugin-import", "npm:2.28.1"]\ + ["eslint-plugin-import", "npm:2.29.1"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.28.1", {\ - "packageLocation": "./.yarn/__virtual__/eslint-plugin-import-virtual-ef2ff17f0a/0/cache/eslint-plugin-import-npm-2.28.1-2056ddf35c-e8ae6dd8f0.zip/node_modules/eslint-plugin-import/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.29.1", {\ + "packageLocation": "./.yarn/__virtual__/eslint-plugin-import-virtual-caddce7926/0/cache/eslint-plugin-import-npm-2.29.1-b94305f7dc-e65159aef8.zip/node_modules/eslint-plugin-import/",\ "packageDependencies": [\ - ["eslint-plugin-import", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.28.1"],\ + ["eslint-plugin-import", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.29.1"],\ ["@types/eslint", null],\ ["@types/typescript-eslint__parser", null],\ ["@typescript-eslint/parser", null],\ - ["array-includes", "npm:3.1.6"],\ - ["array.prototype.findlastindex", "npm:1.2.2"],\ - ["array.prototype.flat", "npm:1.3.1"],\ - ["array.prototype.flatmap", "npm:1.3.1"],\ - ["debug", "virtual:65bed195431eadffc59e2238eb20cc12d9a1665bc7458ce780a9320ff795091b03cb5c4c2094938315ddd967b5b02c0f1df67b3ed435c69b7457092b7cc06ed8#npm:3.2.7"],\ + ["array-includes", "npm:3.1.7"],\ + ["array.prototype.findlastindex", "npm:1.2.3"],\ + ["array.prototype.flat", "npm:1.3.2"],\ + ["array.prototype.flatmap", "npm:1.3.2"],\ + ["debug", "virtual:2a426afc4b2eef43db12a540d29c2b5476640459bfcd5c24f86bb401cf8cce97e63bd81794d206a5643057e7f662643afd5ce3dfc4d4bfd8e706006c6309c5fa#npm:3.2.7"],\ ["doctrine", "npm:2.1.0"],\ - ["eslint", "npm:8.51.0"],\ - ["eslint-import-resolver-node", "npm:0.3.7"],\ - ["eslint-module-utils", "virtual:ef2ff17f0affe5aeeb05f2e27f2212e975bb78d898c026b74cc62e05a17de36abb35a54f0831f2ff5fced26e6128bfc2c0cf332f7c60149823619b008d0ea480#npm:2.8.0"],\ - ["has", "npm:1.0.3"],\ - ["is-core-module", "npm:2.13.0"],\ + ["eslint", "npm:8.57.0"],\ + ["eslint-import-resolver-node", "npm:0.3.9"],\ + ["eslint-module-utils", "virtual:caddce79266c9767570f5c081ff9adaab1d8b040965749cfca6a3f3f4fbd011bf36f7d755f18ef80e67a5402a33b10c9e1ffc34efb6909461044fc5d60cfbcd0#npm:2.8.0"],\ + ["hasown", "npm:2.0.0"],\ + ["is-core-module", "npm:2.13.1"],\ ["is-glob", "npm:4.0.3"],\ ["minimatch", "npm:3.1.2"],\ - ["object.fromentries", "npm:2.0.6"],\ - ["object.groupby", "npm:1.0.0"],\ - ["object.values", "npm:1.1.6"],\ + ["object.fromentries", "npm:2.0.7"],\ + ["object.groupby", "npm:1.0.1"],\ + 
["object.values", "npm:1.1.7"],\ ["semver", "npm:6.3.1"],\ - ["tsconfig-paths", "npm:3.14.2"]\ + ["tsconfig-paths", "npm:3.15.0"]\ ],\ "packagePeers": [\ "@types/eslint",\ @@ -4938,24 +5101,26 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["eslint-plugin-n", [\ - ["npm:16.2.0", {\ - "packageLocation": "./.yarn/cache/eslint-plugin-n-npm-16.2.0-b2b8355312-124ba4f418.zip/node_modules/eslint-plugin-n/",\ + ["npm:16.6.2", {\ + "packageLocation": "./.yarn/cache/eslint-plugin-n-npm-16.6.2-77775852d0-3b468da003.zip/node_modules/eslint-plugin-n/",\ "packageDependencies": [\ - ["eslint-plugin-n", "npm:16.2.0"]\ + ["eslint-plugin-n", "npm:16.6.2"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.2.0", {\ - "packageLocation": "./.yarn/__virtual__/eslint-plugin-n-virtual-990a558e28/0/cache/eslint-plugin-n-npm-16.2.0-b2b8355312-124ba4f418.zip/node_modules/eslint-plugin-n/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.6.2", {\ + "packageLocation": "./.yarn/__virtual__/eslint-plugin-n-virtual-e72a0a9306/0/cache/eslint-plugin-n-npm-16.6.2-77775852d0-3b468da003.zip/node_modules/eslint-plugin-n/",\ "packageDependencies": [\ - ["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.2.0"],\ - ["@eslint-community/eslint-utils", "virtual:77fce3ec74d55c7e6791631c329cf3adde374e21e618e865127f72e63efeb3376dcf7fc8217de80f5a310e81c791a72e9d099b00fd3252d5653ff68dff50c2fa#npm:4.4.0"],\ + ["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.6.2"],\ + ["@eslint-community/eslint-utils", "virtual:4286e12a3a0f74af013bc8f16c6d8fdde823cfbf6389660266b171e551f576c805b0a7a8eb2a7087a5cee7dfe6ebb6e1ea3808d93daf915edc95656907a381bb#npm:4.4.0"],\ ["@types/eslint", null],\ ["builtins", "npm:5.0.1"],\ - ["eslint", "npm:8.51.0"],\ - ["eslint-plugin-es-x", "virtual:990a558e2898625aeb04d5e4d99c65569b456f1205bf2f887fd96310a4f502e48e83c1c993f5a46d1cb27cf7bf7d2b513e2850b2c2df904e744e7d8325464673#npm:7.1.0"],\ + ["eslint", "npm:8.57.0"],\ + ["eslint-plugin-es-x", "virtual:e72a0a9306438b1033938dd0da350cf9f4ec062648c9360382edaa21499b6290430f07b640481cdb3f67c818af79a821eb8f3071ebf7284ab09c47cb982d8502#npm:7.5.0"],\ ["get-tsconfig", "npm:4.7.2"],\ + ["globals", "npm:13.24.0"],\ ["ignore", "npm:5.2.4"],\ + ["is-builtin-module", "npm:3.2.1"],\ ["is-core-module", "npm:2.12.1"],\ ["minimatch", "npm:3.1.2"],\ ["resolve", "patch:resolve@npm%3A1.22.3#~builtin::version=1.22.3&hash=07638b"],\ @@ -4981,7 +5146,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "packageDependencies": [\ ["eslint-plugin-node", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:11.1.0"],\ ["@types/eslint", null],\ - ["eslint", "npm:8.51.0"],\ + ["eslint", "npm:8.57.0"],\ ["eslint-plugin-es", "virtual:5cccaf00e87dfff96dbbb5eaf7a3055373358b8114d6a1adfb32f54ed6b40ba06068d3aa1fdd8062899a0cad040f68c17cc6b72bac2cdbe9700f3d6330d112f3#npm:3.0.1"],\ ["eslint-utils", "npm:2.1.0"],\ ["ignore", "npm:5.2.0"],\ @@ -5009,7 +5174,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "packageDependencies": [\ 
["eslint-plugin-promise", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.1"],\ ["@types/eslint", null],\ - ["eslint", "npm:8.51.0"]\ + ["eslint", "npm:8.57.0"]\ ],\ "packagePeers": [\ "@types/eslint",\ @@ -5019,25 +5184,26 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["eslint-plugin-vue", [\ - ["npm:9.17.0", {\ - "packageLocation": "./.yarn/cache/eslint-plugin-vue-npm-9.17.0-c32115eab8-2ef53a0387.zip/node_modules/eslint-plugin-vue/",\ + ["npm:9.24.0", {\ + "packageLocation": "./.yarn/cache/eslint-plugin-vue-npm-9.24.0-4c6dba51bf-2309b919d8.zip/node_modules/eslint-plugin-vue/",\ "packageDependencies": [\ - ["eslint-plugin-vue", "npm:9.17.0"]\ + ["eslint-plugin-vue", "npm:9.24.0"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.17.0", {\ - "packageLocation": "./.yarn/__virtual__/eslint-plugin-vue-virtual-e39e5d6bef/0/cache/eslint-plugin-vue-npm-9.17.0-c32115eab8-2ef53a0387.zip/node_modules/eslint-plugin-vue/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.24.0", {\ + "packageLocation": "./.yarn/__virtual__/eslint-plugin-vue-virtual-e080dd5dc6/0/cache/eslint-plugin-vue-npm-9.24.0-4c6dba51bf-2309b919d8.zip/node_modules/eslint-plugin-vue/",\ "packageDependencies": [\ - ["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.17.0"],\ - ["@eslint-community/eslint-utils", "virtual:77fce3ec74d55c7e6791631c329cf3adde374e21e618e865127f72e63efeb3376dcf7fc8217de80f5a310e81c791a72e9d099b00fd3252d5653ff68dff50c2fa#npm:4.4.0"],\ + ["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.24.0"],\ + ["@eslint-community/eslint-utils", "virtual:4286e12a3a0f74af013bc8f16c6d8fdde823cfbf6389660266b171e551f576c805b0a7a8eb2a7087a5cee7dfe6ebb6e1ea3808d93daf915edc95656907a381bb#npm:4.4.0"],\ ["@types/eslint", null],\ - ["eslint", "npm:8.51.0"],\ + ["eslint", "npm:8.57.0"],\ + ["globals", "npm:13.24.0"],\ ["natural-compare", "npm:1.4.0"],\ ["nth-check", "npm:2.1.1"],\ - ["postcss-selector-parser", "npm:6.0.13"],\ - ["semver", "npm:7.5.4"],\ - ["vue-eslint-parser", "virtual:e39e5d6bef7a93bd3b21c5c9ba6ef825c92fc73c8d9c9e01699e1dc11e40fd3bc150ba16509e2cf59495cb098c32b2e4a85c0c21802fddeffc3208b01f4f5a16#npm:9.3.1"],\ + ["postcss-selector-parser", "npm:6.0.15"],\ + ["semver", "npm:7.6.0"],\ + ["vue-eslint-parser", "virtual:e080dd5dc65fb3541eb98fd929c3a1d3733f3aff4bb24b09a6b5cce9fba4a29aca07e286ef93079f2144caa0fd33bb6545549286d3a9f2b9a211caa1f4b68ff9#npm:9.4.2"],\ ["xml-name-validator", "npm:4.0.0"]\ ],\ "packagePeers": [\ @@ -5317,15 +5483,6 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["foreground-child", [\ - ["npm:2.0.0", {\ - "packageLocation": "./.yarn/cache/foreground-child-npm-2.0.0-80c976b61e-f77ec9aff6.zip/node_modules/foreground-child/",\ - "packageDependencies": [\ - ["foreground-child", "npm:2.0.0"],\ - ["cross-spawn", "npm:7.0.3"],\ - ["signal-exit", "npm:3.0.7"]\ - ],\ - "linkType": "HARD"\ - }],\ ["npm:3.1.1", {\ "packageLocation": 
"./.yarn/cache/foreground-child-npm-3.1.1-77e78ed774-139d270bc8.zip/node_modules/foreground-child/",\ "packageDependencies": [\ @@ -5381,16 +5538,23 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["function-bind", "npm:1.1.1"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:1.1.2", {\ + "packageLocation": "./.yarn/cache/function-bind-npm-1.1.2-7a55be9b03-2b0ff4ce70.zip/node_modules/function-bind/",\ + "packageDependencies": [\ + ["function-bind", "npm:1.1.2"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["function.prototype.name", [\ - ["npm:1.1.5", {\ - "packageLocation": "./.yarn/cache/function.prototype.name-npm-1.1.5-e776a642bb-acd21d733a.zip/node_modules/function.prototype.name/",\ + ["npm:1.1.6", {\ + "packageLocation": "./.yarn/cache/function.prototype.name-npm-1.1.6-fd3a6a5cdd-7a3f9bd98a.zip/node_modules/function.prototype.name/",\ "packageDependencies": [\ - ["function.prototype.name", "npm:1.1.5"],\ + ["function.prototype.name", "npm:1.1.6"],\ ["call-bind", "npm:1.0.2"],\ - ["define-properties", "npm:1.1.4"],\ - ["es-abstract", "npm:1.21.1"],\ + ["define-properties", "npm:1.2.0"],\ + ["es-abstract", "npm:1.22.3"],\ ["functions-have-names", "npm:1.2.3"]\ ],\ "linkType": "HARD"\ @@ -5462,6 +5626,17 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["has-symbols", "npm:1.0.3"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:1.2.2", {\ + "packageLocation": "./.yarn/cache/get-intrinsic-npm-1.2.2-3f446d8847-447ff0724d.zip/node_modules/get-intrinsic/",\ + "packageDependencies": [\ + ["get-intrinsic", "npm:1.2.2"],\ + ["function-bind", "npm:1.1.2"],\ + ["has-proto", "npm:1.0.1"],\ + ["has-symbols", "npm:1.0.3"],\ + ["hasown", "npm:2.0.0"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["get-port", [\ @@ -5575,6 +5750,14 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["type-fest", "npm:0.20.2"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:13.24.0", {\ + "packageLocation": "./.yarn/cache/globals-npm-13.24.0-cc7713139c-56066ef058.zip/node_modules/globals/",\ + "packageDependencies": [\ + ["globals", "npm:13.24.0"],\ + ["type-fest", "npm:0.20.2"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["globalthis", [\ @@ -5615,6 +5798,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["hammerjs", [\ + ["npm:2.0.8", {\ + "packageLocation": "./.yarn/cache/hammerjs-npm-2.0.8-f656ba2573-b092da7d15.zip/node_modules/hammerjs/",\ + "packageDependencies": [\ + ["hammerjs", "npm:2.0.8"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["has", [\ ["npm:1.0.3", {\ "packageLocation": "./.yarn/cache/has-npm-1.0.3-b7f00631c1-b9ad53d53b.zip/node_modules/has/",\ @@ -5697,11 +5889,21 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["hasown", [\ + ["npm:2.0.0", {\ + "packageLocation": "./.yarn/cache/hasown-npm-2.0.0-78b794ceef-6151c75ca1.zip/node_modules/hasown/",\ + "packageDependencies": [\ + ["hasown", "npm:2.0.0"],\ + ["function-bind", "npm:1.1.2"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["highcharts", [\ - ["npm:11.1.0", {\ - "packageLocation": "./.yarn/cache/highcharts-npm-11.1.0-0d42a04430-f9b8cdc38b.zip/node_modules/highcharts/",\ + ["npm:11.4.0", {\ + "packageLocation": "./.yarn/cache/highcharts-npm-11.4.0-8a1f46b545-873e661914.zip/node_modules/highcharts/",\ "packageDependencies": [\ - ["highcharts", "npm:11.1.0"]\ + ["highcharts", "npm:11.4.0"]\ ],\ "linkType": "HARD"\ }]\ @@ -5725,28 +5927,28 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["html-validate", [\ - ["npm:8.5.0", {\ - "packageLocation": 
"./.yarn/cache/html-validate-npm-8.5.0-a5c06a51e6-38ef4c832e.zip/node_modules/html-validate/",\ + ["npm:8.18.1", {\ + "packageLocation": "./.yarn/cache/html-validate-npm-8.18.1-c5271a0fb9-53479bf75b.zip/node_modules/html-validate/",\ "packageDependencies": [\ - ["html-validate", "npm:8.5.0"]\ + ["html-validate", "npm:8.18.1"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.5.0", {\ - "packageLocation": "./.yarn/__virtual__/html-validate-virtual-c902a8d1d1/0/cache/html-validate-npm-8.5.0-a5c06a51e6-38ef4c832e.zip/node_modules/html-validate/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.18.1", {\ + "packageLocation": "./.yarn/__virtual__/html-validate-virtual-640261ed3b/0/cache/html-validate-npm-8.18.1-c5271a0fb9-53479bf75b.zip/node_modules/html-validate/",\ "packageDependencies": [\ - ["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.5.0"],\ + ["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.18.1"],\ ["@babel/code-frame", "npm:7.16.7"],\ ["@html-validate/stylish", "npm:4.1.0"],\ - ["@sidvind/better-ajv-errors", "virtual:c902a8d1d11dd34ee7f7648077b48b959e6723fe684e99968d7e8c7b195a007900ac0290074641f9d2ed5797dad844cdb897c4c6de1b44897449ec92f70b817f#npm:2.0.0"],\ + ["@sidvind/better-ajv-errors", "virtual:640261ed3b7a9880a388cc504caacf8ea790dd52f1cb31fbc3be445cb2adc6e73fc87097de620863105eb917510145ef2457d30000c7361456ab67ec0b895136#npm:2.1.3"],\ ["@types/jest", null],\ ["@types/jest-diff", null],\ ["@types/jest-snapshot", null],\ ["@types/vitest", null],\ ["ajv", "npm:8.11.0"],\ - ["deepmerge", "npm:4.2.2"],\ + ["deepmerge", "npm:4.3.1"],\ ["glob", "npm:10.2.4"],\ - ["ignore", "npm:5.2.0"],\ + ["ignore", "npm:5.3.1"],\ ["jest", null],\ ["jest-diff", null],\ ["jest-snapshot", null],\ @@ -5777,10 +5979,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - ["virtual:ee0243765cbdf501388f259b4f1148af5bb4df5c2fa392d4cf1f1d61d3475a9c15a5729ae4be6dd2e258041e618368d112e36aa7b208b01a51861aaaf92fa944#npm:2.0.2", {\ - "packageLocation": "./.yarn/__virtual__/htmlnano-virtual-cd24a048c0/0/cache/htmlnano-npm-2.0.2-a89803bfeb-41f9e0c0e5.zip/node_modules/htmlnano/",\ + ["virtual:cdd2835c1202e86fad55b2266578ff3755267672440481af37bdfff670fd205f561469a10385c20d1ff403af7fad49006bc71ffff21d12592a8ebd0c8be79c0c#npm:2.0.2", {\ + "packageLocation": "./.yarn/__virtual__/htmlnano-virtual-d2bb6df599/0/cache/htmlnano-npm-2.0.2-a89803bfeb-41f9e0c0e5.zip/node_modules/htmlnano/",\ "packageDependencies": [\ - ["htmlnano", "virtual:ee0243765cbdf501388f259b4f1148af5bb4df5c2fa392d4cf1f1d61d3475a9c15a5729ae4be6dd2e258041e618368d112e36aa7b208b01a51861aaaf92fa944#npm:2.0.2"],\ + ["htmlnano", "virtual:cdd2835c1202e86fad55b2266578ff3755267672440481af37bdfff670fd205f561469a10385c20d1ff403af7fad49006bc71ffff21d12592a8ebd0c8be79c0c#npm:2.0.2"],\ ["@types/cssnano", null],\ ["@types/postcss", null],\ ["@types/purgecss", null],\ @@ -5924,6 +6126,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["ignore", "npm:5.2.4"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:5.3.1", {\ + "packageLocation": 
"./.yarn/cache/ignore-npm-5.3.1-f6947c5df7-71d7bb4c1d.zip/node_modules/ignore/",\ + "packageDependencies": [\ + ["ignore", "npm:5.3.1"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["immutable", [\ @@ -5994,16 +6203,6 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["internal-slot", [\ - ["npm:1.0.4", {\ - "packageLocation": "./.yarn/cache/internal-slot-npm-1.0.4-9183007374-8974588d06.zip/node_modules/internal-slot/",\ - "packageDependencies": [\ - ["internal-slot", "npm:1.0.4"],\ - ["get-intrinsic", "npm:1.2.0"],\ - ["has", "npm:1.0.3"],\ - ["side-channel", "npm:1.0.4"]\ - ],\ - "linkType": "HARD"\ - }],\ ["npm:1.0.5", {\ "packageLocation": "./.yarn/cache/internal-slot-npm-1.0.5-a2241f3e66-97e84046bf.zip/node_modules/internal-slot/",\ "packageDependencies": [\ @@ -6095,6 +6294,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["is-builtin-module", [\ + ["npm:3.2.1", {\ + "packageLocation": "./.yarn/cache/is-builtin-module-npm-3.2.1-2f92a5d353-e8f0ffc19a.zip/node_modules/is-builtin-module/",\ + "packageDependencies": [\ + ["is-builtin-module", "npm:3.2.1"],\ + ["builtin-modules", "npm:3.3.0"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["is-callable", [\ ["npm:1.2.4", {\ "packageLocation": "./.yarn/cache/is-callable-npm-1.2.4-03fc17459c-1a28d57dc4.zip/node_modules/is-callable/",\ @@ -6112,22 +6321,6 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["is-core-module", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/is-core-module-npm-2.10.0-6dff9310aa-0f3f77811f.zip/node_modules/is-core-module/",\ - "packageDependencies": [\ - ["is-core-module", "npm:2.10.0"],\ - ["has", "npm:1.0.3"]\ - ],\ - "linkType": "HARD"\ - }],\ - ["npm:2.11.0", {\ - "packageLocation": "./.yarn/cache/is-core-module-npm-2.11.0-70061e141a-f96fd490c6.zip/node_modules/is-core-module/",\ - "packageDependencies": [\ - ["is-core-module", "npm:2.11.0"],\ - ["has", "npm:1.0.3"]\ - ],\ - "linkType": "HARD"\ - }],\ ["npm:2.12.1", {\ "packageLocation": "./.yarn/cache/is-core-module-npm-2.12.1-ce74e89160-f04ea30533.zip/node_modules/is-core-module/",\ "packageDependencies": [\ @@ -6144,6 +6337,14 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "HARD"\ }],\ + ["npm:2.13.1", {\ + "packageLocation": "./.yarn/cache/is-core-module-npm-2.13.1-36e17434f9-256559ee8a.zip/node_modules/is-core-module/",\ + "packageDependencies": [\ + ["is-core-module", "npm:2.13.1"],\ + ["hasown", "npm:2.0.0"]\ + ],\ + "linkType": "HARD"\ + }],\ ["npm:2.9.0", {\ "packageLocation": "./.yarn/cache/is-core-module-npm-2.9.0-5ba77c35ae-b27034318b.zip/node_modules/is-core-module/",\ "packageDependencies": [\ @@ -6319,6 +6520,14 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["has-tostringtag", "npm:1.0.0"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:1.1.12", {\ + "packageLocation": "./.yarn/cache/is-typed-array-npm-1.1.12-6135c91b1a-4c89c4a3be.zip/node_modules/is-typed-array/",\ + "packageDependencies": [\ + ["is-typed-array", "npm:1.1.12"],\ + ["which-typed-array", "npm:1.1.13"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["is-weakref", [\ @@ -6779,19 +6988,19 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["luxon", [\ - ["npm:3.4.3", {\ - "packageLocation": "./.yarn/cache/luxon-npm-3.4.3-1b54517fa6-3eade81506.zip/node_modules/luxon/",\ + ["npm:3.4.4", {\ + "packageLocation": "./.yarn/cache/luxon-npm-3.4.4-c93f95dde8-36c1f99c47.zip/node_modules/luxon/",\ "packageDependencies": [\ - ["luxon", "npm:3.4.3"]\ + ["luxon", "npm:3.4.4"]\ 
],\ "linkType": "HARD"\ }]\ ]],\ ["magic-string", [\ - ["npm:0.30.0", {\ - "packageLocation": "./.yarn/cache/magic-string-npm-0.30.0-20d8e0b6e4-7bdf22e273.zip/node_modules/magic-string/",\ + ["npm:0.30.7", {\ + "packageLocation": "./.yarn/cache/magic-string-npm-0.30.7-0bb5819095-bdf102e36a.zip/node_modules/magic-string/",\ "packageDependencies": [\ - ["magic-string", "npm:0.30.0"],\ + ["magic-string", "npm:0.30.7"],\ ["@jridgewell/sourcemap-codec", "npm:1.4.15"]\ ],\ "linkType": "HARD"\ @@ -6997,13 +7206,20 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["moment", "npm:2.29.4"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:2.30.1", {\ + "packageLocation": "./.yarn/cache/moment-npm-2.30.1-1c51a5c631-859236bab1.zip/node_modules/moment/",\ + "packageDependencies": [\ + ["moment", "npm:2.30.1"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["moment-timezone", [\ - ["npm:0.5.43", {\ - "packageLocation": "./.yarn/cache/moment-timezone-npm-0.5.43-1304d8602a-8075c897ed.zip/node_modules/moment-timezone/",\ + ["npm:0.5.45", {\ + "packageLocation": "./.yarn/cache/moment-timezone-npm-0.5.45-2df3ad72a4-a22e9f983f.zip/node_modules/moment-timezone/",\ "packageDependencies": [\ - ["moment-timezone", "npm:0.5.43"],\ + ["moment-timezone", "npm:0.5.45"],\ ["moment", "npm:2.29.4"]\ ],\ "linkType": "HARD"\ @@ -7033,6 +7249,14 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["msgpackr", [\ + ["npm:1.10.1", {\ + "packageLocation": "./.yarn/cache/msgpackr-npm-1.10.1-5c5ff5c553-e422d18b01.zip/node_modules/msgpackr/",\ + "packageDependencies": [\ + ["msgpackr", "npm:1.10.1"],\ + ["msgpackr-extract", "npm:3.0.2"]\ + ],\ + "linkType": "HARD"\ + }],\ ["npm:1.6.0", {\ "packageLocation": "./.yarn/cache/msgpackr-npm-1.6.0-de9303a46e-7f94acbe93.zip/node_modules/msgpackr/",\ "packageDependencies": [\ @@ -7082,6 +7306,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["muggle-string", [\ + ["npm:0.4.1", {\ + "packageLocation": "./.yarn/cache/muggle-string-npm-0.4.1-fe3c825cc2-85fe1766d1.zip/node_modules/muggle-string/",\ + "packageDependencies": [\ + ["muggle-string", "npm:0.4.1"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["murmurhash-js", [\ ["npm:1.0.0", {\ "packageLocation": "./.yarn/cache/murmurhash-js-npm-1.0.0-b1fa804bc0-083cea92a1.zip/node_modules/murmurhash-js/",\ @@ -7092,37 +7325,38 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["naive-ui", [\ - ["npm:2.35.0", {\ - "packageLocation": "./.yarn/cache/naive-ui-npm-2.35.0-2bb3f5a46d-53239b8cbe.zip/node_modules/naive-ui/",\ + ["npm:2.38.1", {\ + "packageLocation": "./.yarn/cache/naive-ui-npm-2.38.1-0edd2e5816-88a8f981de.zip/node_modules/naive-ui/",\ "packageDependencies": [\ - ["naive-ui", "npm:2.35.0"]\ + ["naive-ui", "npm:2.38.1"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.35.0", {\ - "packageLocation": "./.yarn/__virtual__/naive-ui-virtual-d5901c8fe9/0/cache/naive-ui-npm-2.35.0-2bb3f5a46d-53239b8cbe.zip/node_modules/naive-ui/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.38.1", {\ + "packageLocation": "./.yarn/__virtual__/naive-ui-virtual-32fd9c861d/0/cache/naive-ui-npm-2.38.1-0edd2e5816-88a8f981de.zip/node_modules/naive-ui/",\ "packageDependencies": [\ - ["naive-ui", 
"virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.35.0"],\ - ["@css-render/plugin-bem", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.15.12"],\ - ["@css-render/vue3-ssr", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.15.12"],\ + ["naive-ui", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.38.1"],\ + ["@css-render/plugin-bem", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.15.12"],\ + ["@css-render/vue3-ssr", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.15.12"],\ ["@types/katex", "npm:0.16.5"],\ ["@types/lodash", "npm:4.14.200"],\ ["@types/lodash-es", "npm:4.17.10"],\ ["@types/vue", null],\ ["async-validator", "npm:4.2.5"],\ ["css-render", "npm:0.15.12"],\ + ["csstype", "npm:3.1.3"],\ ["date-fns", "npm:2.30.0"],\ - ["date-fns-tz", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:2.0.0"],\ + ["date-fns-tz", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:2.0.0"],\ ["evtd", "npm:0.2.4"],\ ["highlight.js", "npm:11.9.0"],\ ["lodash", "npm:4.17.21"],\ ["lodash-es", "npm:4.17.21"],\ - ["seemly", "npm:0.3.6"],\ + ["seemly", "npm:0.3.8"],\ ["treemate", "npm:0.3.11"],\ - ["vdirs", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.1.8"],\ - ["vooks", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.2.12"],\ - ["vue", "npm:3.3.4"],\ - ["vueuc", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.4.51"]\ + ["vdirs", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.1.8"],\ + ["vooks", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.2.12"],\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"],\ + ["vueuc", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.4.58"]\ ],\ "packagePeers": [\ "@types/vue",\ @@ -7132,17 +7366,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["nanoid", [\ - ["npm:3.3.3", {\ - "packageLocation": "./.yarn/cache/nanoid-npm-3.3.3-25d865be84-ada019402a.zip/node_modules/nanoid/",\ - "packageDependencies": [\ - ["nanoid", "npm:3.3.3"]\ - ],\ - "linkType": "HARD"\ - }],\ - ["npm:3.3.6", {\ - "packageLocation": "./.yarn/cache/nanoid-npm-3.3.6-e6d6ae7e71-7d0eda6570.zip/node_modules/nanoid/",\ + ["npm:3.3.7", {\ + "packageLocation": 
"./.yarn/cache/nanoid-npm-3.3.7-98824ba130-d36c427e53.zip/node_modules/nanoid/",\ "packageDependencies": [\ - ["nanoid", "npm:3.3.6"]\ + ["nanoid", "npm:3.3.7"]\ ],\ "linkType": "HARD"\ }]\ @@ -7327,10 +7554,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "HARD"\ }],\ - ["npm:1.12.3", {\ - "packageLocation": "./.yarn/cache/object-inspect-npm-1.12.3-1e7d20f5ff-dabfd824d9.zip/node_modules/object-inspect/",\ + ["npm:1.13.1", {\ + "packageLocation": "./.yarn/cache/object-inspect-npm-1.13.1-fd038a2f0a-7d9fa9221d.zip/node_modules/object-inspect/",\ "packageDependencies": [\ - ["object-inspect", "npm:1.12.3"]\ + ["object-inspect", "npm:1.13.1"]\ ],\ "linkType": "HARD"\ }]\ @@ -7358,38 +7585,38 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["object.fromentries", [\ - ["npm:2.0.6", {\ - "packageLocation": "./.yarn/cache/object.fromentries-npm-2.0.6-424cf4cd3c-453c6d6941.zip/node_modules/object.fromentries/",\ + ["npm:2.0.7", {\ + "packageLocation": "./.yarn/cache/object.fromentries-npm-2.0.7-2e38392540-7341ce246e.zip/node_modules/object.fromentries/",\ "packageDependencies": [\ - ["object.fromentries", "npm:2.0.6"],\ + ["object.fromentries", "npm:2.0.7"],\ ["call-bind", "npm:1.0.2"],\ - ["define-properties", "npm:1.1.4"],\ - ["es-abstract", "npm:1.21.1"]\ + ["define-properties", "npm:1.2.0"],\ + ["es-abstract", "npm:1.22.3"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["object.groupby", [\ - ["npm:1.0.0", {\ - "packageLocation": "./.yarn/cache/object.groupby-npm-1.0.0-b360bea3aa-64b00b287d.zip/node_modules/object.groupby/",\ + ["npm:1.0.1", {\ + "packageLocation": "./.yarn/cache/object.groupby-npm-1.0.1-fc268391fe-d7959d6eaa.zip/node_modules/object.groupby/",\ "packageDependencies": [\ - ["object.groupby", "npm:1.0.0"],\ + ["object.groupby", "npm:1.0.1"],\ ["call-bind", "npm:1.0.2"],\ ["define-properties", "npm:1.2.0"],\ - ["es-abstract", "npm:1.22.1"],\ + ["es-abstract", "npm:1.22.3"],\ ["get-intrinsic", "npm:1.2.1"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["object.values", [\ - ["npm:1.1.6", {\ - "packageLocation": "./.yarn/cache/object.values-npm-1.1.6-ab9b67ccd3-f6fff9fd81.zip/node_modules/object.values/",\ + ["npm:1.1.7", {\ + "packageLocation": "./.yarn/cache/object.values-npm-1.1.7-deae619f88-f3e4ae4f21.zip/node_modules/object.values/",\ "packageDependencies": [\ - ["object.values", "npm:1.1.6"],\ + ["object.values", "npm:1.1.7"],\ ["call-bind", "npm:1.0.2"],\ - ["define-properties", "npm:1.1.4"],\ - ["es-abstract", "npm:1.21.1"]\ + ["define-properties", "npm:1.2.0"],\ + ["es-abstract", "npm:1.22.3"]\ ],\ "linkType": "HARD"\ }]\ @@ -7476,28 +7703,28 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["parcel", [\ - ["npm:2.10.0", {\ - "packageLocation": "./.yarn/cache/parcel-npm-2.10.0-8e794fc289-fe25ddcf2d.zip/node_modules/parcel/",\ + ["npm:2.12.0", {\ + "packageLocation": "./.yarn/cache/parcel-npm-2.12.0-96a4bb6cc3-d8e6cb690a.zip/node_modules/parcel/",\ "packageDependencies": [\ - ["parcel", "npm:2.10.0"]\ + ["parcel", "npm:2.12.0"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.10.0", {\ - "packageLocation": "./.yarn/__virtual__/parcel-virtual-71592776e8/0/cache/parcel-npm-2.10.0-8e794fc289-fe25ddcf2d.zip/node_modules/parcel/",\ - "packageDependencies": [\ - ["parcel", 
"virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.10.0"],\ - ["@parcel/config-default", "virtual:71592776e81a3a98123fea990d2adcb9a2eb4cc84ca35ac4be3a6f331fe8d1f764a124c4f9a2dad3afd35076e01667fb0ef9ccd5629fbe405b31f0d1b14a14fd#npm:2.10.0"],\ - ["@parcel/core", "npm:2.10.0"],\ - ["@parcel/diagnostic", "npm:2.10.0"],\ - ["@parcel/events", "npm:2.10.0"],\ - ["@parcel/fs", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ - ["@parcel/logger", "npm:2.10.0"],\ - ["@parcel/package-manager", "virtual:59eaeeba7a5d21408bb7b40531b36a88648baa29ed841afea77b484ce4124f400b3aed2f6c7b6598bebbcce34fe625391a4c262c0e17b5a4f9e1ebbf693fa69b#npm:2.10.0"],\ - ["@parcel/reporter-cli", "npm:2.10.0"],\ - ["@parcel/reporter-dev-server", "npm:2.10.0"],\ - ["@parcel/reporter-tracer", "npm:2.10.0"],\ - ["@parcel/utils", "npm:2.10.0"],\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.12.0", {\ + "packageLocation": "./.yarn/__virtual__/parcel-virtual-fdd74b573c/0/cache/parcel-npm-2.12.0-96a4bb6cc3-d8e6cb690a.zip/node_modules/parcel/",\ + "packageDependencies": [\ + ["parcel", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.12.0"],\ + ["@parcel/config-default", "virtual:fdd74b573cf769bcde15fb47c39fbe0d73f59838182900fd59d3d43b2214ea01b1d45084fb49d0c192fc3e8a49adea5782afcb7fe14e09c63bedaf09f4939e35#npm:2.12.0"],\ + ["@parcel/core", "npm:2.12.0"],\ + ["@parcel/diagnostic", "npm:2.12.0"],\ + ["@parcel/events", "npm:2.12.0"],\ + ["@parcel/fs", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ + ["@parcel/logger", "npm:2.12.0"],\ + ["@parcel/package-manager", "virtual:8f08b883d4cc438aa2ec719eb5cec278f9ea627197c55f35530bcaf9cd4e4738e04be8abe946bd2702b3f5c94b812f529f1b87c05c7d6de04e1ade9b3f3e00f6#npm:2.12.0"],\ + ["@parcel/reporter-cli", "npm:2.12.0"],\ + ["@parcel/reporter-dev-server", "npm:2.12.0"],\ + ["@parcel/reporter-tracer", "npm:2.12.0"],\ + ["@parcel/utils", "npm:2.12.0"],\ ["@types/parcel__core", null],\ ["chalk", "npm:4.1.2"],\ ["commander", "npm:7.2.0"],\ @@ -7624,7 +7851,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["@vue/composition-api", null],\ ["@vue/devtools-api", "npm:6.5.0"],\ ["typescript", null],\ - ["vue", "npm:3.3.4"],\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"],\ ["vue-demi", "virtual:cf6f7439ee76dfd2e7f8f2565ae847d76901434fc49c65702190cdf3d1c61e61c701a5c45b514c4bdeacb8f4bcac9c8a98bd4db3d0bc8e403d9e8db2cf14372a#npm:0.14.5"]\ ],\ "packagePeers": [\ @@ -7655,7 +7882,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["@types/vue__composition-api", null],\ ["@vue/composition-api", null],\ ["pinia", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.1.7"],\ - ["vue", "npm:3.3.4"],\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"],\ ["vue-demi", 
"virtual:f56fcf19bbebc2ada1b28955da8cc216b1e9a569a1a7337d2d1926c1ebd1bc7a5bd91aedae1d05c15c8562f33caf7c59bd3020a667340f6bdc6a7b13fc2ba847#npm:0.12.5"]\ ],\ "packagePeers": [\ @@ -7670,21 +7897,21 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["postcss", [\ - ["npm:8.4.12", {\ - "packageLocation": "./.yarn/cache/postcss-npm-8.4.12-e941d78a98-248e3d0f9b.zip/node_modules/postcss/",\ + ["npm:8.4.33", {\ + "packageLocation": "./.yarn/cache/postcss-npm-8.4.33-6ba8157009-6f98b2af4b.zip/node_modules/postcss/",\ "packageDependencies": [\ - ["postcss", "npm:8.4.12"],\ - ["nanoid", "npm:3.3.3"],\ + ["postcss", "npm:8.4.33"],\ + ["nanoid", "npm:3.3.7"],\ ["picocolors", "npm:1.0.0"],\ ["source-map-js", "npm:1.0.2"]\ ],\ "linkType": "HARD"\ }],\ - ["npm:8.4.27", {\ - "packageLocation": "./.yarn/cache/postcss-npm-8.4.27-2a9f5f8f40-1cdd0c2988.zip/node_modules/postcss/",\ + ["npm:8.4.35", {\ + "packageLocation": "./.yarn/cache/postcss-npm-8.4.35-6bc1848fff-cf3c3124d3.zip/node_modules/postcss/",\ "packageDependencies": [\ - ["postcss", "npm:8.4.27"],\ - ["nanoid", "npm:3.3.6"],\ + ["postcss", "npm:8.4.35"],\ + ["nanoid", "npm:3.3.7"],\ ["picocolors", "npm:1.0.0"],\ ["source-map-js", "npm:1.0.2"]\ ],\ @@ -7692,10 +7919,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["postcss-selector-parser", [\ - ["npm:6.0.13", {\ - "packageLocation": "./.yarn/cache/postcss-selector-parser-npm-6.0.13-f732d92326-f89163338a.zip/node_modules/postcss-selector-parser/",\ + ["npm:6.0.15", {\ + "packageLocation": "./.yarn/cache/postcss-selector-parser-npm-6.0.15-0ec4819b4e-57decb9415.zip/node_modules/postcss-selector-parser/",\ "packageDependencies": [\ - ["postcss-selector-parser", "npm:6.0.13"],\ + ["postcss-selector-parser", "npm:6.0.15"],\ ["cssesc", "npm:3.0.0"],\ ["util-deprecate", "npm:1.0.2"]\ ],\ @@ -8048,23 +8275,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["regexp.prototype.flags", [\ - ["npm:1.4.3", {\ - "packageLocation": "./.yarn/cache/regexp.prototype.flags-npm-1.4.3-df1c08b65d-51228bae73.zip/node_modules/regexp.prototype.flags/",\ - "packageDependencies": [\ - ["regexp.prototype.flags", "npm:1.4.3"],\ - ["call-bind", "npm:1.0.2"],\ - ["define-properties", "npm:1.1.4"],\ - ["functions-have-names", "npm:1.2.3"]\ - ],\ - "linkType": "HARD"\ - }],\ - ["npm:1.5.0", {\ - "packageLocation": "./.yarn/cache/regexp.prototype.flags-npm-1.5.0-5623b9e07f-c541687cdb.zip/node_modules/regexp.prototype.flags/",\ + ["npm:1.5.1", {\ + "packageLocation": "./.yarn/cache/regexp.prototype.flags-npm-1.5.1-b8faeee306-869edff002.zip/node_modules/regexp.prototype.flags/",\ "packageDependencies": [\ - ["regexp.prototype.flags", "npm:1.5.0"],\ + ["regexp.prototype.flags", "npm:1.5.1"],\ ["call-bind", "npm:1.0.2"],\ ["define-properties", "npm:1.2.0"],\ - ["functions-have-names", "npm:1.2.3"]\ + ["set-function-name", "npm:2.0.1"]\ ],\ "linkType": "HARD"\ }]\ @@ -8107,21 +8324,21 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "HARD"\ }],\ - ["patch:resolve@npm%3A1.22.1#~builtin::version=1.22.1&hash=07638b", {\ - "packageLocation": "./.yarn/cache/resolve-patch-46f9469d0d-5656f4d0be.zip/node_modules/resolve/",\ + ["patch:resolve@npm%3A1.22.3#~builtin::version=1.22.3&hash=07638b", {\ + "packageLocation": "./.yarn/cache/resolve-patch-8df1eb26d0-ad59734723.zip/node_modules/resolve/",\ "packageDependencies": [\ - ["resolve", "patch:resolve@npm%3A1.22.1#~builtin::version=1.22.1&hash=07638b"],\ - ["is-core-module", "npm:2.10.0"],\ + ["resolve", 
"patch:resolve@npm%3A1.22.3#~builtin::version=1.22.3&hash=07638b"],\ + ["is-core-module", "npm:2.12.1"],\ ["path-parse", "npm:1.0.7"],\ ["supports-preserve-symlinks-flag", "npm:1.0.0"]\ ],\ "linkType": "HARD"\ }],\ - ["patch:resolve@npm%3A1.22.3#~builtin::version=1.22.3&hash=07638b", {\ - "packageLocation": "./.yarn/cache/resolve-patch-8df1eb26d0-ad59734723.zip/node_modules/resolve/",\ + ["patch:resolve@npm%3A1.22.8#~builtin::version=1.22.8&hash=07638b", {\ + "packageLocation": "./.yarn/cache/resolve-patch-f6b5304cab-5479b7d431.zip/node_modules/resolve/",\ "packageDependencies": [\ - ["resolve", "patch:resolve@npm%3A1.22.3#~builtin::version=1.22.3&hash=07638b"],\ - ["is-core-module", "npm:2.12.1"],\ + ["resolve", "patch:resolve@npm%3A1.22.8#~builtin::version=1.22.8&hash=07638b"],\ + ["is-core-module", "npm:2.13.0"],\ ["path-parse", "npm:1.0.7"],\ ["supports-preserve-symlinks-flag", "npm:1.0.0"]\ ],\ @@ -8184,10 +8401,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["rollup", [\ - ["npm:3.28.0", {\ - "packageLocation": "./.yarn/cache/rollup-npm-3.28.0-4ab1b4022e-6ded4a0d3c.zip/node_modules/rollup/",\ + ["npm:3.29.4", {\ + "packageLocation": "./.yarn/cache/rollup-npm-3.29.4-5e5e5f2087-8bb20a39c8.zip/node_modules/rollup/",\ "packageDependencies": [\ - ["rollup", "npm:3.28.0"],\ + ["rollup", "npm:3.29.4"],\ ["fsevents", "patch:fsevents@npm%3A2.3.2#~builtin::version=2.3.2&hash=18f3a7"]\ ],\ "linkType": "HARD"\ @@ -8198,40 +8415,45 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "packageLocation": "./",\ "packageDependencies": [\ ["root-workspace-0b6124", "workspace:."],\ - ["@fullcalendar/bootstrap5", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/core", "npm:6.1.9"],\ - ["@fullcalendar/daygrid", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/icalendar", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/interaction", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/list", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/luxon3", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/timegrid", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@fullcalendar/vue3", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.9"],\ - ["@parcel/optimizer-data-url", "npm:2.10.0"],\ - ["@parcel/transformer-inline-string", "npm:2.10.0"],\ - ["@parcel/transformer-sass", "npm:2.10.0"],\ + ["@fullcalendar/bootstrap5", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/core", "npm:6.1.11"],\ + ["@fullcalendar/daygrid", 
"virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/icalendar", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/interaction", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/list", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/luxon3", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/timegrid", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@fullcalendar/vue3", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.11"],\ + ["@kurkle/color", "npm:0.3.1"],\ + ["@parcel/optimizer-data-url", "npm:2.12.0"],\ + ["@parcel/transformer-inline-string", "npm:2.12.0"],\ + ["@parcel/transformer-sass", "npm:2.12.0"],\ ["@popperjs/core", "npm:2.11.8"],\ - ["@rollup/pluginutils", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.0.5"],\ + ["@rollup/pluginutils", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.1.0"],\ ["@twuni/emojify", "npm:1.0.2"],\ - ["@vitejs/plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.4.0"],\ - ["bootstrap", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.2"],\ - ["bootstrap-icons", "npm:1.11.1"],\ + ["@vitejs/plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.6.2"],\ + ["@vue/language-plugin-pug", "npm:2.0.7"],\ + ["bootstrap", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.3"],\ + ["bootstrap-icons", "npm:1.11.3"],\ ["browser-fs-access", "npm:0.35.0"],\ ["browserlist", "npm:1.0.1"],\ - ["c8", "npm:8.0.1"],\ - ["caniuse-lite", "npm:1.0.30001538"],\ - ["d3", "npm:7.8.5"],\ - ["eslint", "npm:8.51.0"],\ + ["c8", "npm:9.1.0"],\ + ["caniuse-lite", "npm:1.0.30001603"],\ + ["chart.js", "npm:4.5.1"],\ + ["chartjs-plugin-autocolors", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:0.3.1"],\ + ["chartjs-plugin-zoom", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.2.0"],\ + ["d3", "npm:7.9.0"],\ + ["eslint", "npm:8.57.0"],\ ["eslint-config-standard", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:17.1.0"],\ 
["eslint-plugin-cypress", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.15.1"],\ - ["eslint-plugin-import", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.28.1"],\ - ["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.2.0"],\ + ["eslint-plugin-import", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.29.1"],\ + ["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.6.2"],\ ["eslint-plugin-node", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:11.1.0"],\ ["eslint-plugin-promise", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.1"],\ - ["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.17.0"],\ + ["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.24.0"],\ ["file-saver", "npm:2.0.5"],\ - ["highcharts", "npm:11.1.0"],\ - ["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.5.0"],\ + ["highcharts", "npm:11.4.0"],\ + ["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.18.1"],\ ["ical.js", "npm:1.5.0"],\ ["jquery", "npm:3.7.1"],\ ["jquery-migrate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.1"],\ @@ -8239,28 +8461,28 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["list.js", "npm:2.3.1"],\ ["lodash", "npm:4.17.21"],\ ["lodash-es", "npm:4.17.21"],\ - ["luxon", "npm:3.4.3"],\ - ["moment", "npm:2.29.4"],\ - ["moment-timezone", "npm:0.5.43"],\ + ["luxon", "npm:3.4.4"],\ + ["moment", "npm:2.30.1"],\ + ["moment-timezone", "npm:0.5.45"],\ ["ms", "npm:2.1.3"],\ ["murmurhash-js", "npm:1.0.0"],\ - ["naive-ui", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.35.0"],\ - ["parcel", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.10.0"],\ + ["naive-ui", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.38.1"],\ + ["parcel", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.12.0"],\ ["pinia", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.1.7"],\ ["pinia-plugin-persist", 
"virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:1.0.0"],\ ["pug", "npm:3.0.2"],\ - ["sass", "npm:1.69.4"],\ + ["sass", "npm:1.72.0"],\ ["seedrandom", "npm:3.0.5"],\ ["select2", "npm:4.1.0-rc.0"],\ ["select2-bootstrap-5-theme", "npm:1.3.0"],\ ["send", "npm:0.18.0"],\ ["shepherd.js", "npm:11.2.0"],\ ["slugify", "npm:1.6.6"],\ - ["sortablejs", "npm:1.15.0"],\ + ["sortablejs", "npm:1.15.2"],\ ["vanillajs-datepicker", "npm:1.3.4"],\ - ["vite", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.4.11"],\ - ["vue", "npm:3.3.4"],\ - ["vue-router", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.2.5"],\ + ["vite", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.5.3"],\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"],\ + ["vue-router", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.3.0"],\ ["zxcvbn", "npm:4.4.2"]\ ],\ "linkType": "SOFT"\ @@ -8286,10 +8508,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["safe-array-concat", [\ - ["npm:1.0.0", {\ - "packageLocation": "./.yarn/cache/safe-array-concat-npm-1.0.0-897b2c630a-f43cb98fe3.zip/node_modules/safe-array-concat/",\ + ["npm:1.0.1", {\ + "packageLocation": "./.yarn/cache/safe-array-concat-npm-1.0.1-8a42907bbf-001ecf1d8a.zip/node_modules/safe-array-concat/",\ "packageDependencies": [\ - ["safe-array-concat", "npm:1.0.0"],\ + ["safe-array-concat", "npm:1.0.1"],\ ["call-bind", "npm:1.0.2"],\ ["get-intrinsic", "npm:1.2.1"],\ ["has-symbols", "npm:1.0.3"],\ @@ -8346,10 +8568,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "HARD"\ }],\ - ["npm:1.69.4", {\ - "packageLocation": "./.yarn/cache/sass-npm-1.69.4-bea57e4b30-ed5558445b.zip/node_modules/sass/",\ + ["npm:1.72.0", {\ + "packageLocation": "./.yarn/cache/sass-npm-1.72.0-fb38bb530c-f420079c7d.zip/node_modules/sass/",\ "packageDependencies": [\ - ["sass", "npm:1.69.4"],\ + ["sass", "npm:1.72.0"],\ ["chokidar", "npm:3.5.3"],\ ["immutable", "npm:4.0.0"],\ ["source-map-js", "npm:1.0.2"]\ @@ -8373,6 +8595,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["seemly", "npm:0.3.6"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:0.3.8", {\ + "packageLocation": "./.yarn/cache/seemly-npm-0.3.8-4940336497-98171fd4d9.zip/node_modules/seemly/",\ + "packageDependencies": [\ + ["seemly", "npm:0.3.8"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["select2", [\ @@ -8440,6 +8669,14 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["lru-cache", "npm:6.0.0"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:7.6.0", {\ + "packageLocation": "./.yarn/cache/semver-npm-7.6.0-f4630729f6-7427f05b70.zip/node_modules/semver/",\ + "packageDependencies": [\ + ["semver", "npm:7.6.0"],\ + ["lru-cache", "npm:6.0.0"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["send", [\ @@ -8473,6 +8710,31 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["set-function-length", [\ + ["npm:1.1.1", {\ + "packageLocation": 
"./.yarn/cache/set-function-length-npm-1.1.1-d362bf8221-c131d7569c.zip/node_modules/set-function-length/",\ + "packageDependencies": [\ + ["set-function-length", "npm:1.1.1"],\ + ["define-data-property", "npm:1.1.1"],\ + ["get-intrinsic", "npm:1.2.1"],\ + ["gopd", "npm:1.0.1"],\ + ["has-property-descriptors", "npm:1.0.0"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ + ["set-function-name", [\ + ["npm:2.0.1", {\ + "packageLocation": "./.yarn/cache/set-function-name-npm-2.0.1-a9f970eea0-4975d17d90.zip/node_modules/set-function-name/",\ + "packageDependencies": [\ + ["set-function-name", "npm:2.0.1"],\ + ["define-data-property", "npm:1.1.1"],\ + ["functions-have-names", "npm:1.2.3"],\ + ["has-property-descriptors", "npm:1.0.0"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["setprototypeof", [\ ["npm:1.2.0", {\ "packageLocation": "./.yarn/cache/setprototypeof-npm-1.2.0-0fedbdcd3a-be18cbbf70.zip/node_modules/setprototypeof/",\ @@ -8591,10 +8853,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["sortablejs", [\ - ["npm:1.15.0", {\ - "packageLocation": "./.yarn/cache/sortablejs-npm-1.15.0-f3a393abcc-bb82223a66.zip/node_modules/sortablejs/",\ + ["npm:1.15.2", {\ + "packageLocation": "./.yarn/cache/sortablejs-npm-1.15.2-73347ae85a-36b20b144f.zip/node_modules/sortablejs/",\ "packageDependencies": [\ - ["sortablejs", "npm:1.15.0"]\ + ["sortablejs", "npm:1.15.2"]\ ],\ "linkType": "HARD"\ }]\ @@ -8686,37 +8948,37 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["string.prototype.trim", [\ - ["npm:1.2.7", {\ - "packageLocation": "./.yarn/cache/string.prototype.trim-npm-1.2.7-3fbaf3b9d2-05b7b2d6af.zip/node_modules/string.prototype.trim/",\ + ["npm:1.2.8", {\ + "packageLocation": "./.yarn/cache/string.prototype.trim-npm-1.2.8-7ed4517ce8-49eb1a862a.zip/node_modules/string.prototype.trim/",\ "packageDependencies": [\ - ["string.prototype.trim", "npm:1.2.7"],\ + ["string.prototype.trim", "npm:1.2.8"],\ ["call-bind", "npm:1.0.2"],\ - ["define-properties", "npm:1.1.4"],\ - ["es-abstract", "npm:1.21.1"]\ + ["define-properties", "npm:1.2.0"],\ + ["es-abstract", "npm:1.22.3"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["string.prototype.trimend", [\ - ["npm:1.0.6", {\ - "packageLocation": "./.yarn/cache/string.prototype.trimend-npm-1.0.6-304246ecc1-0fdc34645a.zip/node_modules/string.prototype.trimend/",\ + ["npm:1.0.7", {\ + "packageLocation": "./.yarn/cache/string.prototype.trimend-npm-1.0.7-159b9dcfbc-2375516272.zip/node_modules/string.prototype.trimend/",\ "packageDependencies": [\ - ["string.prototype.trimend", "npm:1.0.6"],\ + ["string.prototype.trimend", "npm:1.0.7"],\ ["call-bind", "npm:1.0.2"],\ - ["define-properties", "npm:1.1.4"],\ - ["es-abstract", "npm:1.21.1"]\ + ["define-properties", "npm:1.2.0"],\ + ["es-abstract", "npm:1.22.3"]\ ],\ "linkType": "HARD"\ }]\ ]],\ ["string.prototype.trimstart", [\ - ["npm:1.0.6", {\ - "packageLocation": "./.yarn/cache/string.prototype.trimstart-npm-1.0.6-0926caea6c-89080feef4.zip/node_modules/string.prototype.trimstart/",\ + ["npm:1.0.7", {\ + "packageLocation": "./.yarn/cache/string.prototype.trimstart-npm-1.0.7-ae2f803b78-13d0c2cb0d.zip/node_modules/string.prototype.trimstart/",\ "packageDependencies": [\ - ["string.prototype.trimstart", "npm:1.0.6"],\ + ["string.prototype.trimstart", "npm:1.0.7"],\ ["call-bind", "npm:1.0.2"],\ - ["define-properties", "npm:1.1.4"],\ - ["es-abstract", "npm:1.21.1"]\ + ["define-properties", "npm:1.2.0"],\ + ["es-abstract", "npm:1.22.3"]\ ],\ "linkType": "HARD"\ }]\ @@ -8911,10 +9173,10 @@ function 
$$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["tsconfig-paths", [\ - ["npm:3.14.2", {\ - "packageLocation": "./.yarn/cache/tsconfig-paths-npm-3.14.2-90ce75420d-a6162eaa1a.zip/node_modules/tsconfig-paths/",\ + ["npm:3.15.0", {\ + "packageLocation": "./.yarn/cache/tsconfig-paths-npm-3.15.0-ff68930e0e-59f35407a3.zip/node_modules/tsconfig-paths/",\ "packageDependencies": [\ - ["tsconfig-paths", "npm:3.14.2"],\ + ["tsconfig-paths", "npm:3.15.0"],\ ["@types/json5", "npm:0.0.29"],\ ["json5", "npm:1.0.2"],\ ["minimist", "npm:1.2.6"],\ @@ -9092,13 +9354,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - ["virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.1.8", {\ - "packageLocation": "./.yarn/__virtual__/vdirs-virtual-35aac8a6c6/0/cache/vdirs-npm-0.1.8-59a32a98d6-a7be8ccad3.zip/node_modules/vdirs/",\ + ["virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.1.8", {\ + "packageLocation": "./.yarn/__virtual__/vdirs-virtual-6e8e27ef7d/0/cache/vdirs-npm-0.1.8-59a32a98d6-a7be8ccad3.zip/node_modules/vdirs/",\ "packageDependencies": [\ - ["vdirs", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.1.8"],\ + ["vdirs", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.1.8"],\ ["@types/vue", null],\ ["evtd", "npm:0.2.3"],\ - ["vue", "npm:3.3.4"]\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"]\ ],\ "packagePeers": [\ "@types/vue",\ @@ -9108,17 +9370,17 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["vite", [\ - ["npm:4.4.11", {\ - "packageLocation": "./.yarn/cache/vite-npm-4.4.11-e7ab057df9-c22145c838.zip/node_modules/vite/",\ + ["npm:4.5.3", {\ + "packageLocation": "./.yarn/cache/vite-npm-4.5.3-5cedc7cb8f-fd3f512ce4.zip/node_modules/vite/",\ "packageDependencies": [\ - ["vite", "npm:4.4.11"]\ + ["vite", "npm:4.5.3"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.4.11", {\ - "packageLocation": "./.yarn/__virtual__/vite-virtual-6db9baa143/0/cache/vite-npm-4.4.11-e7ab057df9-c22145c838.zip/node_modules/vite/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.5.3", {\ + "packageLocation": "./.yarn/__virtual__/vite-virtual-69c30fd9fd/0/cache/vite-npm-4.5.3-5cedc7cb8f-fd3f512ce4.zip/node_modules/vite/",\ "packageDependencies": [\ - ["vite", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.4.11"],\ + ["vite", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.5.3"],\ ["@types/less", null],\ ["@types/lightningcss", null],\ ["@types/node", null],\ @@ -9130,9 +9392,9 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["fsevents", "patch:fsevents@npm%3A2.3.2#~builtin::version=2.3.2&hash=18f3a7"],\ ["less", null],\ ["lightningcss", null],\ - ["postcss", 
"npm:8.4.27"],\ - ["rollup", "npm:3.28.0"],\ - ["sass", "npm:1.69.4"],\ + ["postcss", "npm:8.4.33"],\ + ["rollup", "npm:3.29.4"],\ + ["sass", "npm:1.72.0"],\ ["stylus", null],\ ["sugarss", null],\ ["terser", null]\ @@ -9164,6 +9426,46 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["volar-service-html", [\ + ["npm:0.0.34", {\ + "packageLocation": "./.yarn/cache/volar-service-html-npm-0.0.34-32b6d24136-83b50cd805.zip/node_modules/volar-service-html/",\ + "packageDependencies": [\ + ["volar-service-html", "npm:0.0.34"]\ + ],\ + "linkType": "SOFT"\ + }],\ + ["virtual:6f5429e17c4ecd390af605a4e97ecc7b34f2f1374a5e30c21f0a978cbdc904738a42d0d6f5d44d2e969250218b3c205853d6afefd88b87bcda877286d12bef83#npm:0.0.34", {\ + "packageLocation": "./.yarn/__virtual__/volar-service-html-virtual-5a9107a24d/0/cache/volar-service-html-npm-0.0.34-32b6d24136-83b50cd805.zip/node_modules/volar-service-html/",\ + "packageDependencies": [\ + ["volar-service-html", "virtual:6f5429e17c4ecd390af605a4e97ecc7b34f2f1374a5e30c21f0a978cbdc904738a42d0d6f5d44d2e969250218b3c205853d6afefd88b87bcda877286d12bef83#npm:0.0.34"],\ + ["@types/volar__language-service", null],\ + ["@volar/language-service", "npm:2.1.4"],\ + ["vscode-html-languageservice", "npm:5.1.2"],\ + ["vscode-languageserver-textdocument", "npm:1.0.11"],\ + ["vscode-uri", "npm:3.0.8"]\ + ],\ + "packagePeers": [\ + "@types/volar__language-service",\ + "@volar/language-service"\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ + ["volar-service-pug", [\ + ["npm:0.0.34", {\ + "packageLocation": "./.yarn/cache/volar-service-pug-npm-0.0.34-6f5429e17c-4691aa1c8e.zip/node_modules/volar-service-pug/",\ + "packageDependencies": [\ + ["volar-service-pug", "npm:0.0.34"],\ + ["@volar/language-service", "npm:2.1.4"],\ + ["pug-lexer", "npm:5.0.1"],\ + ["pug-parser", "npm:6.0.0"],\ + ["volar-service-html", "virtual:6f5429e17c4ecd390af605a4e97ecc7b34f2f1374a5e30c21f0a978cbdc904738a42d0d6f5d44d2e969250218b3c205853d6afefd88b87bcda877286d12bef83#npm:0.0.34"],\ + ["vscode-html-languageservice", "npm:5.1.2"],\ + ["vscode-languageserver-textdocument", "npm:1.0.11"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["vooks", [\ ["npm:0.2.12", {\ "packageLocation": "./.yarn/cache/vooks-npm-0.2.12-0d1a2d856b-e6841ec5b6.zip/node_modules/vooks/",\ @@ -9172,13 +9474,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - ["virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.2.12", {\ - "packageLocation": "./.yarn/__virtual__/vooks-virtual-f099143e8a/0/cache/vooks-npm-0.2.12-0d1a2d856b-e6841ec5b6.zip/node_modules/vooks/",\ + ["virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.2.12", {\ + "packageLocation": "./.yarn/__virtual__/vooks-virtual-ca0a47c4bf/0/cache/vooks-npm-0.2.12-0d1a2d856b-e6841ec5b6.zip/node_modules/vooks/",\ "packageDependencies": [\ - ["vooks", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.2.12"],\ + ["vooks", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.2.12"],\ ["@types/vue", null],\ ["evtd", "npm:0.2.3"],\ - ["vue", "npm:3.3.4"]\ + ["vue", 
"virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"]\ ],\ "packagePeers": [\ "@types/vue",\ @@ -9187,16 +9489,89 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["vscode-html-languageservice", [\ + ["npm:5.1.2", {\ + "packageLocation": "./.yarn/cache/vscode-html-languageservice-npm-5.1.2-2ea2618bdd-3a2a5ee5ad.zip/node_modules/vscode-html-languageservice/",\ + "packageDependencies": [\ + ["vscode-html-languageservice", "npm:5.1.2"],\ + ["@vscode/l10n", "npm:0.0.18"],\ + ["vscode-languageserver-textdocument", "npm:1.0.11"],\ + ["vscode-languageserver-types", "npm:3.17.5"],\ + ["vscode-uri", "npm:3.0.8"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ + ["vscode-jsonrpc", [\ + ["npm:8.2.0", {\ + "packageLocation": "./.yarn/cache/vscode-jsonrpc-npm-8.2.0-b7d2e5b553-f302a01e59.zip/node_modules/vscode-jsonrpc/",\ + "packageDependencies": [\ + ["vscode-jsonrpc", "npm:8.2.0"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ + ["vscode-languageserver-protocol", [\ + ["npm:3.17.5", {\ + "packageLocation": "./.yarn/cache/vscode-languageserver-protocol-npm-3.17.5-2b07e16989-dfb42d276d.zip/node_modules/vscode-languageserver-protocol/",\ + "packageDependencies": [\ + ["vscode-languageserver-protocol", "npm:3.17.5"],\ + ["vscode-jsonrpc", "npm:8.2.0"],\ + ["vscode-languageserver-types", "npm:3.17.5"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ + ["vscode-languageserver-textdocument", [\ + ["npm:1.0.11", {\ + "packageLocation": "./.yarn/cache/vscode-languageserver-textdocument-npm-1.0.11-6fc94d2b7b-ea7cdc9d4f.zip/node_modules/vscode-languageserver-textdocument/",\ + "packageDependencies": [\ + ["vscode-languageserver-textdocument", "npm:1.0.11"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ + ["vscode-languageserver-types", [\ + ["npm:3.17.5", {\ + "packageLocation": "./.yarn/cache/vscode-languageserver-types-npm-3.17.5-aca3b71a5a-79b420e757.zip/node_modules/vscode-languageserver-types/",\ + "packageDependencies": [\ + ["vscode-languageserver-types", "npm:3.17.5"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ + ["vscode-uri", [\ + ["npm:3.0.8", {\ + "packageLocation": "./.yarn/cache/vscode-uri-npm-3.0.8-56f46b9d24-5142491268.zip/node_modules/vscode-uri/",\ + "packageDependencies": [\ + ["vscode-uri", "npm:3.0.8"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["vue", [\ - ["npm:3.3.4", {\ - "packageLocation": "./.yarn/cache/vue-npm-3.3.4-174fadbea4-58b6c62a66.zip/node_modules/vue/",\ + ["npm:3.4.21", {\ + "packageLocation": "./.yarn/cache/vue-npm-3.4.21-02110aa6d9-3c477982a0.zip/node_modules/vue/",\ + "packageDependencies": [\ + ["vue", "npm:3.4.21"]\ + ],\ + "linkType": "SOFT"\ + }],\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21", {\ + "packageLocation": "./.yarn/__virtual__/vue-virtual-b79af6274d/0/cache/vue-npm-3.4.21-02110aa6d9-3c477982a0.zip/node_modules/vue/",\ "packageDependencies": [\ - ["vue", "npm:3.3.4"],\ - ["@vue/compiler-dom", "npm:3.3.4"],\ - ["@vue/compiler-sfc", "npm:3.3.4"],\ - ["@vue/runtime-dom", "npm:3.3.4"],\ - ["@vue/server-renderer", "virtual:174fadbea44493263729fa2b6f65daab4b25e9b0a7a008d5887cf4635c65a7954c0e996fdf3e5d8529a9ab481440768b691dd3c59aca8db6f1f12ce74ed59685#npm:3.3.4"],\ - ["@vue/shared", "npm:3.3.4"]\ + ["vue", 
"virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"],\ + ["@types/typescript", null],\ + ["@vue/compiler-dom", "npm:3.4.21"],\ + ["@vue/compiler-sfc", "npm:3.4.21"],\ + ["@vue/runtime-dom", "npm:3.4.21"],\ + ["@vue/server-renderer", "virtual:b79af6274dddda2b283f42be2b827e30c3e5389bce2938ee73bdb74ee9781811fc079c6836719e57940708d59b3beeb14d9e3c12f37f2d22582a53e6c32e4c97#npm:3.4.21"],\ + ["@vue/shared", "npm:3.4.21"],\ + ["typescript", null]\ + ],\ + "packagePeers": [\ + "@types/typescript",\ + "typescript"\ ],\ "linkType": "HARD"\ }]\ @@ -9223,7 +9598,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["@types/vue", null],\ ["@types/vue__composition-api", null],\ ["@vue/composition-api", null],\ - ["vue", "npm:3.3.4"]\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"]\ ],\ "packagePeers": [\ "@types/vue",\ @@ -9240,7 +9615,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["@types/vue", null],\ ["@types/vue__composition-api", null],\ ["@vue/composition-api", null],\ - ["vue", "npm:3.3.4"]\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"]\ ],\ "packagePeers": [\ "@types/vue",\ @@ -9252,20 +9627,20 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["vue-eslint-parser", [\ - ["npm:9.3.1", {\ - "packageLocation": "./.yarn/cache/vue-eslint-parser-npm-9.3.1-a0feb51670-6d1476b45f.zip/node_modules/vue-eslint-parser/",\ + ["npm:9.4.2", {\ + "packageLocation": "./.yarn/cache/vue-eslint-parser-npm-9.4.2-3e4e696025-67f14c8ea1.zip/node_modules/vue-eslint-parser/",\ "packageDependencies": [\ - ["vue-eslint-parser", "npm:9.3.1"]\ + ["vue-eslint-parser", "npm:9.4.2"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:e39e5d6bef7a93bd3b21c5c9ba6ef825c92fc73c8d9c9e01699e1dc11e40fd3bc150ba16509e2cf59495cb098c32b2e4a85c0c21802fddeffc3208b01f4f5a16#npm:9.3.1", {\ - "packageLocation": "./.yarn/__virtual__/vue-eslint-parser-virtual-a1cfa0fa2e/0/cache/vue-eslint-parser-npm-9.3.1-a0feb51670-6d1476b45f.zip/node_modules/vue-eslint-parser/",\ + ["virtual:e080dd5dc65fb3541eb98fd929c3a1d3733f3aff4bb24b09a6b5cce9fba4a29aca07e286ef93079f2144caa0fd33bb6545549286d3a9f2b9a211caa1f4b68ff9#npm:9.4.2", {\ + "packageLocation": "./.yarn/__virtual__/vue-eslint-parser-virtual-f703c550a2/0/cache/vue-eslint-parser-npm-9.4.2-3e4e696025-67f14c8ea1.zip/node_modules/vue-eslint-parser/",\ "packageDependencies": [\ - ["vue-eslint-parser", "virtual:e39e5d6bef7a93bd3b21c5c9ba6ef825c92fc73c8d9c9e01699e1dc11e40fd3bc150ba16509e2cf59495cb098c32b2e4a85c0c21802fddeffc3208b01f4f5a16#npm:9.3.1"],\ + ["vue-eslint-parser", "virtual:e080dd5dc65fb3541eb98fd929c3a1d3733f3aff4bb24b09a6b5cce9fba4a29aca07e286ef93079f2144caa0fd33bb6545549286d3a9f2b9a211caa1f4b68ff9#npm:9.4.2"],\ ["@types/eslint", null],\ ["debug", "virtual:b86a9fb34323a98c6519528ed55faa0d9b44ca8879307c0b29aa384bde47ff59a7d0c9051b31246f14521dfb71ba3c5d6d0b35c29fffc17bf875aa6ad977d9e8#npm:4.3.4"],\ - ["eslint", "npm:8.51.0"],\ + ["eslint", "npm:8.57.0"],\ ["eslint-scope", "npm:7.1.1"],\ ["eslint-visitor-keys", "npm:3.3.0"],\ ["espree", "npm:9.3.2"],\ @@ -9281,20 +9656,20 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["vue-router", [\ - ["npm:4.2.5", {\ - "packageLocation": 
"./.yarn/cache/vue-router-npm-4.2.5-3479f41e41-2449db4f3a.zip/node_modules/vue-router/",\ + ["npm:4.3.0", {\ + "packageLocation": "./.yarn/cache/vue-router-npm-4.3.0-b765d40138-0059261d39.zip/node_modules/vue-router/",\ "packageDependencies": [\ - ["vue-router", "npm:4.2.5"]\ + ["vue-router", "npm:4.3.0"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.2.5", {\ - "packageLocation": "./.yarn/__virtual__/vue-router-virtual-2017aa030a/0/cache/vue-router-npm-4.2.5-3479f41e41-2449db4f3a.zip/node_modules/vue-router/",\ + ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.3.0", {\ + "packageLocation": "./.yarn/__virtual__/vue-router-virtual-82f54143bf/0/cache/vue-router-npm-4.3.0-b765d40138-0059261d39.zip/node_modules/vue-router/",\ "packageDependencies": [\ - ["vue-router", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.2.5"],\ + ["vue-router", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.3.0"],\ ["@types/vue", null],\ - ["@vue/devtools-api", "npm:6.5.0"],\ - ["vue", "npm:3.3.4"]\ + ["@vue/devtools-api", "npm:6.6.1"],\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"]\ ],\ "packagePeers": [\ "@types/vue",\ @@ -9304,26 +9679,26 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["vueuc", [\ - ["npm:0.4.51", {\ - "packageLocation": "./.yarn/cache/vueuc-npm-0.4.51-794074113f-7969659fac.zip/node_modules/vueuc/",\ + ["npm:0.4.58", {\ + "packageLocation": "./.yarn/cache/vueuc-npm-0.4.58-be5584770c-fb0b9a69be.zip/node_modules/vueuc/",\ "packageDependencies": [\ - ["vueuc", "npm:0.4.51"]\ + ["vueuc", "npm:0.4.58"]\ ],\ "linkType": "SOFT"\ }],\ - ["virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.4.51", {\ - "packageLocation": "./.yarn/__virtual__/vueuc-virtual-07229bbf54/0/cache/vueuc-npm-0.4.51-794074113f-7969659fac.zip/node_modules/vueuc/",\ + ["virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.4.58", {\ + "packageLocation": "./.yarn/__virtual__/vueuc-virtual-2366be83ef/0/cache/vueuc-npm-0.4.58-be5584770c-fb0b9a69be.zip/node_modules/vueuc/",\ "packageDependencies": [\ - ["vueuc", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.4.51"],\ - ["@css-render/vue3-ssr", "virtual:07229bbf54bc488d21e48f65df3fcd2cdabd1e401dfffccce7403d04695be90e478a0d508694f896481602b0f9db804b9f384dfa051fe08e896fd18fd1fe0b6b#npm:0.15.10"],\ + ["vueuc", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.4.58"],\ + ["@css-render/vue3-ssr", "virtual:2366be83ef58a728ebb5a5e9ed4600f4465f98b2a844262fcfbe89415361d5d5f9e964ec3b9a72d6a5004f37c1024d017c65e67473dd9cc39cd61f51768c65e6#npm:0.15.10"],\ ["@juggle/resize-observer", "npm:3.3.1"],\ ["@types/vue", null],\ ["css-render", "npm:0.15.10"],\ ["evtd", "npm:0.2.4"],\ ["seemly", 
"npm:0.3.6"],\ - ["vdirs", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.1.8"],\ - ["vooks", "virtual:d5901c8fe9a0c32ef9bd30914b8624afdd53ad520846726499200f014090a72c0a1d5e3737654e39af21acf7bf6f0993bedc3c908b3b8804daa47faed23d0085#npm:0.2.12"],\ - ["vue", "npm:3.3.4"]\ + ["vdirs", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.1.8"],\ + ["vooks", "virtual:32fd9c861d759cd42dabb479e4fd652286369e629cc7ef63c9cf4f1af5387c64be25fafc985023ea8534b1ec1f4cc92e6c918c7f3b594aa0f8acad026c671a6a#npm:0.2.12"],\ + ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.21"]\ ],\ "packagePeers": [\ "@types/vue",\ @@ -9366,30 +9741,17 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["which-typed-array", [\ - ["npm:1.1.11", {\ - "packageLocation": "./.yarn/cache/which-typed-array-npm-1.1.11-f37f0cefe2-711ffc8ef8.zip/node_modules/which-typed-array/",\ + ["npm:1.1.13", {\ + "packageLocation": "./.yarn/cache/which-typed-array-npm-1.1.13-92c18b4878-3828a0d5d7.zip/node_modules/which-typed-array/",\ "packageDependencies": [\ - ["which-typed-array", "npm:1.1.11"],\ + ["which-typed-array", "npm:1.1.13"],\ ["available-typed-arrays", "npm:1.0.5"],\ - ["call-bind", "npm:1.0.2"],\ + ["call-bind", "npm:1.0.5"],\ ["for-each", "npm:0.3.3"],\ ["gopd", "npm:1.0.1"],\ ["has-tostringtag", "npm:1.0.0"]\ ],\ "linkType": "HARD"\ - }],\ - ["npm:1.1.9", {\ - "packageLocation": "./.yarn/cache/which-typed-array-npm-1.1.9-9559c95dfc-fe0178ca44.zip/node_modules/which-typed-array/",\ - "packageDependencies": [\ - ["which-typed-array", "npm:1.1.9"],\ - ["available-typed-arrays", "npm:1.0.5"],\ - ["call-bind", "npm:1.0.2"],\ - ["for-each", "npm:0.3.3"],\ - ["gopd", "npm:1.0.1"],\ - ["has-tostringtag", "npm:1.0.0"],\ - ["is-typed-array", "npm:1.1.10"]\ - ],\ - "linkType": "HARD"\ }]\ ]],\ ["wide-align", [\ diff --git a/.vscode/settings.json b/.vscode/settings.json index b0ceba5c9d..b323cd02f7 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -56,5 +56,6 @@ "python.linting.pylintArgs": ["--load-plugins", "pylint_django"], "python.testing.pytestEnabled": false, "python.testing.unittestEnabled": false, - "python.linting.enabled": true + "python.linting.enabled": true, + "python.terminal.shellIntegration.enabled": false } diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 4bd0b99363..8b36b0e6ac 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -105,10 +105,11 @@ "command": "/usr/local/bin/python", "args": [ "-m", - "smtpd", + "aiosmtpd", "-n", "-c", - "DebuggingServer", + "ietf.utils.aiosmtpd.DevDebuggingHandler", + "-l", "localhost:2025" ], "presentation": { diff --git a/.yarn/cache/@babel-parser-npm-7.22.4-32183c89ee-0ca6d3a2d9.zip b/.yarn/cache/@babel-parser-npm-7.22.4-32183c89ee-0ca6d3a2d9.zip deleted file mode 100644 index 98b1f16818..0000000000 Binary files a/.yarn/cache/@babel-parser-npm-7.22.4-32183c89ee-0ca6d3a2d9.zip and /dev/null differ diff --git a/.yarn/cache/@babel-parser-npm-7.23.9-720a0b56cb-e7cd4960ac.zip b/.yarn/cache/@babel-parser-npm-7.23.9-720a0b56cb-e7cd4960ac.zip new file mode 100644 index 0000000000..7b6c44fc3f Binary files /dev/null and b/.yarn/cache/@babel-parser-npm-7.23.9-720a0b56cb-e7cd4960ac.zip differ diff --git 
a/.yarn/cache/@eslint-community-regexpp-npm-4.10.0-6bfb984c81-2a6e345429.zip b/.yarn/cache/@eslint-community-regexpp-npm-4.10.0-6bfb984c81-2a6e345429.zip new file mode 100644 index 0000000000..7ef5a48973 Binary files /dev/null and b/.yarn/cache/@eslint-community-regexpp-npm-4.10.0-6bfb984c81-2a6e345429.zip differ diff --git a/.yarn/cache/@eslint-community-regexpp-npm-4.5.1-bf72922237-6d901166d6.zip b/.yarn/cache/@eslint-community-regexpp-npm-4.5.1-bf72922237-6d901166d6.zip deleted file mode 100644 index 0f8176b616..0000000000 Binary files a/.yarn/cache/@eslint-community-regexpp-npm-4.5.1-bf72922237-6d901166d6.zip and /dev/null differ diff --git a/.yarn/cache/@eslint-eslintrc-npm-2.1.2-feb0771c9f-bc742a1e3b.zip b/.yarn/cache/@eslint-eslintrc-npm-2.1.2-feb0771c9f-bc742a1e3b.zip deleted file mode 100644 index 43f6713681..0000000000 Binary files a/.yarn/cache/@eslint-eslintrc-npm-2.1.2-feb0771c9f-bc742a1e3b.zip and /dev/null differ diff --git a/.yarn/cache/@eslint-eslintrc-npm-2.1.4-1ff4b5f908-10957c7592.zip b/.yarn/cache/@eslint-eslintrc-npm-2.1.4-1ff4b5f908-10957c7592.zip new file mode 100644 index 0000000000..58788ff7a6 Binary files /dev/null and b/.yarn/cache/@eslint-eslintrc-npm-2.1.4-1ff4b5f908-10957c7592.zip differ diff --git a/.yarn/cache/@eslint-js-npm-8.51.0-51f088b88b-0228bf1e1e.zip b/.yarn/cache/@eslint-js-npm-8.57.0-00ead3710a-315dc65b0e.zip similarity index 50% rename from .yarn/cache/@eslint-js-npm-8.51.0-51f088b88b-0228bf1e1e.zip rename to .yarn/cache/@eslint-js-npm-8.57.0-00ead3710a-315dc65b0e.zip index 0cae34e698..82eab16e7c 100644 Binary files a/.yarn/cache/@eslint-js-npm-8.51.0-51f088b88b-0228bf1e1e.zip and b/.yarn/cache/@eslint-js-npm-8.57.0-00ead3710a-315dc65b0e.zip differ diff --git a/.yarn/cache/@fullcalendar-bootstrap5-npm-6.1.9-ef68c3c094-1d6168fafc.zip b/.yarn/cache/@fullcalendar-bootstrap5-npm-6.1.11-6e0fbf281a-a0c3b94346.zip similarity index 52% rename from .yarn/cache/@fullcalendar-bootstrap5-npm-6.1.9-ef68c3c094-1d6168fafc.zip rename to .yarn/cache/@fullcalendar-bootstrap5-npm-6.1.11-6e0fbf281a-a0c3b94346.zip index 22a51e35a1..edc7da3b25 100644 Binary files a/.yarn/cache/@fullcalendar-bootstrap5-npm-6.1.9-ef68c3c094-1d6168fafc.zip and b/.yarn/cache/@fullcalendar-bootstrap5-npm-6.1.11-6e0fbf281a-a0c3b94346.zip differ diff --git a/.yarn/cache/@fullcalendar-core-npm-6.1.11-ae049c8ace-0078a6f96b.zip b/.yarn/cache/@fullcalendar-core-npm-6.1.11-ae049c8ace-0078a6f96b.zip new file mode 100644 index 0000000000..c9eee67d63 Binary files /dev/null and b/.yarn/cache/@fullcalendar-core-npm-6.1.11-ae049c8ace-0078a6f96b.zip differ diff --git a/.yarn/cache/@fullcalendar-core-npm-6.1.9-b4da84d4b8-836db3e40c.zip b/.yarn/cache/@fullcalendar-core-npm-6.1.9-b4da84d4b8-836db3e40c.zip deleted file mode 100644 index 4e27bfed69..0000000000 Binary files a/.yarn/cache/@fullcalendar-core-npm-6.1.9-b4da84d4b8-836db3e40c.zip and /dev/null differ diff --git a/.yarn/cache/@fullcalendar-daygrid-npm-6.1.11-2187ca1b8f-6eb5606de5.zip b/.yarn/cache/@fullcalendar-daygrid-npm-6.1.11-2187ca1b8f-6eb5606de5.zip new file mode 100644 index 0000000000..3a7449a3a8 Binary files /dev/null and b/.yarn/cache/@fullcalendar-daygrid-npm-6.1.11-2187ca1b8f-6eb5606de5.zip differ diff --git a/.yarn/cache/@fullcalendar-daygrid-npm-6.1.9-4c0da59f84-3db55247c4.zip b/.yarn/cache/@fullcalendar-daygrid-npm-6.1.9-4c0da59f84-3db55247c4.zip deleted file mode 100644 index 5a0ad27421..0000000000 Binary files a/.yarn/cache/@fullcalendar-daygrid-npm-6.1.9-4c0da59f84-3db55247c4.zip and /dev/null differ diff --git 
a/.yarn/cache/@fullcalendar-icalendar-npm-6.1.9-92e390eda8-d47daf4ae0.zip b/.yarn/cache/@fullcalendar-icalendar-npm-6.1.11-73807e790d-4e6eff15a8.zip similarity index 53% rename from .yarn/cache/@fullcalendar-icalendar-npm-6.1.9-92e390eda8-d47daf4ae0.zip rename to .yarn/cache/@fullcalendar-icalendar-npm-6.1.11-73807e790d-4e6eff15a8.zip index 804279cbb4..861ed1b366 100644 Binary files a/.yarn/cache/@fullcalendar-icalendar-npm-6.1.9-92e390eda8-d47daf4ae0.zip and b/.yarn/cache/@fullcalendar-icalendar-npm-6.1.11-73807e790d-4e6eff15a8.zip differ diff --git a/.yarn/cache/@fullcalendar-interaction-npm-6.1.9-f729b81a3d-787111ea6f.zip b/.yarn/cache/@fullcalendar-interaction-npm-6.1.11-39630596c7-c67d4cfa0b.zip similarity index 57% rename from .yarn/cache/@fullcalendar-interaction-npm-6.1.9-f729b81a3d-787111ea6f.zip rename to .yarn/cache/@fullcalendar-interaction-npm-6.1.11-39630596c7-c67d4cfa0b.zip index 43132b57e8..b04343467b 100644 Binary files a/.yarn/cache/@fullcalendar-interaction-npm-6.1.9-f729b81a3d-787111ea6f.zip and b/.yarn/cache/@fullcalendar-interaction-npm-6.1.11-39630596c7-c67d4cfa0b.zip differ diff --git a/.yarn/cache/@fullcalendar-list-npm-6.1.9-f76695c5ab-978dd54b71.zip b/.yarn/cache/@fullcalendar-list-npm-6.1.11-8f1846f302-84a8cd6e63.zip similarity index 53% rename from .yarn/cache/@fullcalendar-list-npm-6.1.9-f76695c5ab-978dd54b71.zip rename to .yarn/cache/@fullcalendar-list-npm-6.1.11-8f1846f302-84a8cd6e63.zip index 177ec564d1..93cd34af81 100644 Binary files a/.yarn/cache/@fullcalendar-list-npm-6.1.9-f76695c5ab-978dd54b71.zip and b/.yarn/cache/@fullcalendar-list-npm-6.1.11-8f1846f302-84a8cd6e63.zip differ diff --git a/.yarn/cache/@fullcalendar-luxon3-npm-6.1.11-3e90656a71-8e7f45aab2.zip b/.yarn/cache/@fullcalendar-luxon3-npm-6.1.11-3e90656a71-8e7f45aab2.zip new file mode 100644 index 0000000000..6e717b3495 Binary files /dev/null and b/.yarn/cache/@fullcalendar-luxon3-npm-6.1.11-3e90656a71-8e7f45aab2.zip differ diff --git a/.yarn/cache/@fullcalendar-luxon3-npm-6.1.9-d79fc8f961-25122126e2.zip b/.yarn/cache/@fullcalendar-luxon3-npm-6.1.9-d79fc8f961-25122126e2.zip deleted file mode 100644 index 097ca2265a..0000000000 Binary files a/.yarn/cache/@fullcalendar-luxon3-npm-6.1.9-d79fc8f961-25122126e2.zip and /dev/null differ diff --git a/.yarn/cache/@fullcalendar-timegrid-npm-6.1.11-1d43455bfd-4a11e6dd90.zip b/.yarn/cache/@fullcalendar-timegrid-npm-6.1.11-1d43455bfd-4a11e6dd90.zip new file mode 100644 index 0000000000..917beeda69 Binary files /dev/null and b/.yarn/cache/@fullcalendar-timegrid-npm-6.1.11-1d43455bfd-4a11e6dd90.zip differ diff --git a/.yarn/cache/@fullcalendar-timegrid-npm-6.1.9-b227fefa80-8c12a508f7.zip b/.yarn/cache/@fullcalendar-timegrid-npm-6.1.9-b227fefa80-8c12a508f7.zip deleted file mode 100644 index 5d92af9ff1..0000000000 Binary files a/.yarn/cache/@fullcalendar-timegrid-npm-6.1.9-b227fefa80-8c12a508f7.zip and /dev/null differ diff --git a/.yarn/cache/@fullcalendar-vue3-npm-6.1.9-3c150e259d-2c1c0fbe72.zip b/.yarn/cache/@fullcalendar-vue3-npm-6.1.11-f6b8b48da4-5891a596e9.zip similarity index 91% rename from .yarn/cache/@fullcalendar-vue3-npm-6.1.9-3c150e259d-2c1c0fbe72.zip rename to .yarn/cache/@fullcalendar-vue3-npm-6.1.11-f6b8b48da4-5891a596e9.zip index 0ca254a0d6..3054aa761f 100644 Binary files a/.yarn/cache/@fullcalendar-vue3-npm-6.1.9-3c150e259d-2c1c0fbe72.zip and b/.yarn/cache/@fullcalendar-vue3-npm-6.1.11-f6b8b48da4-5891a596e9.zip differ diff --git a/.yarn/cache/@humanwhocodes-config-array-npm-0.11.11-e3582554ee-db84507375.zip 
b/.yarn/cache/@humanwhocodes-config-array-npm-0.11.14-94a02fcc87-861ccce9ea.zip similarity index 51% rename from .yarn/cache/@humanwhocodes-config-array-npm-0.11.11-e3582554ee-db84507375.zip rename to .yarn/cache/@humanwhocodes-config-array-npm-0.11.14-94a02fcc87-861ccce9ea.zip index 2409a83fb2..166fee4b82 100644 Binary files a/.yarn/cache/@humanwhocodes-config-array-npm-0.11.11-e3582554ee-db84507375.zip and b/.yarn/cache/@humanwhocodes-config-array-npm-0.11.14-94a02fcc87-861ccce9ea.zip differ diff --git a/.yarn/cache/@humanwhocodes-object-schema-npm-1.2.1-eb622b5d0e-a824a1ec31.zip b/.yarn/cache/@humanwhocodes-object-schema-npm-1.2.1-eb622b5d0e-a824a1ec31.zip deleted file mode 100644 index 2b79104af5..0000000000 Binary files a/.yarn/cache/@humanwhocodes-object-schema-npm-1.2.1-eb622b5d0e-a824a1ec31.zip and /dev/null differ diff --git a/.yarn/cache/@humanwhocodes-object-schema-npm-2.0.2-77b42018f9-2fc1150336.zip b/.yarn/cache/@humanwhocodes-object-schema-npm-2.0.2-77b42018f9-2fc1150336.zip new file mode 100644 index 0000000000..cf6847cf44 Binary files /dev/null and b/.yarn/cache/@humanwhocodes-object-schema-npm-2.0.2-77b42018f9-2fc1150336.zip differ diff --git a/.yarn/cache/@parcel-bundler-default-npm-2.10.0-bf1aa01515-58d3619928.zip b/.yarn/cache/@parcel-bundler-default-npm-2.10.0-bf1aa01515-58d3619928.zip deleted file mode 100644 index 302219eaa0..0000000000 Binary files a/.yarn/cache/@parcel-bundler-default-npm-2.10.0-bf1aa01515-58d3619928.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-bundler-default-npm-2.12.0-9ba57d919c-f211a76f55.zip b/.yarn/cache/@parcel-bundler-default-npm-2.12.0-9ba57d919c-f211a76f55.zip new file mode 100644 index 0000000000..024e036391 Binary files /dev/null and b/.yarn/cache/@parcel-bundler-default-npm-2.12.0-9ba57d919c-f211a76f55.zip differ diff --git a/.yarn/cache/@parcel-cache-npm-2.10.0-37f1f83d32-209d474abd.zip b/.yarn/cache/@parcel-cache-npm-2.10.0-37f1f83d32-209d474abd.zip deleted file mode 100644 index 271d9d3f6f..0000000000 Binary files a/.yarn/cache/@parcel-cache-npm-2.10.0-37f1f83d32-209d474abd.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-cache-npm-2.12.0-3389909f2c-a45e799809.zip b/.yarn/cache/@parcel-cache-npm-2.12.0-3389909f2c-a45e799809.zip new file mode 100644 index 0000000000..a358668eb7 Binary files /dev/null and b/.yarn/cache/@parcel-cache-npm-2.12.0-3389909f2c-a45e799809.zip differ diff --git a/.yarn/cache/@parcel-codeframe-npm-2.10.0-e8aa1b4ecc-d87b17d3ce.zip b/.yarn/cache/@parcel-codeframe-npm-2.12.0-aa8027940e-265c4d7ebe.zip similarity index 99% rename from .yarn/cache/@parcel-codeframe-npm-2.10.0-e8aa1b4ecc-d87b17d3ce.zip rename to .yarn/cache/@parcel-codeframe-npm-2.12.0-aa8027940e-265c4d7ebe.zip index 9058e68333..f4239d8ba7 100644 Binary files a/.yarn/cache/@parcel-codeframe-npm-2.10.0-e8aa1b4ecc-d87b17d3ce.zip and b/.yarn/cache/@parcel-codeframe-npm-2.12.0-aa8027940e-265c4d7ebe.zip differ diff --git a/.yarn/cache/@parcel-compressor-raw-npm-2.10.0-961e5d9fe0-043fca0ecb.zip b/.yarn/cache/@parcel-compressor-raw-npm-2.10.0-961e5d9fe0-043fca0ecb.zip deleted file mode 100644 index e6333b4084..0000000000 Binary files a/.yarn/cache/@parcel-compressor-raw-npm-2.10.0-961e5d9fe0-043fca0ecb.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-compressor-raw-npm-2.12.0-19f313c172-16c56704f3.zip b/.yarn/cache/@parcel-compressor-raw-npm-2.12.0-19f313c172-16c56704f3.zip new file mode 100644 index 0000000000..da57625381 Binary files /dev/null and b/.yarn/cache/@parcel-compressor-raw-npm-2.12.0-19f313c172-16c56704f3.zip 
differ diff --git a/.yarn/cache/@parcel-config-default-npm-2.10.0-2a1fbdf24b-d780d05021.zip b/.yarn/cache/@parcel-config-default-npm-2.12.0-aefd3c699e-72877c5dc4.zip similarity index 66% rename from .yarn/cache/@parcel-config-default-npm-2.10.0-2a1fbdf24b-d780d05021.zip rename to .yarn/cache/@parcel-config-default-npm-2.12.0-aefd3c699e-72877c5dc4.zip index d5a1840883..a4934d017e 100644 Binary files a/.yarn/cache/@parcel-config-default-npm-2.10.0-2a1fbdf24b-d780d05021.zip and b/.yarn/cache/@parcel-config-default-npm-2.12.0-aefd3c699e-72877c5dc4.zip differ diff --git a/.yarn/cache/@parcel-core-npm-2.10.0-59eaeeba7a-c59c2971ea.zip b/.yarn/cache/@parcel-core-npm-2.10.0-59eaeeba7a-c59c2971ea.zip deleted file mode 100644 index 95a14ae902..0000000000 Binary files a/.yarn/cache/@parcel-core-npm-2.10.0-59eaeeba7a-c59c2971ea.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-core-npm-2.12.0-8f08b883d4-5bf6746308.zip b/.yarn/cache/@parcel-core-npm-2.12.0-8f08b883d4-5bf6746308.zip new file mode 100644 index 0000000000..42c39ebe36 Binary files /dev/null and b/.yarn/cache/@parcel-core-npm-2.12.0-8f08b883d4-5bf6746308.zip differ diff --git a/.yarn/cache/@parcel-diagnostic-npm-2.10.0-1e389b369e-45c606ca52.zip b/.yarn/cache/@parcel-diagnostic-npm-2.12.0-6e89ddad28-a4b918c1a0.zip similarity index 73% rename from .yarn/cache/@parcel-diagnostic-npm-2.10.0-1e389b369e-45c606ca52.zip rename to .yarn/cache/@parcel-diagnostic-npm-2.12.0-6e89ddad28-a4b918c1a0.zip index c16d0b42f4..a8e890bf5c 100644 Binary files a/.yarn/cache/@parcel-diagnostic-npm-2.10.0-1e389b369e-45c606ca52.zip and b/.yarn/cache/@parcel-diagnostic-npm-2.12.0-6e89ddad28-a4b918c1a0.zip differ diff --git a/.yarn/cache/@parcel-events-npm-2.10.0-da42a4afa6-1d21cd4186.zip b/.yarn/cache/@parcel-events-npm-2.12.0-e6eff18c8c-136a8a2921.zip similarity index 94% rename from .yarn/cache/@parcel-events-npm-2.10.0-da42a4afa6-1d21cd4186.zip rename to .yarn/cache/@parcel-events-npm-2.12.0-e6eff18c8c-136a8a2921.zip index cf89807381..b806eb99ac 100644 Binary files a/.yarn/cache/@parcel-events-npm-2.10.0-da42a4afa6-1d21cd4186.zip and b/.yarn/cache/@parcel-events-npm-2.12.0-e6eff18c8c-136a8a2921.zip differ diff --git a/.yarn/cache/@parcel-fs-npm-2.10.0-c959567f0f-10faae481c.zip b/.yarn/cache/@parcel-fs-npm-2.10.0-c959567f0f-10faae481c.zip deleted file mode 100644 index 0062d48c08..0000000000 Binary files a/.yarn/cache/@parcel-fs-npm-2.10.0-c959567f0f-10faae481c.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-fs-npm-2.12.0-3c46842e62-43d454d55d.zip b/.yarn/cache/@parcel-fs-npm-2.12.0-3c46842e62-43d454d55d.zip new file mode 100644 index 0000000000..52cbc5f7f1 Binary files /dev/null and b/.yarn/cache/@parcel-fs-npm-2.12.0-3c46842e62-43d454d55d.zip differ diff --git a/.yarn/cache/@parcel-graph-npm-3.0.0-9001abfefc-0a9d5017f6.zip b/.yarn/cache/@parcel-graph-npm-3.0.0-9001abfefc-0a9d5017f6.zip deleted file mode 100644 index 164f2ff077..0000000000 Binary files a/.yarn/cache/@parcel-graph-npm-3.0.0-9001abfefc-0a9d5017f6.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-graph-npm-3.2.0-92821d4289-b4d31624fc.zip b/.yarn/cache/@parcel-graph-npm-3.2.0-92821d4289-b4d31624fc.zip new file mode 100644 index 0000000000..27f3718928 Binary files /dev/null and b/.yarn/cache/@parcel-graph-npm-3.2.0-92821d4289-b4d31624fc.zip differ diff --git a/.yarn/cache/@parcel-logger-npm-2.10.0-41ac90e34c-52d0b5331d.zip b/.yarn/cache/@parcel-logger-npm-2.10.0-41ac90e34c-52d0b5331d.zip deleted file mode 100644 index d7df80c7bf..0000000000 Binary files 
a/.yarn/cache/@parcel-logger-npm-2.10.0-41ac90e34c-52d0b5331d.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-logger-npm-2.12.0-7d2f85a906-be3fe9d9ea.zip b/.yarn/cache/@parcel-logger-npm-2.12.0-7d2f85a906-be3fe9d9ea.zip new file mode 100644 index 0000000000..7231e4c65d Binary files /dev/null and b/.yarn/cache/@parcel-logger-npm-2.12.0-7d2f85a906-be3fe9d9ea.zip differ diff --git a/.yarn/cache/@parcel-markdown-ansi-npm-2.10.0-4dd4da44f3-35e2d07ec8.zip b/.yarn/cache/@parcel-markdown-ansi-npm-2.12.0-6b0fe453df-850ee665d9.zip similarity index 64% rename from .yarn/cache/@parcel-markdown-ansi-npm-2.10.0-4dd4da44f3-35e2d07ec8.zip rename to .yarn/cache/@parcel-markdown-ansi-npm-2.12.0-6b0fe453df-850ee665d9.zip index f5a836e3cc..22582b46fa 100644 Binary files a/.yarn/cache/@parcel-markdown-ansi-npm-2.10.0-4dd4da44f3-35e2d07ec8.zip and b/.yarn/cache/@parcel-markdown-ansi-npm-2.12.0-6b0fe453df-850ee665d9.zip differ diff --git a/.yarn/cache/@parcel-namer-default-npm-2.10.0-4b82db40fd-f2a32096d1.zip b/.yarn/cache/@parcel-namer-default-npm-2.10.0-4b82db40fd-f2a32096d1.zip deleted file mode 100644 index 46d6a52498..0000000000 Binary files a/.yarn/cache/@parcel-namer-default-npm-2.10.0-4b82db40fd-f2a32096d1.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-namer-default-npm-2.12.0-28980cfd47-dc92ec0945.zip b/.yarn/cache/@parcel-namer-default-npm-2.12.0-28980cfd47-dc92ec0945.zip new file mode 100644 index 0000000000..7db7fb405c Binary files /dev/null and b/.yarn/cache/@parcel-namer-default-npm-2.12.0-28980cfd47-dc92ec0945.zip differ diff --git a/.yarn/cache/@parcel-node-resolver-core-npm-3.1.0-9c9ff3ab8b-dcdd39bc6a.zip b/.yarn/cache/@parcel-node-resolver-core-npm-3.1.0-9c9ff3ab8b-dcdd39bc6a.zip deleted file mode 100644 index 33ee38431b..0000000000 Binary files a/.yarn/cache/@parcel-node-resolver-core-npm-3.1.0-9c9ff3ab8b-dcdd39bc6a.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-node-resolver-core-npm-3.3.0-53804df663-acc3721678.zip b/.yarn/cache/@parcel-node-resolver-core-npm-3.3.0-53804df663-acc3721678.zip new file mode 100644 index 0000000000..76a69962a6 Binary files /dev/null and b/.yarn/cache/@parcel-node-resolver-core-npm-3.3.0-53804df663-acc3721678.zip differ diff --git a/.yarn/cache/@parcel-optimizer-css-npm-2.10.0-dbd5825b4e-ea15989512.zip b/.yarn/cache/@parcel-optimizer-css-npm-2.10.0-dbd5825b4e-ea15989512.zip deleted file mode 100644 index 9956afb109..0000000000 Binary files a/.yarn/cache/@parcel-optimizer-css-npm-2.10.0-dbd5825b4e-ea15989512.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-optimizer-css-npm-2.12.0-f95bd4d060-abcdf58c29.zip b/.yarn/cache/@parcel-optimizer-css-npm-2.12.0-f95bd4d060-abcdf58c29.zip new file mode 100644 index 0000000000..f1c61749b9 Binary files /dev/null and b/.yarn/cache/@parcel-optimizer-css-npm-2.12.0-f95bd4d060-abcdf58c29.zip differ diff --git a/.yarn/cache/@parcel-optimizer-data-url-npm-2.10.0-700cb5aab6-ec9530be83.zip b/.yarn/cache/@parcel-optimizer-data-url-npm-2.10.0-700cb5aab6-ec9530be83.zip deleted file mode 100644 index 39c8d2b426..0000000000 Binary files a/.yarn/cache/@parcel-optimizer-data-url-npm-2.10.0-700cb5aab6-ec9530be83.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-optimizer-data-url-npm-2.12.0-dad3731170-0397293961.zip b/.yarn/cache/@parcel-optimizer-data-url-npm-2.12.0-dad3731170-0397293961.zip new file mode 100644 index 0000000000..28497d3327 Binary files /dev/null and b/.yarn/cache/@parcel-optimizer-data-url-npm-2.12.0-dad3731170-0397293961.zip differ diff --git 
a/.yarn/cache/@parcel-optimizer-htmlnano-npm-2.10.0-ee0243765c-1f6de13022.zip b/.yarn/cache/@parcel-optimizer-htmlnano-npm-2.10.0-ee0243765c-1f6de13022.zip deleted file mode 100644 index 7b7bff73e7..0000000000 Binary files a/.yarn/cache/@parcel-optimizer-htmlnano-npm-2.10.0-ee0243765c-1f6de13022.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-optimizer-htmlnano-npm-2.12.0-cdd2835c12-64e571f56f.zip b/.yarn/cache/@parcel-optimizer-htmlnano-npm-2.12.0-cdd2835c12-64e571f56f.zip new file mode 100644 index 0000000000..4089a870fb Binary files /dev/null and b/.yarn/cache/@parcel-optimizer-htmlnano-npm-2.12.0-cdd2835c12-64e571f56f.zip differ diff --git a/.yarn/cache/@parcel-optimizer-image-npm-2.10.0-a581b60cbd-94d5db2837.zip b/.yarn/cache/@parcel-optimizer-image-npm-2.10.0-a581b60cbd-94d5db2837.zip deleted file mode 100644 index df6e165df9..0000000000 Binary files a/.yarn/cache/@parcel-optimizer-image-npm-2.10.0-a581b60cbd-94d5db2837.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-optimizer-image-npm-2.12.0-4cbc56f72d-7d28379bf1.zip b/.yarn/cache/@parcel-optimizer-image-npm-2.12.0-4cbc56f72d-7d28379bf1.zip new file mode 100644 index 0000000000..8b0a44e756 Binary files /dev/null and b/.yarn/cache/@parcel-optimizer-image-npm-2.12.0-4cbc56f72d-7d28379bf1.zip differ diff --git a/.yarn/cache/@parcel-optimizer-svgo-npm-2.10.0-154d938969-7201c63222.zip b/.yarn/cache/@parcel-optimizer-svgo-npm-2.10.0-154d938969-7201c63222.zip deleted file mode 100644 index ba2a5ca11a..0000000000 Binary files a/.yarn/cache/@parcel-optimizer-svgo-npm-2.10.0-154d938969-7201c63222.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-optimizer-svgo-npm-2.12.0-08c0f1b17f-d3a4d2de9f.zip b/.yarn/cache/@parcel-optimizer-svgo-npm-2.12.0-08c0f1b17f-d3a4d2de9f.zip new file mode 100644 index 0000000000..441bead99b Binary files /dev/null and b/.yarn/cache/@parcel-optimizer-svgo-npm-2.12.0-08c0f1b17f-d3a4d2de9f.zip differ diff --git a/.yarn/cache/@parcel-optimizer-swc-npm-2.10.0-caf3bb9c02-1fe68ee6ff.zip b/.yarn/cache/@parcel-optimizer-swc-npm-2.10.0-caf3bb9c02-1fe68ee6ff.zip deleted file mode 100644 index ac83217658..0000000000 Binary files a/.yarn/cache/@parcel-optimizer-swc-npm-2.10.0-caf3bb9c02-1fe68ee6ff.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-optimizer-swc-npm-2.12.0-fb535e4283-0b7fdf3df1.zip b/.yarn/cache/@parcel-optimizer-swc-npm-2.12.0-fb535e4283-0b7fdf3df1.zip new file mode 100644 index 0000000000..8b137cf673 Binary files /dev/null and b/.yarn/cache/@parcel-optimizer-swc-npm-2.12.0-fb535e4283-0b7fdf3df1.zip differ diff --git a/.yarn/cache/@parcel-package-manager-npm-2.10.0-4f4a39adee-7c4a95d9df.zip b/.yarn/cache/@parcel-package-manager-npm-2.10.0-4f4a39adee-7c4a95d9df.zip deleted file mode 100644 index 77ba89e562..0000000000 Binary files a/.yarn/cache/@parcel-package-manager-npm-2.10.0-4f4a39adee-7c4a95d9df.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-package-manager-npm-2.12.0-fc90aacf70-a517e9efe1.zip b/.yarn/cache/@parcel-package-manager-npm-2.12.0-fc90aacf70-a517e9efe1.zip new file mode 100644 index 0000000000..1e757bdf2f Binary files /dev/null and b/.yarn/cache/@parcel-package-manager-npm-2.12.0-fc90aacf70-a517e9efe1.zip differ diff --git a/.yarn/cache/@parcel-packager-css-npm-2.10.0-cb31a968a8-11bf4cae4c.zip b/.yarn/cache/@parcel-packager-css-npm-2.10.0-cb31a968a8-11bf4cae4c.zip deleted file mode 100644 index 71fc7e92a4..0000000000 Binary files a/.yarn/cache/@parcel-packager-css-npm-2.10.0-cb31a968a8-11bf4cae4c.zip and /dev/null differ diff --git 
a/.yarn/cache/@parcel-packager-css-npm-2.12.0-b1c27a8323-684aaa1d85.zip b/.yarn/cache/@parcel-packager-css-npm-2.12.0-b1c27a8323-684aaa1d85.zip new file mode 100644 index 0000000000..4cf7815f57 Binary files /dev/null and b/.yarn/cache/@parcel-packager-css-npm-2.12.0-b1c27a8323-684aaa1d85.zip differ diff --git a/.yarn/cache/@parcel-packager-html-npm-2.10.0-d6f71e7e36-8dfd86e7d6.zip b/.yarn/cache/@parcel-packager-html-npm-2.12.0-ad361b1265-ee558ad616.zip similarity index 50% rename from .yarn/cache/@parcel-packager-html-npm-2.10.0-d6f71e7e36-8dfd86e7d6.zip rename to .yarn/cache/@parcel-packager-html-npm-2.12.0-ad361b1265-ee558ad616.zip index d3c871d581..989402a62c 100644 Binary files a/.yarn/cache/@parcel-packager-html-npm-2.10.0-d6f71e7e36-8dfd86e7d6.zip and b/.yarn/cache/@parcel-packager-html-npm-2.12.0-ad361b1265-ee558ad616.zip differ diff --git a/.yarn/cache/@parcel-packager-js-npm-2.10.0-f84ec4cc7b-9b62598864.zip b/.yarn/cache/@parcel-packager-js-npm-2.12.0-093e3200cd-2189b7ff15.zip similarity index 59% rename from .yarn/cache/@parcel-packager-js-npm-2.10.0-f84ec4cc7b-9b62598864.zip rename to .yarn/cache/@parcel-packager-js-npm-2.12.0-093e3200cd-2189b7ff15.zip index 9084795b65..461ec50d28 100644 Binary files a/.yarn/cache/@parcel-packager-js-npm-2.10.0-f84ec4cc7b-9b62598864.zip and b/.yarn/cache/@parcel-packager-js-npm-2.12.0-093e3200cd-2189b7ff15.zip differ diff --git a/.yarn/cache/@parcel-packager-raw-npm-2.10.0-01ef1b8e3e-492fe07ae5.zip b/.yarn/cache/@parcel-packager-raw-npm-2.10.0-01ef1b8e3e-492fe07ae5.zip deleted file mode 100644 index 132ebb4043..0000000000 Binary files a/.yarn/cache/@parcel-packager-raw-npm-2.10.0-01ef1b8e3e-492fe07ae5.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-packager-raw-npm-2.12.0-b7f15635f8-39ce2fc7ae.zip b/.yarn/cache/@parcel-packager-raw-npm-2.12.0-b7f15635f8-39ce2fc7ae.zip new file mode 100644 index 0000000000..e27b5ed1e3 Binary files /dev/null and b/.yarn/cache/@parcel-packager-raw-npm-2.12.0-b7f15635f8-39ce2fc7ae.zip differ diff --git a/.yarn/cache/@parcel-packager-svg-npm-2.10.0-22326715bd-f49d7f3b88.zip b/.yarn/cache/@parcel-packager-svg-npm-2.10.0-22326715bd-f49d7f3b88.zip deleted file mode 100644 index c3031c1920..0000000000 Binary files a/.yarn/cache/@parcel-packager-svg-npm-2.10.0-22326715bd-f49d7f3b88.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-packager-svg-npm-2.12.0-fa921ce522-436ac9ea39.zip b/.yarn/cache/@parcel-packager-svg-npm-2.12.0-fa921ce522-436ac9ea39.zip new file mode 100644 index 0000000000..f3d37303b0 Binary files /dev/null and b/.yarn/cache/@parcel-packager-svg-npm-2.12.0-fa921ce522-436ac9ea39.zip differ diff --git a/.yarn/cache/@parcel-packager-wasm-npm-2.10.0-b1d2cd8f88-d9a13eb838.zip b/.yarn/cache/@parcel-packager-wasm-npm-2.12.0-ec551a9e29-a10e1cd988.zip similarity index 76% rename from .yarn/cache/@parcel-packager-wasm-npm-2.10.0-b1d2cd8f88-d9a13eb838.zip rename to .yarn/cache/@parcel-packager-wasm-npm-2.12.0-ec551a9e29-a10e1cd988.zip index 2cde1a739e..5b569f2004 100644 Binary files a/.yarn/cache/@parcel-packager-wasm-npm-2.10.0-b1d2cd8f88-d9a13eb838.zip and b/.yarn/cache/@parcel-packager-wasm-npm-2.12.0-ec551a9e29-a10e1cd988.zip differ diff --git a/.yarn/cache/@parcel-plugin-npm-2.10.0-efbc58a209-e13ba6e7e5.zip b/.yarn/cache/@parcel-plugin-npm-2.12.0-947dec85d3-0b52f1dd06.zip similarity index 77% rename from .yarn/cache/@parcel-plugin-npm-2.10.0-efbc58a209-e13ba6e7e5.zip rename to .yarn/cache/@parcel-plugin-npm-2.12.0-947dec85d3-0b52f1dd06.zip index 0c18974840..667d7230e6 100644 Binary files 
a/.yarn/cache/@parcel-plugin-npm-2.10.0-efbc58a209-e13ba6e7e5.zip and b/.yarn/cache/@parcel-plugin-npm-2.12.0-947dec85d3-0b52f1dd06.zip differ diff --git a/.yarn/cache/@parcel-profiler-npm-2.10.0-b1ba499bc1-78d545edb7.zip b/.yarn/cache/@parcel-profiler-npm-2.10.0-b1ba499bc1-78d545edb7.zip deleted file mode 100644 index 7a39442c82..0000000000 Binary files a/.yarn/cache/@parcel-profiler-npm-2.10.0-b1ba499bc1-78d545edb7.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-profiler-npm-2.12.0-69720a23ab-b683b74e10.zip b/.yarn/cache/@parcel-profiler-npm-2.12.0-69720a23ab-b683b74e10.zip new file mode 100644 index 0000000000..1cacc84571 Binary files /dev/null and b/.yarn/cache/@parcel-profiler-npm-2.12.0-69720a23ab-b683b74e10.zip differ diff --git a/.yarn/cache/@parcel-reporter-cli-npm-2.10.0-083fc2f2d6-0137a91e45.zip b/.yarn/cache/@parcel-reporter-cli-npm-2.10.0-083fc2f2d6-0137a91e45.zip deleted file mode 100644 index cafce4c242..0000000000 Binary files a/.yarn/cache/@parcel-reporter-cli-npm-2.10.0-083fc2f2d6-0137a91e45.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-reporter-cli-npm-2.12.0-b3e4c5fe19-8cc524fa15.zip b/.yarn/cache/@parcel-reporter-cli-npm-2.12.0-b3e4c5fe19-8cc524fa15.zip new file mode 100644 index 0000000000..f6e625d396 Binary files /dev/null and b/.yarn/cache/@parcel-reporter-cli-npm-2.12.0-b3e4c5fe19-8cc524fa15.zip differ diff --git a/.yarn/cache/@parcel-reporter-dev-server-npm-2.10.0-2f19cb846e-e72fd6ec09.zip b/.yarn/cache/@parcel-reporter-dev-server-npm-2.10.0-2f19cb846e-e72fd6ec09.zip deleted file mode 100644 index 655d45cf19..0000000000 Binary files a/.yarn/cache/@parcel-reporter-dev-server-npm-2.10.0-2f19cb846e-e72fd6ec09.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-reporter-dev-server-npm-2.12.0-aed1d2c68c-43957b4656.zip b/.yarn/cache/@parcel-reporter-dev-server-npm-2.12.0-aed1d2c68c-43957b4656.zip new file mode 100644 index 0000000000..f1fb1818e9 Binary files /dev/null and b/.yarn/cache/@parcel-reporter-dev-server-npm-2.12.0-aed1d2c68c-43957b4656.zip differ diff --git a/.yarn/cache/@parcel-reporter-tracer-npm-2.10.0-184a89e262-0f8249b998.zip b/.yarn/cache/@parcel-reporter-tracer-npm-2.10.0-184a89e262-0f8249b998.zip deleted file mode 100644 index 533f83cc16..0000000000 Binary files a/.yarn/cache/@parcel-reporter-tracer-npm-2.10.0-184a89e262-0f8249b998.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-reporter-tracer-npm-2.12.0-5cec9ab2d5-24cddacd19.zip b/.yarn/cache/@parcel-reporter-tracer-npm-2.12.0-5cec9ab2d5-24cddacd19.zip new file mode 100644 index 0000000000..2196f5407c Binary files /dev/null and b/.yarn/cache/@parcel-reporter-tracer-npm-2.12.0-5cec9ab2d5-24cddacd19.zip differ diff --git a/.yarn/cache/@parcel-resolver-default-npm-2.10.0-ca49f01a75-c82e2d3c4b.zip b/.yarn/cache/@parcel-resolver-default-npm-2.10.0-ca49f01a75-c82e2d3c4b.zip deleted file mode 100644 index 39ac48c80c..0000000000 Binary files a/.yarn/cache/@parcel-resolver-default-npm-2.10.0-ca49f01a75-c82e2d3c4b.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-resolver-default-npm-2.12.0-8da790891c-f3652eea09.zip b/.yarn/cache/@parcel-resolver-default-npm-2.12.0-8da790891c-f3652eea09.zip new file mode 100644 index 0000000000..8022d04651 Binary files /dev/null and b/.yarn/cache/@parcel-resolver-default-npm-2.12.0-8da790891c-f3652eea09.zip differ diff --git a/.yarn/cache/@parcel-runtime-browser-hmr-npm-2.10.0-c6b7773a09-12928462c8.zip b/.yarn/cache/@parcel-runtime-browser-hmr-npm-2.10.0-c6b7773a09-12928462c8.zip deleted file mode 100644 index 
7f1d968b8a..0000000000 Binary files a/.yarn/cache/@parcel-runtime-browser-hmr-npm-2.10.0-c6b7773a09-12928462c8.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-runtime-browser-hmr-npm-2.12.0-6f0da66673-bbba57ecee.zip b/.yarn/cache/@parcel-runtime-browser-hmr-npm-2.12.0-6f0da66673-bbba57ecee.zip new file mode 100644 index 0000000000..f71de2152b Binary files /dev/null and b/.yarn/cache/@parcel-runtime-browser-hmr-npm-2.12.0-6f0da66673-bbba57ecee.zip differ diff --git a/.yarn/cache/@parcel-runtime-js-npm-2.10.0-6b4cf1576c-3bbd64c5b9.zip b/.yarn/cache/@parcel-runtime-js-npm-2.12.0-e21acc0f42-6afa3e7eb2.zip similarity index 76% rename from .yarn/cache/@parcel-runtime-js-npm-2.10.0-6b4cf1576c-3bbd64c5b9.zip rename to .yarn/cache/@parcel-runtime-js-npm-2.12.0-e21acc0f42-6afa3e7eb2.zip index 10fc3360d3..be9c7d7e4b 100644 Binary files a/.yarn/cache/@parcel-runtime-js-npm-2.10.0-6b4cf1576c-3bbd64c5b9.zip and b/.yarn/cache/@parcel-runtime-js-npm-2.12.0-e21acc0f42-6afa3e7eb2.zip differ diff --git a/.yarn/cache/@parcel-runtime-react-refresh-npm-2.10.0-b1f6c62bdf-dc567474a1.zip b/.yarn/cache/@parcel-runtime-react-refresh-npm-2.10.0-b1f6c62bdf-dc567474a1.zip deleted file mode 100644 index 75e403b9dd..0000000000 Binary files a/.yarn/cache/@parcel-runtime-react-refresh-npm-2.10.0-b1f6c62bdf-dc567474a1.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-runtime-react-refresh-npm-2.12.0-2b09615691-41aee9a874.zip b/.yarn/cache/@parcel-runtime-react-refresh-npm-2.12.0-2b09615691-41aee9a874.zip new file mode 100644 index 0000000000..8dc8e5281c Binary files /dev/null and b/.yarn/cache/@parcel-runtime-react-refresh-npm-2.12.0-2b09615691-41aee9a874.zip differ diff --git a/.yarn/cache/@parcel-runtime-service-worker-npm-2.10.0-3ca99a5366-d0bfd113b9.zip b/.yarn/cache/@parcel-runtime-service-worker-npm-2.10.0-3ca99a5366-d0bfd113b9.zip deleted file mode 100644 index 8602ad88a3..0000000000 Binary files a/.yarn/cache/@parcel-runtime-service-worker-npm-2.10.0-3ca99a5366-d0bfd113b9.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-runtime-service-worker-npm-2.12.0-7d227ff0bf-c71246428e.zip b/.yarn/cache/@parcel-runtime-service-worker-npm-2.12.0-7d227ff0bf-c71246428e.zip new file mode 100644 index 0000000000..18682c22ae Binary files /dev/null and b/.yarn/cache/@parcel-runtime-service-worker-npm-2.12.0-7d227ff0bf-c71246428e.zip differ diff --git a/.yarn/cache/@parcel-rust-npm-2.10.0-99038406b0-466a78d27d.zip b/.yarn/cache/@parcel-rust-npm-2.12.0-0cf943f3e5-51c5b67b9e.zip similarity index 72% rename from .yarn/cache/@parcel-rust-npm-2.10.0-99038406b0-466a78d27d.zip rename to .yarn/cache/@parcel-rust-npm-2.12.0-0cf943f3e5-51c5b67b9e.zip index 22f9773a93..d5fe4206c9 100644 Binary files a/.yarn/cache/@parcel-rust-npm-2.10.0-99038406b0-466a78d27d.zip and b/.yarn/cache/@parcel-rust-npm-2.12.0-0cf943f3e5-51c5b67b9e.zip differ diff --git a/.yarn/cache/@parcel-transformer-babel-npm-2.10.0-fb74ad8c73-fd64092c9c.zip b/.yarn/cache/@parcel-transformer-babel-npm-2.12.0-953de52432-b8c457c0be.zip similarity index 87% rename from .yarn/cache/@parcel-transformer-babel-npm-2.10.0-fb74ad8c73-fd64092c9c.zip rename to .yarn/cache/@parcel-transformer-babel-npm-2.12.0-953de52432-b8c457c0be.zip index df186d3d17..9286325c9e 100644 Binary files a/.yarn/cache/@parcel-transformer-babel-npm-2.10.0-fb74ad8c73-fd64092c9c.zip and b/.yarn/cache/@parcel-transformer-babel-npm-2.12.0-953de52432-b8c457c0be.zip differ diff --git a/.yarn/cache/@parcel-transformer-css-npm-2.10.0-4fc35c8005-acc26e9b3d.zip 
b/.yarn/cache/@parcel-transformer-css-npm-2.10.0-4fc35c8005-acc26e9b3d.zip deleted file mode 100644 index 9ad2a3992a..0000000000 Binary files a/.yarn/cache/@parcel-transformer-css-npm-2.10.0-4fc35c8005-acc26e9b3d.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-transformer-css-npm-2.12.0-24ddc31ae3-3a6f16321d.zip b/.yarn/cache/@parcel-transformer-css-npm-2.12.0-24ddc31ae3-3a6f16321d.zip new file mode 100644 index 0000000000..f3e0520c71 Binary files /dev/null and b/.yarn/cache/@parcel-transformer-css-npm-2.12.0-24ddc31ae3-3a6f16321d.zip differ diff --git a/.yarn/cache/@parcel-transformer-html-npm-2.10.0-b6d2228044-f28e0d3606.zip b/.yarn/cache/@parcel-transformer-html-npm-2.12.0-be2b9ee40c-7fcfac62ca.zip similarity index 79% rename from .yarn/cache/@parcel-transformer-html-npm-2.10.0-b6d2228044-f28e0d3606.zip rename to .yarn/cache/@parcel-transformer-html-npm-2.12.0-be2b9ee40c-7fcfac62ca.zip index 5a84fa37da..3628f3f90d 100644 Binary files a/.yarn/cache/@parcel-transformer-html-npm-2.10.0-b6d2228044-f28e0d3606.zip and b/.yarn/cache/@parcel-transformer-html-npm-2.12.0-be2b9ee40c-7fcfac62ca.zip differ diff --git a/.yarn/cache/@parcel-transformer-image-npm-2.10.0-e63bd526ed-61a47d7d8e.zip b/.yarn/cache/@parcel-transformer-image-npm-2.12.0-53f04e21c0-0a1581eacc.zip similarity index 50% rename from .yarn/cache/@parcel-transformer-image-npm-2.10.0-e63bd526ed-61a47d7d8e.zip rename to .yarn/cache/@parcel-transformer-image-npm-2.12.0-53f04e21c0-0a1581eacc.zip index 18ec9cbd62..3a78e4e070 100644 Binary files a/.yarn/cache/@parcel-transformer-image-npm-2.10.0-e63bd526ed-61a47d7d8e.zip and b/.yarn/cache/@parcel-transformer-image-npm-2.12.0-53f04e21c0-0a1581eacc.zip differ diff --git a/.yarn/cache/@parcel-transformer-inline-string-npm-2.10.0-44c9b349db-618c919108.zip b/.yarn/cache/@parcel-transformer-inline-string-npm-2.10.0-44c9b349db-618c919108.zip deleted file mode 100644 index 636b8cafdc..0000000000 Binary files a/.yarn/cache/@parcel-transformer-inline-string-npm-2.10.0-44c9b349db-618c919108.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-transformer-inline-string-npm-2.12.0-a33f10bafa-5f63c08695.zip b/.yarn/cache/@parcel-transformer-inline-string-npm-2.12.0-a33f10bafa-5f63c08695.zip new file mode 100644 index 0000000000..0c4f3341c8 Binary files /dev/null and b/.yarn/cache/@parcel-transformer-inline-string-npm-2.12.0-a33f10bafa-5f63c08695.zip differ diff --git a/.yarn/cache/@parcel-transformer-js-npm-2.10.0-132e460926-e9944ce77c.zip b/.yarn/cache/@parcel-transformer-js-npm-2.10.0-132e460926-e9944ce77c.zip deleted file mode 100644 index ff6263c541..0000000000 Binary files a/.yarn/cache/@parcel-transformer-js-npm-2.10.0-132e460926-e9944ce77c.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-transformer-js-npm-2.12.0-404d54db18-b9fe4c887b.zip b/.yarn/cache/@parcel-transformer-js-npm-2.12.0-404d54db18-b9fe4c887b.zip new file mode 100644 index 0000000000..1ce667ac8d Binary files /dev/null and b/.yarn/cache/@parcel-transformer-js-npm-2.12.0-404d54db18-b9fe4c887b.zip differ diff --git a/.yarn/cache/@parcel-transformer-json-npm-2.10.0-5525143f86-9c7aceb8e6.zip b/.yarn/cache/@parcel-transformer-json-npm-2.10.0-5525143f86-9c7aceb8e6.zip deleted file mode 100644 index e821a47649..0000000000 Binary files a/.yarn/cache/@parcel-transformer-json-npm-2.10.0-5525143f86-9c7aceb8e6.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-transformer-json-npm-2.12.0-652d8d99d2-a711cb65a8.zip b/.yarn/cache/@parcel-transformer-json-npm-2.12.0-652d8d99d2-a711cb65a8.zip new file mode 100644 index 
0000000000..926c01eb81 Binary files /dev/null and b/.yarn/cache/@parcel-transformer-json-npm-2.12.0-652d8d99d2-a711cb65a8.zip differ diff --git a/.yarn/cache/@parcel-transformer-postcss-npm-2.10.0-c1f60c708a-2e524bd513.zip b/.yarn/cache/@parcel-transformer-postcss-npm-2.12.0-f0cfb95fac-b210044a7f.zip similarity index 64% rename from .yarn/cache/@parcel-transformer-postcss-npm-2.10.0-c1f60c708a-2e524bd513.zip rename to .yarn/cache/@parcel-transformer-postcss-npm-2.12.0-f0cfb95fac-b210044a7f.zip index 5e78f70b2d..3bbacafa81 100644 Binary files a/.yarn/cache/@parcel-transformer-postcss-npm-2.10.0-c1f60c708a-2e524bd513.zip and b/.yarn/cache/@parcel-transformer-postcss-npm-2.12.0-f0cfb95fac-b210044a7f.zip differ diff --git a/.yarn/cache/@parcel-transformer-posthtml-npm-2.10.0-31d54ed3f0-7de343f0f9.zip b/.yarn/cache/@parcel-transformer-posthtml-npm-2.10.0-31d54ed3f0-7de343f0f9.zip deleted file mode 100644 index 5228dc9af8..0000000000 Binary files a/.yarn/cache/@parcel-transformer-posthtml-npm-2.10.0-31d54ed3f0-7de343f0f9.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-transformer-posthtml-npm-2.12.0-41c570db12-b62582ae7e.zip b/.yarn/cache/@parcel-transformer-posthtml-npm-2.12.0-41c570db12-b62582ae7e.zip new file mode 100644 index 0000000000..e912a09713 Binary files /dev/null and b/.yarn/cache/@parcel-transformer-posthtml-npm-2.12.0-41c570db12-b62582ae7e.zip differ diff --git a/.yarn/cache/@parcel-transformer-raw-npm-2.10.0-d7cd50f767-c7b1b9c6f7.zip b/.yarn/cache/@parcel-transformer-raw-npm-2.10.0-d7cd50f767-c7b1b9c6f7.zip deleted file mode 100644 index 3c536f6417..0000000000 Binary files a/.yarn/cache/@parcel-transformer-raw-npm-2.10.0-d7cd50f767-c7b1b9c6f7.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-transformer-raw-npm-2.12.0-bd2cb66ddf-de6681e2e7.zip b/.yarn/cache/@parcel-transformer-raw-npm-2.12.0-bd2cb66ddf-de6681e2e7.zip new file mode 100644 index 0000000000..40b7e2d3c4 Binary files /dev/null and b/.yarn/cache/@parcel-transformer-raw-npm-2.12.0-bd2cb66ddf-de6681e2e7.zip differ diff --git a/.yarn/cache/@parcel-transformer-react-refresh-wrap-npm-2.10.0-4c3ddcc095-fc3163bcb0.zip b/.yarn/cache/@parcel-transformer-react-refresh-wrap-npm-2.12.0-59ed68910f-9aba8c1ab0.zip similarity index 61% rename from .yarn/cache/@parcel-transformer-react-refresh-wrap-npm-2.10.0-4c3ddcc095-fc3163bcb0.zip rename to .yarn/cache/@parcel-transformer-react-refresh-wrap-npm-2.12.0-59ed68910f-9aba8c1ab0.zip index 67e2da6e22..23210becb7 100644 Binary files a/.yarn/cache/@parcel-transformer-react-refresh-wrap-npm-2.10.0-4c3ddcc095-fc3163bcb0.zip and b/.yarn/cache/@parcel-transformer-react-refresh-wrap-npm-2.12.0-59ed68910f-9aba8c1ab0.zip differ diff --git a/.yarn/cache/@parcel-transformer-sass-npm-2.10.0-6c5f188bcc-2d697077ac.zip b/.yarn/cache/@parcel-transformer-sass-npm-2.12.0-ef787eef35-ce6b4d329b.zip similarity index 51% rename from .yarn/cache/@parcel-transformer-sass-npm-2.10.0-6c5f188bcc-2d697077ac.zip rename to .yarn/cache/@parcel-transformer-sass-npm-2.12.0-ef787eef35-ce6b4d329b.zip index 3caad5c9a7..d62c342067 100644 Binary files a/.yarn/cache/@parcel-transformer-sass-npm-2.10.0-6c5f188bcc-2d697077ac.zip and b/.yarn/cache/@parcel-transformer-sass-npm-2.12.0-ef787eef35-ce6b4d329b.zip differ diff --git a/.yarn/cache/@parcel-transformer-svg-npm-2.10.0-881c72cd1f-d5f55f6eee.zip b/.yarn/cache/@parcel-transformer-svg-npm-2.12.0-f41b181676-92b7c65894.zip similarity index 71% rename from .yarn/cache/@parcel-transformer-svg-npm-2.10.0-881c72cd1f-d5f55f6eee.zip rename to 
.yarn/cache/@parcel-transformer-svg-npm-2.12.0-f41b181676-92b7c65894.zip index b6ef15207c..01af21f6a3 100644 Binary files a/.yarn/cache/@parcel-transformer-svg-npm-2.10.0-881c72cd1f-d5f55f6eee.zip and b/.yarn/cache/@parcel-transformer-svg-npm-2.12.0-f41b181676-92b7c65894.zip differ diff --git a/.yarn/cache/@parcel-types-npm-2.10.0-270e786ba1-387aa07902.zip b/.yarn/cache/@parcel-types-npm-2.10.0-270e786ba1-387aa07902.zip deleted file mode 100644 index 59297ef889..0000000000 Binary files a/.yarn/cache/@parcel-types-npm-2.10.0-270e786ba1-387aa07902.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-types-npm-2.12.0-ffe47febbf-250f95580c.zip b/.yarn/cache/@parcel-types-npm-2.12.0-ffe47febbf-250f95580c.zip new file mode 100644 index 0000000000..ea6decc566 Binary files /dev/null and b/.yarn/cache/@parcel-types-npm-2.12.0-ffe47febbf-250f95580c.zip differ diff --git a/.yarn/cache/@parcel-utils-npm-2.10.0-1f25fbc366-9f4953ff9a.zip b/.yarn/cache/@parcel-utils-npm-2.10.0-1f25fbc366-9f4953ff9a.zip deleted file mode 100644 index 5162312f75..0000000000 Binary files a/.yarn/cache/@parcel-utils-npm-2.10.0-1f25fbc366-9f4953ff9a.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-utils-npm-2.12.0-d8a9a48a66-ba80a60fed.zip b/.yarn/cache/@parcel-utils-npm-2.12.0-d8a9a48a66-ba80a60fed.zip new file mode 100644 index 0000000000..8eda598941 Binary files /dev/null and b/.yarn/cache/@parcel-utils-npm-2.12.0-d8a9a48a66-ba80a60fed.zip differ diff --git a/.yarn/cache/@parcel-workers-npm-2.10.0-7f8aa5ad5a-e8b1701b53.zip b/.yarn/cache/@parcel-workers-npm-2.10.0-7f8aa5ad5a-e8b1701b53.zip deleted file mode 100644 index d3e73822e5..0000000000 Binary files a/.yarn/cache/@parcel-workers-npm-2.10.0-7f8aa5ad5a-e8b1701b53.zip and /dev/null differ diff --git a/.yarn/cache/@parcel-workers-npm-2.12.0-3ddd4664bc-e19c3c0a66.zip b/.yarn/cache/@parcel-workers-npm-2.12.0-3ddd4664bc-e19c3c0a66.zip new file mode 100644 index 0000000000..53f28c9470 Binary files /dev/null and b/.yarn/cache/@parcel-workers-npm-2.12.0-3ddd4664bc-e19c3c0a66.zip differ diff --git a/.yarn/cache/@rollup-pluginutils-npm-5.0.5-cfa8fafc53-dcd4d6e3cb.zip b/.yarn/cache/@rollup-pluginutils-npm-5.0.5-cfa8fafc53-dcd4d6e3cb.zip deleted file mode 100644 index 91a5a69139..0000000000 Binary files a/.yarn/cache/@rollup-pluginutils-npm-5.0.5-cfa8fafc53-dcd4d6e3cb.zip and /dev/null differ diff --git a/.yarn/cache/@rollup-pluginutils-npm-5.1.0-6939820ef8-3cc5a6d914.zip b/.yarn/cache/@rollup-pluginutils-npm-5.1.0-6939820ef8-3cc5a6d914.zip new file mode 100644 index 0000000000..923a7a91a8 Binary files /dev/null and b/.yarn/cache/@rollup-pluginutils-npm-5.1.0-6939820ef8-3cc5a6d914.zip differ diff --git a/.yarn/cache/@sidvind-better-ajv-errors-npm-2.0.0-3531bddef9-12b0d87855.zip b/.yarn/cache/@sidvind-better-ajv-errors-npm-2.0.0-3531bddef9-12b0d87855.zip deleted file mode 100644 index 5990cc7604..0000000000 Binary files a/.yarn/cache/@sidvind-better-ajv-errors-npm-2.0.0-3531bddef9-12b0d87855.zip and /dev/null differ diff --git a/.yarn/cache/@sidvind-better-ajv-errors-npm-2.1.3-e3d1c524a8-949cb805a1.zip b/.yarn/cache/@sidvind-better-ajv-errors-npm-2.1.3-e3d1c524a8-949cb805a1.zip new file mode 100644 index 0000000000..ad36770e19 Binary files /dev/null and b/.yarn/cache/@sidvind-better-ajv-errors-npm-2.1.3-e3d1c524a8-949cb805a1.zip differ diff --git a/.yarn/cache/@ungap-structured-clone-npm-1.2.0-648f0b82e0-4f656b7b46.zip b/.yarn/cache/@ungap-structured-clone-npm-1.2.0-648f0b82e0-4f656b7b46.zip new file mode 100644 index 0000000000..598a36e085 Binary files /dev/null and 
b/.yarn/cache/@ungap-structured-clone-npm-1.2.0-648f0b82e0-4f656b7b46.zip differ diff --git a/.yarn/cache/@vitejs-plugin-vue-npm-4.4.0-c33d65c6f6-37b6987951.zip b/.yarn/cache/@vitejs-plugin-vue-npm-4.4.0-c33d65c6f6-37b6987951.zip deleted file mode 100644 index 96bb8b3013..0000000000 Binary files a/.yarn/cache/@vitejs-plugin-vue-npm-4.4.0-c33d65c6f6-37b6987951.zip and /dev/null differ diff --git a/.yarn/cache/@vitejs-plugin-vue-npm-4.6.2-d7ace53203-01bc4ed643.zip b/.yarn/cache/@vitejs-plugin-vue-npm-4.6.2-d7ace53203-01bc4ed643.zip new file mode 100644 index 0000000000..7cf07fbe2d Binary files /dev/null and b/.yarn/cache/@vitejs-plugin-vue-npm-4.6.2-d7ace53203-01bc4ed643.zip differ diff --git a/.yarn/cache/@volar-language-core-npm-2.1.4-18ee1a037d-7430f65143.zip b/.yarn/cache/@volar-language-core-npm-2.1.4-18ee1a037d-7430f65143.zip new file mode 100644 index 0000000000..25e6d3f94d Binary files /dev/null and b/.yarn/cache/@volar-language-core-npm-2.1.4-18ee1a037d-7430f65143.zip differ diff --git a/.yarn/cache/@volar-language-service-npm-2.1.4-2d34cb628f-06cdcfacf0.zip b/.yarn/cache/@volar-language-service-npm-2.1.4-2d34cb628f-06cdcfacf0.zip new file mode 100644 index 0000000000..5f494d902e Binary files /dev/null and b/.yarn/cache/@volar-language-service-npm-2.1.4-2d34cb628f-06cdcfacf0.zip differ diff --git a/.yarn/cache/@volar-source-map-npm-2.1.4-5963b1701f-e2f65bcfd6.zip b/.yarn/cache/@volar-source-map-npm-2.1.4-5963b1701f-e2f65bcfd6.zip new file mode 100644 index 0000000000..0ea96c4d97 Binary files /dev/null and b/.yarn/cache/@volar-source-map-npm-2.1.4-5963b1701f-e2f65bcfd6.zip differ diff --git a/.yarn/cache/@vscode-l10n-npm-0.0.18-8a12efe4b5-c33876cebd.zip b/.yarn/cache/@vscode-l10n-npm-0.0.18-8a12efe4b5-c33876cebd.zip new file mode 100644 index 0000000000..2d6533a204 Binary files /dev/null and b/.yarn/cache/@vscode-l10n-npm-0.0.18-8a12efe4b5-c33876cebd.zip differ diff --git a/.yarn/cache/@vue-compiler-core-npm-3.3.4-e514bded25-5437942ea6.zip b/.yarn/cache/@vue-compiler-core-npm-3.3.4-e514bded25-5437942ea6.zip deleted file mode 100644 index c09b47c6a8..0000000000 Binary files a/.yarn/cache/@vue-compiler-core-npm-3.3.4-e514bded25-5437942ea6.zip and /dev/null differ diff --git a/.yarn/cache/@vue-compiler-core-npm-3.4.21-ec7f24d7f5-0d6b7732bc.zip b/.yarn/cache/@vue-compiler-core-npm-3.4.21-ec7f24d7f5-0d6b7732bc.zip new file mode 100644 index 0000000000..ba6ec89e54 Binary files /dev/null and b/.yarn/cache/@vue-compiler-core-npm-3.4.21-ec7f24d7f5-0d6b7732bc.zip differ diff --git a/.yarn/cache/@vue-compiler-dom-npm-3.3.4-029250af79-1c2ac0c89d.zip b/.yarn/cache/@vue-compiler-dom-npm-3.3.4-029250af79-1c2ac0c89d.zip deleted file mode 100644 index cc5c3904dd..0000000000 Binary files a/.yarn/cache/@vue-compiler-dom-npm-3.3.4-029250af79-1c2ac0c89d.zip and /dev/null differ diff --git a/.yarn/cache/@vue-compiler-dom-npm-3.4.21-3d49f99020-f53e4f4e0a.zip b/.yarn/cache/@vue-compiler-dom-npm-3.4.21-3d49f99020-f53e4f4e0a.zip new file mode 100644 index 0000000000..4d0c8cd01f Binary files /dev/null and b/.yarn/cache/@vue-compiler-dom-npm-3.4.21-3d49f99020-f53e4f4e0a.zip differ diff --git a/.yarn/cache/@vue-compiler-sfc-npm-3.3.4-783aff746b-0a0adfdd3e.zip b/.yarn/cache/@vue-compiler-sfc-npm-3.3.4-783aff746b-0a0adfdd3e.zip deleted file mode 100644 index 40573c4dc3..0000000000 Binary files a/.yarn/cache/@vue-compiler-sfc-npm-3.3.4-783aff746b-0a0adfdd3e.zip and /dev/null differ diff --git a/.yarn/cache/@vue-compiler-sfc-npm-3.4.21-c2b76ee1ff-226dc404be.zip 
b/.yarn/cache/@vue-compiler-sfc-npm-3.4.21-c2b76ee1ff-226dc404be.zip new file mode 100644 index 0000000000..95e0d0d70c Binary files /dev/null and b/.yarn/cache/@vue-compiler-sfc-npm-3.4.21-c2b76ee1ff-226dc404be.zip differ diff --git a/.yarn/cache/@vue-compiler-ssr-npm-3.3.4-9c5036c29f-5d1875d55e.zip b/.yarn/cache/@vue-compiler-ssr-npm-3.3.4-9c5036c29f-5d1875d55e.zip deleted file mode 100644 index 2fb9d33d8a..0000000000 Binary files a/.yarn/cache/@vue-compiler-ssr-npm-3.3.4-9c5036c29f-5d1875d55e.zip and /dev/null differ diff --git a/.yarn/cache/@vue-compiler-ssr-npm-3.4.21-e6f043341e-c510bee68b.zip b/.yarn/cache/@vue-compiler-ssr-npm-3.4.21-e6f043341e-c510bee68b.zip new file mode 100644 index 0000000000..f03e17b080 Binary files /dev/null and b/.yarn/cache/@vue-compiler-ssr-npm-3.4.21-e6f043341e-c510bee68b.zip differ diff --git a/.yarn/cache/@vue-devtools-api-npm-6.6.1-ef3c82703e-cf12b5ebcc.zip b/.yarn/cache/@vue-devtools-api-npm-6.6.1-ef3c82703e-cf12b5ebcc.zip new file mode 100644 index 0000000000..f14e2cdac7 Binary files /dev/null and b/.yarn/cache/@vue-devtools-api-npm-6.6.1-ef3c82703e-cf12b5ebcc.zip differ diff --git a/.yarn/cache/@vue-language-plugin-pug-npm-2.0.7-547300c7e0-11cc96eb5f.zip b/.yarn/cache/@vue-language-plugin-pug-npm-2.0.7-547300c7e0-11cc96eb5f.zip new file mode 100644 index 0000000000..e637e5f556 Binary files /dev/null and b/.yarn/cache/@vue-language-plugin-pug-npm-2.0.7-547300c7e0-11cc96eb5f.zip differ diff --git a/.yarn/cache/@vue-reactivity-npm-3.3.4-4bb841d3a9-81c3d0c587.zip b/.yarn/cache/@vue-reactivity-npm-3.3.4-4bb841d3a9-81c3d0c587.zip deleted file mode 100644 index 38458a8899..0000000000 Binary files a/.yarn/cache/@vue-reactivity-npm-3.3.4-4bb841d3a9-81c3d0c587.zip and /dev/null differ diff --git a/.yarn/cache/@vue-reactivity-npm-3.4.21-fd3e254d08-79c7ebe3ec.zip b/.yarn/cache/@vue-reactivity-npm-3.4.21-fd3e254d08-79c7ebe3ec.zip new file mode 100644 index 0000000000..adc965a473 Binary files /dev/null and b/.yarn/cache/@vue-reactivity-npm-3.4.21-fd3e254d08-79c7ebe3ec.zip differ diff --git a/.yarn/cache/@vue-reactivity-transform-npm-3.3.4-bfbf394bf7-b425e78b20.zip b/.yarn/cache/@vue-reactivity-transform-npm-3.3.4-bfbf394bf7-b425e78b20.zip deleted file mode 100644 index 4759f812bd..0000000000 Binary files a/.yarn/cache/@vue-reactivity-transform-npm-3.3.4-bfbf394bf7-b425e78b20.zip and /dev/null differ diff --git a/.yarn/cache/@vue-runtime-core-npm-3.3.4-4a56fcce5e-d402da5126.zip b/.yarn/cache/@vue-runtime-core-npm-3.3.4-4a56fcce5e-d402da5126.zip deleted file mode 100644 index aa4a131e69..0000000000 Binary files a/.yarn/cache/@vue-runtime-core-npm-3.3.4-4a56fcce5e-d402da5126.zip and /dev/null differ diff --git a/.yarn/cache/@vue-runtime-core-npm-3.4.21-7bf985040b-4eb9b5d91f.zip b/.yarn/cache/@vue-runtime-core-npm-3.4.21-7bf985040b-4eb9b5d91f.zip new file mode 100644 index 0000000000..ffb48a907a Binary files /dev/null and b/.yarn/cache/@vue-runtime-core-npm-3.4.21-7bf985040b-4eb9b5d91f.zip differ diff --git a/.yarn/cache/@vue-runtime-dom-npm-3.3.4-554b8c4277-dac9ada7f6.zip b/.yarn/cache/@vue-runtime-dom-npm-3.3.4-554b8c4277-dac9ada7f6.zip deleted file mode 100644 index b1011236c7..0000000000 Binary files a/.yarn/cache/@vue-runtime-dom-npm-3.3.4-554b8c4277-dac9ada7f6.zip and /dev/null differ diff --git a/.yarn/cache/@vue-runtime-dom-npm-3.4.21-40f99cf9a2-ebfdaa081f.zip b/.yarn/cache/@vue-runtime-dom-npm-3.4.21-40f99cf9a2-ebfdaa081f.zip new file mode 100644 index 0000000000..c65601f0a9 Binary files /dev/null and 
b/.yarn/cache/@vue-runtime-dom-npm-3.4.21-40f99cf9a2-ebfdaa081f.zip differ diff --git a/.yarn/cache/@vue-server-renderer-npm-3.3.4-75b963f24d-e8598ed1a4.zip b/.yarn/cache/@vue-server-renderer-npm-3.3.4-75b963f24d-e8598ed1a4.zip deleted file mode 100644 index 9917a9c898..0000000000 Binary files a/.yarn/cache/@vue-server-renderer-npm-3.3.4-75b963f24d-e8598ed1a4.zip and /dev/null differ diff --git a/.yarn/cache/@vue-server-renderer-npm-3.4.21-bf6b2daebb-faa3dc4876.zip b/.yarn/cache/@vue-server-renderer-npm-3.4.21-bf6b2daebb-faa3dc4876.zip new file mode 100644 index 0000000000..4da755254b Binary files /dev/null and b/.yarn/cache/@vue-server-renderer-npm-3.4.21-bf6b2daebb-faa3dc4876.zip differ diff --git a/.yarn/cache/@vue-shared-npm-3.3.4-76d250afa2-12fe53ff81.zip b/.yarn/cache/@vue-shared-npm-3.3.4-76d250afa2-12fe53ff81.zip deleted file mode 100644 index 8d67c0c0d3..0000000000 Binary files a/.yarn/cache/@vue-shared-npm-3.3.4-76d250afa2-12fe53ff81.zip and /dev/null differ diff --git a/.yarn/cache/@vue-shared-npm-3.4.21-2aee4ae0bc-5f30a40891.zip b/.yarn/cache/@vue-shared-npm-3.4.21-2aee4ae0bc-5f30a40891.zip new file mode 100644 index 0000000000..01c52809b3 Binary files /dev/null and b/.yarn/cache/@vue-shared-npm-3.4.21-2aee4ae0bc-5f30a40891.zip differ diff --git a/.yarn/cache/array-includes-npm-3.1.6-d0ff9d248b-f22f8cd8ba.zip b/.yarn/cache/array-includes-npm-3.1.6-d0ff9d248b-f22f8cd8ba.zip deleted file mode 100644 index a1083551bf..0000000000 Binary files a/.yarn/cache/array-includes-npm-3.1.6-d0ff9d248b-f22f8cd8ba.zip and /dev/null differ diff --git a/.yarn/cache/array-includes-npm-3.1.7-d32a5ee179-06f9e4598f.zip b/.yarn/cache/array-includes-npm-3.1.7-d32a5ee179-06f9e4598f.zip new file mode 100644 index 0000000000..1f7fc2c577 Binary files /dev/null and b/.yarn/cache/array-includes-npm-3.1.7-d32a5ee179-06f9e4598f.zip differ diff --git a/.yarn/cache/array.prototype.findlastindex-npm-1.2.2-dc5ee7bf67-8a166359f6.zip b/.yarn/cache/array.prototype.findlastindex-npm-1.2.2-dc5ee7bf67-8a166359f6.zip deleted file mode 100644 index 43baf2b331..0000000000 Binary files a/.yarn/cache/array.prototype.findlastindex-npm-1.2.2-dc5ee7bf67-8a166359f6.zip and /dev/null differ diff --git a/.yarn/cache/array.prototype.findlastindex-npm-1.2.3-2a36f4417b-31f35d7b37.zip b/.yarn/cache/array.prototype.findlastindex-npm-1.2.3-2a36f4417b-31f35d7b37.zip new file mode 100644 index 0000000000..8aaa4a956a Binary files /dev/null and b/.yarn/cache/array.prototype.findlastindex-npm-1.2.3-2a36f4417b-31f35d7b37.zip differ diff --git a/.yarn/cache/array.prototype.flat-npm-1.3.1-e9a9e389c0-5a8415949d.zip b/.yarn/cache/array.prototype.flat-npm-1.3.2-350729f7f4-5d6b4bf102.zip similarity index 55% rename from .yarn/cache/array.prototype.flat-npm-1.3.1-e9a9e389c0-5a8415949d.zip rename to .yarn/cache/array.prototype.flat-npm-1.3.2-350729f7f4-5d6b4bf102.zip index f3e42570b9..7720137d70 100644 Binary files a/.yarn/cache/array.prototype.flat-npm-1.3.1-e9a9e389c0-5a8415949d.zip and b/.yarn/cache/array.prototype.flat-npm-1.3.2-350729f7f4-5d6b4bf102.zip differ diff --git a/.yarn/cache/array.prototype.flatmap-npm-1.3.1-c65186ca34-8c1c43a499.zip b/.yarn/cache/array.prototype.flatmap-npm-1.3.2-5c6a4af226-ce09fe21dc.zip similarity index 61% rename from .yarn/cache/array.prototype.flatmap-npm-1.3.1-c65186ca34-8c1c43a499.zip rename to .yarn/cache/array.prototype.flatmap-npm-1.3.2-5c6a4af226-ce09fe21dc.zip index a791d28dca..2553a317f1 100644 Binary files a/.yarn/cache/array.prototype.flatmap-npm-1.3.1-c65186ca34-8c1c43a499.zip and 
b/.yarn/cache/array.prototype.flatmap-npm-1.3.2-5c6a4af226-ce09fe21dc.zip differ diff --git a/.yarn/cache/arraybuffer.prototype.slice-npm-1.0.1-d44cb5acc0-e3e9b2a3e9.zip b/.yarn/cache/arraybuffer.prototype.slice-npm-1.0.2-4eda52ad8c-c200faf437.zip similarity index 59% rename from .yarn/cache/arraybuffer.prototype.slice-npm-1.0.1-d44cb5acc0-e3e9b2a3e9.zip rename to .yarn/cache/arraybuffer.prototype.slice-npm-1.0.2-4eda52ad8c-c200faf437.zip index aa44dd7138..559e55f81a 100644 Binary files a/.yarn/cache/arraybuffer.prototype.slice-npm-1.0.1-d44cb5acc0-e3e9b2a3e9.zip and b/.yarn/cache/arraybuffer.prototype.slice-npm-1.0.2-4eda52ad8c-c200faf437.zip differ diff --git a/.yarn/cache/bootstrap-icons-npm-1.11.1-9f55aea76a-d78ff24a83.zip b/.yarn/cache/bootstrap-icons-npm-1.11.1-9f55aea76a-d78ff24a83.zip deleted file mode 100644 index 8124ba6569..0000000000 Binary files a/.yarn/cache/bootstrap-icons-npm-1.11.1-9f55aea76a-d78ff24a83.zip and /dev/null differ diff --git a/.yarn/cache/bootstrap-icons-npm-1.11.3-8d5387bef2-d5cdb90fe3.zip b/.yarn/cache/bootstrap-icons-npm-1.11.3-8d5387bef2-d5cdb90fe3.zip new file mode 100644 index 0000000000..e20ab2ecb3 Binary files /dev/null and b/.yarn/cache/bootstrap-icons-npm-1.11.3-8d5387bef2-d5cdb90fe3.zip differ diff --git a/.yarn/cache/bootstrap-npm-5.3.2-20b391b636-d5580b253d.zip b/.yarn/cache/bootstrap-npm-5.3.2-20b391b636-d5580b253d.zip deleted file mode 100644 index a575a52461..0000000000 Binary files a/.yarn/cache/bootstrap-npm-5.3.2-20b391b636-d5580b253d.zip and /dev/null differ diff --git a/.yarn/cache/bootstrap-npm-5.3.3-da08e2f0fe-537b68db30.zip b/.yarn/cache/bootstrap-npm-5.3.3-da08e2f0fe-537b68db30.zip new file mode 100644 index 0000000000..ca3961acc1 Binary files /dev/null and b/.yarn/cache/bootstrap-npm-5.3.3-da08e2f0fe-537b68db30.zip differ diff --git a/.yarn/cache/builtin-modules-npm-3.3.0-db4f3d32de-db021755d7.zip b/.yarn/cache/builtin-modules-npm-3.3.0-db4f3d32de-db021755d7.zip new file mode 100644 index 0000000000..c7e20444c6 Binary files /dev/null and b/.yarn/cache/builtin-modules-npm-3.3.0-db4f3d32de-db021755d7.zip differ diff --git a/.yarn/cache/c8-npm-8.0.1-6462c8130b-2c47531d21.zip b/.yarn/cache/c8-npm-9.1.0-92c3d37f46-c5249bf9c3.zip similarity index 50% rename from .yarn/cache/c8-npm-8.0.1-6462c8130b-2c47531d21.zip rename to .yarn/cache/c8-npm-9.1.0-92c3d37f46-c5249bf9c3.zip index ff8bf91a24..1e5812b784 100644 Binary files a/.yarn/cache/c8-npm-8.0.1-6462c8130b-2c47531d21.zip and b/.yarn/cache/c8-npm-9.1.0-92c3d37f46-c5249bf9c3.zip differ diff --git a/.yarn/cache/call-bind-npm-1.0.5-65600fae47-449e83ecbd.zip b/.yarn/cache/call-bind-npm-1.0.5-65600fae47-449e83ecbd.zip new file mode 100644 index 0000000000..29854c129a Binary files /dev/null and b/.yarn/cache/call-bind-npm-1.0.5-65600fae47-449e83ecbd.zip differ diff --git a/.yarn/cache/caniuse-lite-npm-1.0.30001538-68bfe8259b-94c5d55757.zip b/.yarn/cache/caniuse-lite-npm-1.0.30001538-68bfe8259b-94c5d55757.zip deleted file mode 100644 index 12c494fb8d..0000000000 Binary files a/.yarn/cache/caniuse-lite-npm-1.0.30001538-68bfe8259b-94c5d55757.zip and /dev/null differ diff --git a/.yarn/cache/caniuse-lite-npm-1.0.30001603-77af81f60b-e66e0d24b8.zip b/.yarn/cache/caniuse-lite-npm-1.0.30001603-77af81f60b-e66e0d24b8.zip new file mode 100644 index 0000000000..f3bd2d06bc Binary files /dev/null and b/.yarn/cache/caniuse-lite-npm-1.0.30001603-77af81f60b-e66e0d24b8.zip differ diff --git a/.yarn/cache/csstype-npm-3.1.2-cead7d99b2-e1a52e6c25.zip b/.yarn/cache/csstype-npm-3.1.2-cead7d99b2-e1a52e6c25.zip deleted 
file mode 100644 index 740042eb39..0000000000 Binary files a/.yarn/cache/csstype-npm-3.1.2-cead7d99b2-e1a52e6c25.zip and /dev/null differ diff --git a/.yarn/cache/csstype-npm-3.1.3-e9a1c85013-8db785cc92.zip b/.yarn/cache/csstype-npm-3.1.3-e9a1c85013-8db785cc92.zip new file mode 100644 index 0000000000..9853f0cf0b Binary files /dev/null and b/.yarn/cache/csstype-npm-3.1.3-e9a1c85013-8db785cc92.zip differ diff --git a/.yarn/cache/d3-npm-7.8.5-5db20a5616-e407e79731.zip b/.yarn/cache/d3-npm-7.8.5-5db20a5616-e407e79731.zip deleted file mode 100644 index b06dd3f260..0000000000 Binary files a/.yarn/cache/d3-npm-7.8.5-5db20a5616-e407e79731.zip and /dev/null differ diff --git a/.yarn/cache/d3-npm-7.9.0-d293821ce6-1c0e9135f1.zip b/.yarn/cache/d3-npm-7.9.0-d293821ce6-1c0e9135f1.zip new file mode 100644 index 0000000000..e78ffffee5 Binary files /dev/null and b/.yarn/cache/d3-npm-7.9.0-d293821ce6-1c0e9135f1.zip differ diff --git a/.yarn/cache/deepmerge-npm-4.2.2-112165ced2-a8c43a1ed8.zip b/.yarn/cache/deepmerge-npm-4.2.2-112165ced2-a8c43a1ed8.zip deleted file mode 100644 index 3e07a61c47..0000000000 Binary files a/.yarn/cache/deepmerge-npm-4.2.2-112165ced2-a8c43a1ed8.zip and /dev/null differ diff --git a/.yarn/cache/define-data-property-npm-1.1.1-2b5156d112-a29855ad3f.zip b/.yarn/cache/define-data-property-npm-1.1.1-2b5156d112-a29855ad3f.zip new file mode 100644 index 0000000000..75936e2374 Binary files /dev/null and b/.yarn/cache/define-data-property-npm-1.1.1-2b5156d112-a29855ad3f.zip differ diff --git a/.yarn/cache/entities-npm-4.5.0-7cdb83b832-853f8ebd5b.zip b/.yarn/cache/entities-npm-4.5.0-7cdb83b832-853f8ebd5b.zip new file mode 100644 index 0000000000..3772a4510c Binary files /dev/null and b/.yarn/cache/entities-npm-4.5.0-7cdb83b832-853f8ebd5b.zip differ diff --git a/.yarn/cache/es-abstract-npm-1.21.1-28d9a4a469-23ff60d42d.zip b/.yarn/cache/es-abstract-npm-1.21.1-28d9a4a469-23ff60d42d.zip deleted file mode 100644 index 46e5d7d326..0000000000 Binary files a/.yarn/cache/es-abstract-npm-1.21.1-28d9a4a469-23ff60d42d.zip and /dev/null differ diff --git a/.yarn/cache/es-abstract-npm-1.22.1-bfe4c9a3e1-614e2c1c37.zip b/.yarn/cache/es-abstract-npm-1.22.3-15a58832e5-b1bdc96285.zip similarity index 78% rename from .yarn/cache/es-abstract-npm-1.22.1-bfe4c9a3e1-614e2c1c37.zip rename to .yarn/cache/es-abstract-npm-1.22.3-15a58832e5-b1bdc96285.zip index 1f0b5c1d9d..f72f30d6f5 100644 Binary files a/.yarn/cache/es-abstract-npm-1.22.1-bfe4c9a3e1-614e2c1c37.zip and b/.yarn/cache/es-abstract-npm-1.22.3-15a58832e5-b1bdc96285.zip differ diff --git a/.yarn/cache/eslint-compat-utils-npm-0.1.2-361c6992b1-2315d9db81.zip b/.yarn/cache/eslint-compat-utils-npm-0.1.2-361c6992b1-2315d9db81.zip new file mode 100644 index 0000000000..505e336b08 Binary files /dev/null and b/.yarn/cache/eslint-compat-utils-npm-0.1.2-361c6992b1-2315d9db81.zip differ diff --git a/.yarn/cache/eslint-import-resolver-node-npm-0.3.7-65bed19543-3379aacf1d.zip b/.yarn/cache/eslint-import-resolver-node-npm-0.3.7-65bed19543-3379aacf1d.zip deleted file mode 100644 index f9111a337b..0000000000 Binary files a/.yarn/cache/eslint-import-resolver-node-npm-0.3.7-65bed19543-3379aacf1d.zip and /dev/null differ diff --git a/.yarn/cache/eslint-import-resolver-node-npm-0.3.9-2a426afc4b-439b912712.zip b/.yarn/cache/eslint-import-resolver-node-npm-0.3.9-2a426afc4b-439b912712.zip new file mode 100644 index 0000000000..f2e17574bd Binary files /dev/null and b/.yarn/cache/eslint-import-resolver-node-npm-0.3.9-2a426afc4b-439b912712.zip differ diff --git 
a/.yarn/cache/eslint-npm-8.51.0-77fce3ec74-214fa5d1fc.zip b/.yarn/cache/eslint-npm-8.57.0-4286e12a3a-3a48d7ff85.zip similarity index 61% rename from .yarn/cache/eslint-npm-8.51.0-77fce3ec74-214fa5d1fc.zip rename to .yarn/cache/eslint-npm-8.57.0-4286e12a3a-3a48d7ff85.zip index 997dda7f1a..73f8f9dff6 100644 Binary files a/.yarn/cache/eslint-npm-8.51.0-77fce3ec74-214fa5d1fc.zip and b/.yarn/cache/eslint-npm-8.57.0-4286e12a3a-3a48d7ff85.zip differ diff --git a/.yarn/cache/eslint-plugin-es-x-npm-7.1.0-35735e8bbc-a19924313c.zip b/.yarn/cache/eslint-plugin-es-x-npm-7.1.0-35735e8bbc-a19924313c.zip deleted file mode 100644 index 4184ed69d2..0000000000 Binary files a/.yarn/cache/eslint-plugin-es-x-npm-7.1.0-35735e8bbc-a19924313c.zip and /dev/null differ diff --git a/.yarn/cache/eslint-plugin-es-x-npm-7.5.0-77e84d6e5d-e770e57df7.zip b/.yarn/cache/eslint-plugin-es-x-npm-7.5.0-77e84d6e5d-e770e57df7.zip new file mode 100644 index 0000000000..1d334e0a10 Binary files /dev/null and b/.yarn/cache/eslint-plugin-es-x-npm-7.5.0-77e84d6e5d-e770e57df7.zip differ diff --git a/.yarn/cache/eslint-plugin-import-npm-2.28.1-2056ddf35c-e8ae6dd8f0.zip b/.yarn/cache/eslint-plugin-import-npm-2.29.1-b94305f7dc-e65159aef8.zip similarity index 78% rename from .yarn/cache/eslint-plugin-import-npm-2.28.1-2056ddf35c-e8ae6dd8f0.zip rename to .yarn/cache/eslint-plugin-import-npm-2.29.1-b94305f7dc-e65159aef8.zip index 694db7307d..bc424a6a64 100644 Binary files a/.yarn/cache/eslint-plugin-import-npm-2.28.1-2056ddf35c-e8ae6dd8f0.zip and b/.yarn/cache/eslint-plugin-import-npm-2.29.1-b94305f7dc-e65159aef8.zip differ diff --git a/.yarn/cache/eslint-plugin-n-npm-16.2.0-b2b8355312-124ba4f418.zip b/.yarn/cache/eslint-plugin-n-npm-16.2.0-b2b8355312-124ba4f418.zip deleted file mode 100644 index 12357638c9..0000000000 Binary files a/.yarn/cache/eslint-plugin-n-npm-16.2.0-b2b8355312-124ba4f418.zip and /dev/null differ diff --git a/.yarn/cache/eslint-plugin-n-npm-16.6.2-77775852d0-3b468da003.zip b/.yarn/cache/eslint-plugin-n-npm-16.6.2-77775852d0-3b468da003.zip new file mode 100644 index 0000000000..9c7224993f Binary files /dev/null and b/.yarn/cache/eslint-plugin-n-npm-16.6.2-77775852d0-3b468da003.zip differ diff --git a/.yarn/cache/eslint-plugin-vue-npm-9.17.0-c32115eab8-2ef53a0387.zip b/.yarn/cache/eslint-plugin-vue-npm-9.17.0-c32115eab8-2ef53a0387.zip deleted file mode 100644 index 2453c1f228..0000000000 Binary files a/.yarn/cache/eslint-plugin-vue-npm-9.17.0-c32115eab8-2ef53a0387.zip and /dev/null differ diff --git a/.yarn/cache/eslint-plugin-vue-npm-9.24.0-4c6dba51bf-2309b919d8.zip b/.yarn/cache/eslint-plugin-vue-npm-9.24.0-4c6dba51bf-2309b919d8.zip new file mode 100644 index 0000000000..285d11da2d Binary files /dev/null and b/.yarn/cache/eslint-plugin-vue-npm-9.24.0-4c6dba51bf-2309b919d8.zip differ diff --git a/.yarn/cache/foreground-child-npm-2.0.0-80c976b61e-f77ec9aff6.zip b/.yarn/cache/foreground-child-npm-2.0.0-80c976b61e-f77ec9aff6.zip deleted file mode 100644 index d947311d1e..0000000000 Binary files a/.yarn/cache/foreground-child-npm-2.0.0-80c976b61e-f77ec9aff6.zip and /dev/null differ diff --git a/.yarn/cache/function-bind-npm-1.1.2-7a55be9b03-2b0ff4ce70.zip b/.yarn/cache/function-bind-npm-1.1.2-7a55be9b03-2b0ff4ce70.zip new file mode 100644 index 0000000000..55fbdad3a3 Binary files /dev/null and b/.yarn/cache/function-bind-npm-1.1.2-7a55be9b03-2b0ff4ce70.zip differ diff --git a/.yarn/cache/function.prototype.name-npm-1.1.5-e776a642bb-acd21d733a.zip b/.yarn/cache/function.prototype.name-npm-1.1.5-e776a642bb-acd21d733a.zip deleted 
file mode 100644 index ac0af539ea..0000000000 Binary files a/.yarn/cache/function.prototype.name-npm-1.1.5-e776a642bb-acd21d733a.zip and /dev/null differ diff --git a/.yarn/cache/function.prototype.name-npm-1.1.6-fd3a6a5cdd-7a3f9bd98a.zip b/.yarn/cache/function.prototype.name-npm-1.1.6-fd3a6a5cdd-7a3f9bd98a.zip new file mode 100644 index 0000000000..9c6ff345f9 Binary files /dev/null and b/.yarn/cache/function.prototype.name-npm-1.1.6-fd3a6a5cdd-7a3f9bd98a.zip differ diff --git a/.yarn/cache/get-intrinsic-npm-1.2.2-3f446d8847-447ff0724d.zip b/.yarn/cache/get-intrinsic-npm-1.2.2-3f446d8847-447ff0724d.zip new file mode 100644 index 0000000000..510eb5f0ed Binary files /dev/null and b/.yarn/cache/get-intrinsic-npm-1.2.2-3f446d8847-447ff0724d.zip differ diff --git a/.yarn/cache/globals-npm-13.24.0-cc7713139c-56066ef058.zip b/.yarn/cache/globals-npm-13.24.0-cc7713139c-56066ef058.zip new file mode 100644 index 0000000000..c8cb0244af Binary files /dev/null and b/.yarn/cache/globals-npm-13.24.0-cc7713139c-56066ef058.zip differ diff --git a/.yarn/cache/hasown-npm-2.0.0-78b794ceef-6151c75ca1.zip b/.yarn/cache/hasown-npm-2.0.0-78b794ceef-6151c75ca1.zip new file mode 100644 index 0000000000..5454406288 Binary files /dev/null and b/.yarn/cache/hasown-npm-2.0.0-78b794ceef-6151c75ca1.zip differ diff --git a/.yarn/cache/highcharts-npm-11.1.0-0d42a04430-f9b8cdc38b.zip b/.yarn/cache/highcharts-npm-11.1.0-0d42a04430-f9b8cdc38b.zip deleted file mode 100644 index ccf9aece97..0000000000 Binary files a/.yarn/cache/highcharts-npm-11.1.0-0d42a04430-f9b8cdc38b.zip and /dev/null differ diff --git a/.yarn/cache/highcharts-npm-11.4.0-8a1f46b545-873e661914.zip b/.yarn/cache/highcharts-npm-11.4.0-8a1f46b545-873e661914.zip new file mode 100644 index 0000000000..9c2f2df154 Binary files /dev/null and b/.yarn/cache/highcharts-npm-11.4.0-8a1f46b545-873e661914.zip differ diff --git a/.yarn/cache/html-validate-npm-8.18.1-c5271a0fb9-53479bf75b.zip b/.yarn/cache/html-validate-npm-8.18.1-c5271a0fb9-53479bf75b.zip new file mode 100644 index 0000000000..b2f855af03 Binary files /dev/null and b/.yarn/cache/html-validate-npm-8.18.1-c5271a0fb9-53479bf75b.zip differ diff --git a/.yarn/cache/html-validate-npm-8.5.0-a5c06a51e6-38ef4c832e.zip b/.yarn/cache/html-validate-npm-8.5.0-a5c06a51e6-38ef4c832e.zip deleted file mode 100644 index 876a2e14a7..0000000000 Binary files a/.yarn/cache/html-validate-npm-8.5.0-a5c06a51e6-38ef4c832e.zip and /dev/null differ diff --git a/.yarn/cache/ignore-npm-5.3.1-f6947c5df7-71d7bb4c1d.zip b/.yarn/cache/ignore-npm-5.3.1-f6947c5df7-71d7bb4c1d.zip new file mode 100644 index 0000000000..75ba53a270 Binary files /dev/null and b/.yarn/cache/ignore-npm-5.3.1-f6947c5df7-71d7bb4c1d.zip differ diff --git a/.yarn/cache/internal-slot-npm-1.0.4-9183007374-8974588d06.zip b/.yarn/cache/internal-slot-npm-1.0.4-9183007374-8974588d06.zip deleted file mode 100644 index cba2d7ba66..0000000000 Binary files a/.yarn/cache/internal-slot-npm-1.0.4-9183007374-8974588d06.zip and /dev/null differ diff --git a/.yarn/cache/is-builtin-module-npm-3.2.1-2f92a5d353-e8f0ffc19a.zip b/.yarn/cache/is-builtin-module-npm-3.2.1-2f92a5d353-e8f0ffc19a.zip new file mode 100644 index 0000000000..be908976b5 Binary files /dev/null and b/.yarn/cache/is-builtin-module-npm-3.2.1-2f92a5d353-e8f0ffc19a.zip differ diff --git a/.yarn/cache/is-core-module-npm-2.10.0-6dff9310aa-0f3f77811f.zip b/.yarn/cache/is-core-module-npm-2.10.0-6dff9310aa-0f3f77811f.zip deleted file mode 100644 index 42dc0c31eb..0000000000 Binary files 
a/.yarn/cache/is-core-module-npm-2.10.0-6dff9310aa-0f3f77811f.zip and /dev/null differ diff --git a/.yarn/cache/is-core-module-npm-2.11.0-70061e141a-f96fd490c6.zip b/.yarn/cache/is-core-module-npm-2.11.0-70061e141a-f96fd490c6.zip deleted file mode 100644 index 4b89bc40ec..0000000000 Binary files a/.yarn/cache/is-core-module-npm-2.11.0-70061e141a-f96fd490c6.zip and /dev/null differ diff --git a/.yarn/cache/is-core-module-npm-2.13.1-36e17434f9-256559ee8a.zip b/.yarn/cache/is-core-module-npm-2.13.1-36e17434f9-256559ee8a.zip new file mode 100644 index 0000000000..897f505685 Binary files /dev/null and b/.yarn/cache/is-core-module-npm-2.13.1-36e17434f9-256559ee8a.zip differ diff --git a/.yarn/cache/is-typed-array-npm-1.1.12-6135c91b1a-4c89c4a3be.zip b/.yarn/cache/is-typed-array-npm-1.1.12-6135c91b1a-4c89c4a3be.zip new file mode 100644 index 0000000000..4a35c2e95f Binary files /dev/null and b/.yarn/cache/is-typed-array-npm-1.1.12-6135c91b1a-4c89c4a3be.zip differ diff --git a/.yarn/cache/luxon-npm-3.4.3-1b54517fa6-3eade81506.zip b/.yarn/cache/luxon-npm-3.4.3-1b54517fa6-3eade81506.zip deleted file mode 100644 index 3089c36a81..0000000000 Binary files a/.yarn/cache/luxon-npm-3.4.3-1b54517fa6-3eade81506.zip and /dev/null differ diff --git a/.yarn/cache/luxon-npm-3.4.4-c93f95dde8-36c1f99c47.zip b/.yarn/cache/luxon-npm-3.4.4-c93f95dde8-36c1f99c47.zip new file mode 100644 index 0000000000..ed7709ee9e Binary files /dev/null and b/.yarn/cache/luxon-npm-3.4.4-c93f95dde8-36c1f99c47.zip differ diff --git a/.yarn/cache/magic-string-npm-0.30.0-20d8e0b6e4-7bdf22e273.zip b/.yarn/cache/magic-string-npm-0.30.0-20d8e0b6e4-7bdf22e273.zip deleted file mode 100644 index 24415206d0..0000000000 Binary files a/.yarn/cache/magic-string-npm-0.30.0-20d8e0b6e4-7bdf22e273.zip and /dev/null differ diff --git a/.yarn/cache/magic-string-npm-0.30.7-0bb5819095-bdf102e36a.zip b/.yarn/cache/magic-string-npm-0.30.7-0bb5819095-bdf102e36a.zip new file mode 100644 index 0000000000..7d9e6ff1d3 Binary files /dev/null and b/.yarn/cache/magic-string-npm-0.30.7-0bb5819095-bdf102e36a.zip differ diff --git a/.yarn/cache/moment-npm-2.30.1-1c51a5c631-859236bab1.zip b/.yarn/cache/moment-npm-2.30.1-1c51a5c631-859236bab1.zip new file mode 100644 index 0000000000..7454cc21af Binary files /dev/null and b/.yarn/cache/moment-npm-2.30.1-1c51a5c631-859236bab1.zip differ diff --git a/.yarn/cache/moment-timezone-npm-0.5.43-1304d8602a-8075c897ed.zip b/.yarn/cache/moment-timezone-npm-0.5.43-1304d8602a-8075c897ed.zip deleted file mode 100644 index 6200ccaec8..0000000000 Binary files a/.yarn/cache/moment-timezone-npm-0.5.43-1304d8602a-8075c897ed.zip and /dev/null differ diff --git a/.yarn/cache/moment-timezone-npm-0.5.45-2df3ad72a4-a22e9f983f.zip b/.yarn/cache/moment-timezone-npm-0.5.45-2df3ad72a4-a22e9f983f.zip new file mode 100644 index 0000000000..4cd7864ca5 Binary files /dev/null and b/.yarn/cache/moment-timezone-npm-0.5.45-2df3ad72a4-a22e9f983f.zip differ diff --git a/.yarn/cache/msgpackr-npm-1.10.1-5c5ff5c553-e422d18b01.zip b/.yarn/cache/msgpackr-npm-1.10.1-5c5ff5c553-e422d18b01.zip new file mode 100644 index 0000000000..12aaa36344 Binary files /dev/null and b/.yarn/cache/msgpackr-npm-1.10.1-5c5ff5c553-e422d18b01.zip differ diff --git a/.yarn/cache/muggle-string-npm-0.4.1-fe3c825cc2-85fe1766d1.zip b/.yarn/cache/muggle-string-npm-0.4.1-fe3c825cc2-85fe1766d1.zip new file mode 100644 index 0000000000..4cec1b177d Binary files /dev/null and b/.yarn/cache/muggle-string-npm-0.4.1-fe3c825cc2-85fe1766d1.zip differ diff --git 
a/.yarn/cache/naive-ui-npm-2.35.0-2bb3f5a46d-53239b8cbe.zip b/.yarn/cache/naive-ui-npm-2.35.0-2bb3f5a46d-53239b8cbe.zip deleted file mode 100644 index 79503fb7af..0000000000 Binary files a/.yarn/cache/naive-ui-npm-2.35.0-2bb3f5a46d-53239b8cbe.zip and /dev/null differ diff --git a/.yarn/cache/naive-ui-npm-2.38.1-0edd2e5816-88a8f981de.zip b/.yarn/cache/naive-ui-npm-2.38.1-0edd2e5816-88a8f981de.zip new file mode 100644 index 0000000000..fb6dc789a1 Binary files /dev/null and b/.yarn/cache/naive-ui-npm-2.38.1-0edd2e5816-88a8f981de.zip differ diff --git a/.yarn/cache/nanoid-npm-3.3.3-25d865be84-ada019402a.zip b/.yarn/cache/nanoid-npm-3.3.3-25d865be84-ada019402a.zip deleted file mode 100644 index d28e91f1ff..0000000000 Binary files a/.yarn/cache/nanoid-npm-3.3.3-25d865be84-ada019402a.zip and /dev/null differ diff --git a/.yarn/cache/nanoid-npm-3.3.6-e6d6ae7e71-7d0eda6570.zip b/.yarn/cache/nanoid-npm-3.3.7-98824ba130-d36c427e53.zip similarity index 70% rename from .yarn/cache/nanoid-npm-3.3.6-e6d6ae7e71-7d0eda6570.zip rename to .yarn/cache/nanoid-npm-3.3.7-98824ba130-d36c427e53.zip index 8526acad72..7b2fd6e1b5 100644 Binary files a/.yarn/cache/nanoid-npm-3.3.6-e6d6ae7e71-7d0eda6570.zip and b/.yarn/cache/nanoid-npm-3.3.7-98824ba130-d36c427e53.zip differ diff --git a/.yarn/cache/object-inspect-npm-1.12.3-1e7d20f5ff-dabfd824d9.zip b/.yarn/cache/object-inspect-npm-1.12.3-1e7d20f5ff-dabfd824d9.zip deleted file mode 100644 index ec58095dc8..0000000000 Binary files a/.yarn/cache/object-inspect-npm-1.12.3-1e7d20f5ff-dabfd824d9.zip and /dev/null differ diff --git a/.yarn/cache/object-inspect-npm-1.13.1-fd038a2f0a-7d9fa9221d.zip b/.yarn/cache/object-inspect-npm-1.13.1-fd038a2f0a-7d9fa9221d.zip new file mode 100644 index 0000000000..1e1bbfbcfa Binary files /dev/null and b/.yarn/cache/object-inspect-npm-1.13.1-fd038a2f0a-7d9fa9221d.zip differ diff --git a/.yarn/cache/object.fromentries-npm-2.0.6-424cf4cd3c-453c6d6941.zip b/.yarn/cache/object.fromentries-npm-2.0.7-2e38392540-7341ce246e.zip similarity index 55% rename from .yarn/cache/object.fromentries-npm-2.0.6-424cf4cd3c-453c6d6941.zip rename to .yarn/cache/object.fromentries-npm-2.0.7-2e38392540-7341ce246e.zip index 5c9a301a0f..a976cc8e0e 100644 Binary files a/.yarn/cache/object.fromentries-npm-2.0.6-424cf4cd3c-453c6d6941.zip and b/.yarn/cache/object.fromentries-npm-2.0.7-2e38392540-7341ce246e.zip differ diff --git a/.yarn/cache/object.groupby-npm-1.0.0-b360bea3aa-64b00b287d.zip b/.yarn/cache/object.groupby-npm-1.0.0-b360bea3aa-64b00b287d.zip deleted file mode 100644 index ba072a91af..0000000000 Binary files a/.yarn/cache/object.groupby-npm-1.0.0-b360bea3aa-64b00b287d.zip and /dev/null differ diff --git a/.yarn/cache/object.groupby-npm-1.0.1-fc268391fe-d7959d6eaa.zip b/.yarn/cache/object.groupby-npm-1.0.1-fc268391fe-d7959d6eaa.zip new file mode 100644 index 0000000000..c67f462cfb Binary files /dev/null and b/.yarn/cache/object.groupby-npm-1.0.1-fc268391fe-d7959d6eaa.zip differ diff --git a/.yarn/cache/object.values-npm-1.1.6-ab9b67ccd3-f6fff9fd81.zip b/.yarn/cache/object.values-npm-1.1.6-ab9b67ccd3-f6fff9fd81.zip deleted file mode 100644 index 679cbd793f..0000000000 Binary files a/.yarn/cache/object.values-npm-1.1.6-ab9b67ccd3-f6fff9fd81.zip and /dev/null differ diff --git a/.yarn/cache/object.values-npm-1.1.7-deae619f88-f3e4ae4f21.zip b/.yarn/cache/object.values-npm-1.1.7-deae619f88-f3e4ae4f21.zip new file mode 100644 index 0000000000..4c12832e02 Binary files /dev/null and b/.yarn/cache/object.values-npm-1.1.7-deae619f88-f3e4ae4f21.zip differ diff --git 
a/.yarn/cache/parcel-npm-2.10.0-8e794fc289-fe25ddcf2d.zip b/.yarn/cache/parcel-npm-2.10.0-8e794fc289-fe25ddcf2d.zip deleted file mode 100644 index 018a98c3b8..0000000000 Binary files a/.yarn/cache/parcel-npm-2.10.0-8e794fc289-fe25ddcf2d.zip and /dev/null differ diff --git a/.yarn/cache/parcel-npm-2.12.0-96a4bb6cc3-d8e6cb690a.zip b/.yarn/cache/parcel-npm-2.12.0-96a4bb6cc3-d8e6cb690a.zip new file mode 100644 index 0000000000..965ad65ddc Binary files /dev/null and b/.yarn/cache/parcel-npm-2.12.0-96a4bb6cc3-d8e6cb690a.zip differ diff --git a/.yarn/cache/postcss-npm-8.4.12-e941d78a98-248e3d0f9b.zip b/.yarn/cache/postcss-npm-8.4.12-e941d78a98-248e3d0f9b.zip deleted file mode 100644 index 4f940728b9..0000000000 Binary files a/.yarn/cache/postcss-npm-8.4.12-e941d78a98-248e3d0f9b.zip and /dev/null differ diff --git a/.yarn/cache/postcss-npm-8.4.27-2a9f5f8f40-1cdd0c2988.zip b/.yarn/cache/postcss-npm-8.4.27-2a9f5f8f40-1cdd0c2988.zip deleted file mode 100644 index 11d492b6c1..0000000000 Binary files a/.yarn/cache/postcss-npm-8.4.27-2a9f5f8f40-1cdd0c2988.zip and /dev/null differ diff --git a/.yarn/cache/postcss-npm-8.4.33-6ba8157009-6f98b2af4b.zip b/.yarn/cache/postcss-npm-8.4.33-6ba8157009-6f98b2af4b.zip new file mode 100644 index 0000000000..57638cbd81 Binary files /dev/null and b/.yarn/cache/postcss-npm-8.4.33-6ba8157009-6f98b2af4b.zip differ diff --git a/.yarn/cache/postcss-npm-8.4.35-6bc1848fff-cf3c3124d3.zip b/.yarn/cache/postcss-npm-8.4.35-6bc1848fff-cf3c3124d3.zip new file mode 100644 index 0000000000..888dccea0c Binary files /dev/null and b/.yarn/cache/postcss-npm-8.4.35-6bc1848fff-cf3c3124d3.zip differ diff --git a/.yarn/cache/postcss-selector-parser-npm-6.0.13-f732d92326-f89163338a.zip b/.yarn/cache/postcss-selector-parser-npm-6.0.15-0ec4819b4e-57decb9415.zip similarity index 74% rename from .yarn/cache/postcss-selector-parser-npm-6.0.13-f732d92326-f89163338a.zip rename to .yarn/cache/postcss-selector-parser-npm-6.0.15-0ec4819b4e-57decb9415.zip index 1623d46ce1..c6d454663e 100644 Binary files a/.yarn/cache/postcss-selector-parser-npm-6.0.13-f732d92326-f89163338a.zip and b/.yarn/cache/postcss-selector-parser-npm-6.0.15-0ec4819b4e-57decb9415.zip differ diff --git a/.yarn/cache/regexp.prototype.flags-npm-1.4.3-df1c08b65d-51228bae73.zip b/.yarn/cache/regexp.prototype.flags-npm-1.4.3-df1c08b65d-51228bae73.zip deleted file mode 100644 index 9e56b1a48b..0000000000 Binary files a/.yarn/cache/regexp.prototype.flags-npm-1.4.3-df1c08b65d-51228bae73.zip and /dev/null differ diff --git a/.yarn/cache/regexp.prototype.flags-npm-1.5.0-5623b9e07f-c541687cdb.zip b/.yarn/cache/regexp.prototype.flags-npm-1.5.0-5623b9e07f-c541687cdb.zip deleted file mode 100644 index 79f001c128..0000000000 Binary files a/.yarn/cache/regexp.prototype.flags-npm-1.5.0-5623b9e07f-c541687cdb.zip and /dev/null differ diff --git a/.yarn/cache/regexp.prototype.flags-npm-1.5.1-b8faeee306-869edff002.zip b/.yarn/cache/regexp.prototype.flags-npm-1.5.1-b8faeee306-869edff002.zip new file mode 100644 index 0000000000..d73fb5c3df Binary files /dev/null and b/.yarn/cache/regexp.prototype.flags-npm-1.5.1-b8faeee306-869edff002.zip differ diff --git a/.yarn/cache/resolve-npm-1.22.1-3980488690-07af5fc1e8.zip b/.yarn/cache/resolve-npm-1.22.8-098f379dfe-f8a26958aa.zip similarity index 63% rename from .yarn/cache/resolve-npm-1.22.1-3980488690-07af5fc1e8.zip rename to .yarn/cache/resolve-npm-1.22.8-098f379dfe-f8a26958aa.zip index d41402c877..87b2b21978 100644 Binary files a/.yarn/cache/resolve-npm-1.22.1-3980488690-07af5fc1e8.zip and 
b/.yarn/cache/resolve-npm-1.22.8-098f379dfe-f8a26958aa.zip differ diff --git a/.yarn/cache/resolve-patch-46f9469d0d-5656f4d0be.zip b/.yarn/cache/resolve-patch-f6b5304cab-5479b7d431.zip similarity index 63% rename from .yarn/cache/resolve-patch-46f9469d0d-5656f4d0be.zip rename to .yarn/cache/resolve-patch-f6b5304cab-5479b7d431.zip index c3066c3608..84c63abe59 100644 Binary files a/.yarn/cache/resolve-patch-46f9469d0d-5656f4d0be.zip and b/.yarn/cache/resolve-patch-f6b5304cab-5479b7d431.zip differ diff --git a/.yarn/cache/rollup-npm-3.28.0-4ab1b4022e-6ded4a0d3c.zip b/.yarn/cache/rollup-npm-3.28.0-4ab1b4022e-6ded4a0d3c.zip deleted file mode 100644 index fba58d7eec..0000000000 Binary files a/.yarn/cache/rollup-npm-3.28.0-4ab1b4022e-6ded4a0d3c.zip and /dev/null differ diff --git a/.yarn/cache/rollup-npm-3.29.4-5e5e5f2087-8bb20a39c8.zip b/.yarn/cache/rollup-npm-3.29.4-5e5e5f2087-8bb20a39c8.zip new file mode 100644 index 0000000000..9f6628aa42 Binary files /dev/null and b/.yarn/cache/rollup-npm-3.29.4-5e5e5f2087-8bb20a39c8.zip differ diff --git a/.yarn/cache/safe-array-concat-npm-1.0.0-897b2c630a-f43cb98fe3.zip b/.yarn/cache/safe-array-concat-npm-1.0.1-8a42907bbf-001ecf1d8a.zip similarity index 57% rename from .yarn/cache/safe-array-concat-npm-1.0.0-897b2c630a-f43cb98fe3.zip rename to .yarn/cache/safe-array-concat-npm-1.0.1-8a42907bbf-001ecf1d8a.zip index df430cd1d9..6789308b81 100644 Binary files a/.yarn/cache/safe-array-concat-npm-1.0.0-897b2c630a-f43cb98fe3.zip and b/.yarn/cache/safe-array-concat-npm-1.0.1-8a42907bbf-001ecf1d8a.zip differ diff --git a/.yarn/cache/sass-npm-1.69.4-bea57e4b30-ed5558445b.zip b/.yarn/cache/sass-npm-1.69.4-bea57e4b30-ed5558445b.zip deleted file mode 100644 index e9db6b0c8c..0000000000 Binary files a/.yarn/cache/sass-npm-1.69.4-bea57e4b30-ed5558445b.zip and /dev/null differ diff --git a/.yarn/cache/sass-npm-1.72.0-fb38bb530c-f420079c7d.zip b/.yarn/cache/sass-npm-1.72.0-fb38bb530c-f420079c7d.zip new file mode 100644 index 0000000000..a3aea4e668 Binary files /dev/null and b/.yarn/cache/sass-npm-1.72.0-fb38bb530c-f420079c7d.zip differ diff --git a/.yarn/cache/seemly-npm-0.3.8-4940336497-98171fd4d9.zip b/.yarn/cache/seemly-npm-0.3.8-4940336497-98171fd4d9.zip new file mode 100644 index 0000000000..03ae0a8f50 Binary files /dev/null and b/.yarn/cache/seemly-npm-0.3.8-4940336497-98171fd4d9.zip differ diff --git a/.yarn/cache/semver-npm-7.6.0-f4630729f6-7427f05b70.zip b/.yarn/cache/semver-npm-7.6.0-f4630729f6-7427f05b70.zip new file mode 100644 index 0000000000..a5494e10ac Binary files /dev/null and b/.yarn/cache/semver-npm-7.6.0-f4630729f6-7427f05b70.zip differ diff --git a/.yarn/cache/set-function-length-npm-1.1.1-d362bf8221-c131d7569c.zip b/.yarn/cache/set-function-length-npm-1.1.1-d362bf8221-c131d7569c.zip new file mode 100644 index 0000000000..024add469c Binary files /dev/null and b/.yarn/cache/set-function-length-npm-1.1.1-d362bf8221-c131d7569c.zip differ diff --git a/.yarn/cache/set-function-name-npm-2.0.1-a9f970eea0-4975d17d90.zip b/.yarn/cache/set-function-name-npm-2.0.1-a9f970eea0-4975d17d90.zip new file mode 100644 index 0000000000..f18d53b599 Binary files /dev/null and b/.yarn/cache/set-function-name-npm-2.0.1-a9f970eea0-4975d17d90.zip differ diff --git a/.yarn/cache/sortablejs-npm-1.15.0-f3a393abcc-bb82223a66.zip b/.yarn/cache/sortablejs-npm-1.15.0-f3a393abcc-bb82223a66.zip deleted file mode 100644 index 9028b71d1c..0000000000 Binary files a/.yarn/cache/sortablejs-npm-1.15.0-f3a393abcc-bb82223a66.zip and /dev/null differ diff --git 
a/.yarn/cache/sortablejs-npm-1.15.2-73347ae85a-36b20b144f.zip b/.yarn/cache/sortablejs-npm-1.15.2-73347ae85a-36b20b144f.zip new file mode 100644 index 0000000000..b303125761 Binary files /dev/null and b/.yarn/cache/sortablejs-npm-1.15.2-73347ae85a-36b20b144f.zip differ diff --git a/.yarn/cache/string.prototype.trim-npm-1.2.7-3fbaf3b9d2-05b7b2d6af.zip b/.yarn/cache/string.prototype.trim-npm-1.2.7-3fbaf3b9d2-05b7b2d6af.zip deleted file mode 100644 index ee6a456c05..0000000000 Binary files a/.yarn/cache/string.prototype.trim-npm-1.2.7-3fbaf3b9d2-05b7b2d6af.zip and /dev/null differ diff --git a/.yarn/cache/string.prototype.trim-npm-1.2.8-7ed4517ce8-49eb1a862a.zip b/.yarn/cache/string.prototype.trim-npm-1.2.8-7ed4517ce8-49eb1a862a.zip new file mode 100644 index 0000000000..543f676ced Binary files /dev/null and b/.yarn/cache/string.prototype.trim-npm-1.2.8-7ed4517ce8-49eb1a862a.zip differ diff --git a/.yarn/cache/string.prototype.trimend-npm-1.0.6-304246ecc1-0fdc34645a.zip b/.yarn/cache/string.prototype.trimend-npm-1.0.6-304246ecc1-0fdc34645a.zip deleted file mode 100644 index c28cc815b2..0000000000 Binary files a/.yarn/cache/string.prototype.trimend-npm-1.0.6-304246ecc1-0fdc34645a.zip and /dev/null differ diff --git a/.yarn/cache/string.prototype.trimend-npm-1.0.7-159b9dcfbc-2375516272.zip b/.yarn/cache/string.prototype.trimend-npm-1.0.7-159b9dcfbc-2375516272.zip new file mode 100644 index 0000000000..93f30c147e Binary files /dev/null and b/.yarn/cache/string.prototype.trimend-npm-1.0.7-159b9dcfbc-2375516272.zip differ diff --git a/.yarn/cache/string.prototype.trimstart-npm-1.0.6-0926caea6c-89080feef4.zip b/.yarn/cache/string.prototype.trimstart-npm-1.0.7-ae2f803b78-13d0c2cb0d.zip similarity index 54% rename from .yarn/cache/string.prototype.trimstart-npm-1.0.6-0926caea6c-89080feef4.zip rename to .yarn/cache/string.prototype.trimstart-npm-1.0.7-ae2f803b78-13d0c2cb0d.zip index 6203bec399..187509d052 100644 Binary files a/.yarn/cache/string.prototype.trimstart-npm-1.0.6-0926caea6c-89080feef4.zip and b/.yarn/cache/string.prototype.trimstart-npm-1.0.7-ae2f803b78-13d0c2cb0d.zip differ diff --git a/.yarn/cache/tsconfig-paths-npm-3.14.2-90ce75420d-a6162eaa1a.zip b/.yarn/cache/tsconfig-paths-npm-3.15.0-ff68930e0e-59f35407a3.zip similarity index 69% rename from .yarn/cache/tsconfig-paths-npm-3.14.2-90ce75420d-a6162eaa1a.zip rename to .yarn/cache/tsconfig-paths-npm-3.15.0-ff68930e0e-59f35407a3.zip index 0b76788258..abfe8dd47e 100644 Binary files a/.yarn/cache/tsconfig-paths-npm-3.14.2-90ce75420d-a6162eaa1a.zip and b/.yarn/cache/tsconfig-paths-npm-3.15.0-ff68930e0e-59f35407a3.zip differ diff --git a/.yarn/cache/vite-npm-4.4.11-e7ab057df9-c22145c838.zip b/.yarn/cache/vite-npm-4.4.11-e7ab057df9-c22145c838.zip deleted file mode 100644 index ed02fabbc7..0000000000 Binary files a/.yarn/cache/vite-npm-4.4.11-e7ab057df9-c22145c838.zip and /dev/null differ diff --git a/.yarn/cache/vite-npm-4.5.3-5cedc7cb8f-fd3f512ce4.zip b/.yarn/cache/vite-npm-4.5.3-5cedc7cb8f-fd3f512ce4.zip new file mode 100644 index 0000000000..c6bb0e4ef7 Binary files /dev/null and b/.yarn/cache/vite-npm-4.5.3-5cedc7cb8f-fd3f512ce4.zip differ diff --git a/.yarn/cache/volar-service-html-npm-0.0.34-32b6d24136-83b50cd805.zip b/.yarn/cache/volar-service-html-npm-0.0.34-32b6d24136-83b50cd805.zip new file mode 100644 index 0000000000..0f1e9805f7 Binary files /dev/null and b/.yarn/cache/volar-service-html-npm-0.0.34-32b6d24136-83b50cd805.zip differ diff --git a/.yarn/cache/volar-service-pug-npm-0.0.34-6f5429e17c-4691aa1c8e.zip 
b/.yarn/cache/volar-service-pug-npm-0.0.34-6f5429e17c-4691aa1c8e.zip new file mode 100644 index 0000000000..d53f3521ee Binary files /dev/null and b/.yarn/cache/volar-service-pug-npm-0.0.34-6f5429e17c-4691aa1c8e.zip differ diff --git a/.yarn/cache/vscode-html-languageservice-npm-5.1.2-2ea2618bdd-3a2a5ee5ad.zip b/.yarn/cache/vscode-html-languageservice-npm-5.1.2-2ea2618bdd-3a2a5ee5ad.zip new file mode 100644 index 0000000000..d83607888b Binary files /dev/null and b/.yarn/cache/vscode-html-languageservice-npm-5.1.2-2ea2618bdd-3a2a5ee5ad.zip differ diff --git a/.yarn/cache/vscode-jsonrpc-npm-8.2.0-b7d2e5b553-f302a01e59.zip b/.yarn/cache/vscode-jsonrpc-npm-8.2.0-b7d2e5b553-f302a01e59.zip new file mode 100644 index 0000000000..75e2c086b6 Binary files /dev/null and b/.yarn/cache/vscode-jsonrpc-npm-8.2.0-b7d2e5b553-f302a01e59.zip differ diff --git a/.yarn/cache/vscode-languageserver-protocol-npm-3.17.5-2b07e16989-dfb42d276d.zip b/.yarn/cache/vscode-languageserver-protocol-npm-3.17.5-2b07e16989-dfb42d276d.zip new file mode 100644 index 0000000000..bcb5ae5b4e Binary files /dev/null and b/.yarn/cache/vscode-languageserver-protocol-npm-3.17.5-2b07e16989-dfb42d276d.zip differ diff --git a/.yarn/cache/vscode-languageserver-textdocument-npm-1.0.11-6fc94d2b7b-ea7cdc9d4f.zip b/.yarn/cache/vscode-languageserver-textdocument-npm-1.0.11-6fc94d2b7b-ea7cdc9d4f.zip new file mode 100644 index 0000000000..b1edfda12d Binary files /dev/null and b/.yarn/cache/vscode-languageserver-textdocument-npm-1.0.11-6fc94d2b7b-ea7cdc9d4f.zip differ diff --git a/.yarn/cache/vscode-languageserver-types-npm-3.17.5-aca3b71a5a-79b420e757.zip b/.yarn/cache/vscode-languageserver-types-npm-3.17.5-aca3b71a5a-79b420e757.zip new file mode 100644 index 0000000000..ec214b2903 Binary files /dev/null and b/.yarn/cache/vscode-languageserver-types-npm-3.17.5-aca3b71a5a-79b420e757.zip differ diff --git a/.yarn/cache/vscode-uri-npm-3.0.8-56f46b9d24-5142491268.zip b/.yarn/cache/vscode-uri-npm-3.0.8-56f46b9d24-5142491268.zip new file mode 100644 index 0000000000..6dadd110c9 Binary files /dev/null and b/.yarn/cache/vscode-uri-npm-3.0.8-56f46b9d24-5142491268.zip differ diff --git a/.yarn/cache/vue-eslint-parser-npm-9.3.1-a0feb51670-6d1476b45f.zip b/.yarn/cache/vue-eslint-parser-npm-9.3.1-a0feb51670-6d1476b45f.zip deleted file mode 100644 index 82eadd1443..0000000000 Binary files a/.yarn/cache/vue-eslint-parser-npm-9.3.1-a0feb51670-6d1476b45f.zip and /dev/null differ diff --git a/.yarn/cache/vue-eslint-parser-npm-9.4.2-3e4e696025-67f14c8ea1.zip b/.yarn/cache/vue-eslint-parser-npm-9.4.2-3e4e696025-67f14c8ea1.zip new file mode 100644 index 0000000000..9ec85e189e Binary files /dev/null and b/.yarn/cache/vue-eslint-parser-npm-9.4.2-3e4e696025-67f14c8ea1.zip differ diff --git a/.yarn/cache/vue-npm-3.3.4-174fadbea4-58b6c62a66.zip b/.yarn/cache/vue-npm-3.3.4-174fadbea4-58b6c62a66.zip deleted file mode 100644 index 2386327419..0000000000 Binary files a/.yarn/cache/vue-npm-3.3.4-174fadbea4-58b6c62a66.zip and /dev/null differ diff --git a/.yarn/cache/vue-npm-3.4.21-02110aa6d9-3c477982a0.zip b/.yarn/cache/vue-npm-3.4.21-02110aa6d9-3c477982a0.zip new file mode 100644 index 0000000000..c48b4e5dfe Binary files /dev/null and b/.yarn/cache/vue-npm-3.4.21-02110aa6d9-3c477982a0.zip differ diff --git a/.yarn/cache/vue-router-npm-4.2.5-3479f41e41-2449db4f3a.zip b/.yarn/cache/vue-router-npm-4.2.5-3479f41e41-2449db4f3a.zip deleted file mode 100644 index 930757dd0b..0000000000 Binary files a/.yarn/cache/vue-router-npm-4.2.5-3479f41e41-2449db4f3a.zip and /dev/null differ diff 
--git a/.yarn/cache/vue-router-npm-4.3.0-b765d40138-0059261d39.zip b/.yarn/cache/vue-router-npm-4.3.0-b765d40138-0059261d39.zip new file mode 100644 index 0000000000..6b93953624 Binary files /dev/null and b/.yarn/cache/vue-router-npm-4.3.0-b765d40138-0059261d39.zip differ diff --git a/.yarn/cache/vueuc-npm-0.4.51-794074113f-7969659fac.zip b/.yarn/cache/vueuc-npm-0.4.58-be5584770c-fb0b9a69be.zip similarity index 66% rename from .yarn/cache/vueuc-npm-0.4.51-794074113f-7969659fac.zip rename to .yarn/cache/vueuc-npm-0.4.58-be5584770c-fb0b9a69be.zip index 4a273f8340..f62e5e32e8 100644 Binary files a/.yarn/cache/vueuc-npm-0.4.51-794074113f-7969659fac.zip and b/.yarn/cache/vueuc-npm-0.4.58-be5584770c-fb0b9a69be.zip differ diff --git a/.yarn/cache/which-typed-array-npm-1.1.11-f37f0cefe2-711ffc8ef8.zip b/.yarn/cache/which-typed-array-npm-1.1.11-f37f0cefe2-711ffc8ef8.zip deleted file mode 100644 index d9fa5911e3..0000000000 Binary files a/.yarn/cache/which-typed-array-npm-1.1.11-f37f0cefe2-711ffc8ef8.zip and /dev/null differ diff --git a/.yarn/cache/which-typed-array-npm-1.1.13-92c18b4878-3828a0d5d7.zip b/.yarn/cache/which-typed-array-npm-1.1.13-92c18b4878-3828a0d5d7.zip new file mode 100644 index 0000000000..0d9d2479da Binary files /dev/null and b/.yarn/cache/which-typed-array-npm-1.1.13-92c18b4878-3828a0d5d7.zip differ diff --git a/.yarn/cache/which-typed-array-npm-1.1.9-9559c95dfc-fe0178ca44.zip b/.yarn/cache/which-typed-array-npm-1.1.9-9559c95dfc-fe0178ca44.zip deleted file mode 100644 index e7e7d548de..0000000000 Binary files a/.yarn/cache/which-typed-array-npm-1.1.9-9559c95dfc-fe0178ca44.zip and /dev/null differ diff --git a/LICENSE b/LICENSE index c180fa3f9f..dc6e0c5663 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ BSD 3-Clause License -Copyright (c) 2008-2023, The IETF Trust +Copyright (c) 2008-2024, The IETF Trust All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/README.md b/README.md index 20c9f3e599..baffc311e7 100644 --- a/README.md +++ b/README.md @@ -5,10 +5,10 @@ [![Release](https://img.shields.io/github/release/ietf-tools/datatracker.svg?style=flat&maxAge=300)](https://github.com/ietf-tools/datatracker/releases) [![License](https://img.shields.io/github/license/ietf-tools/datatracker)](https://github.com/ietf-tools/datatracker/blob/main/LICENSE) [![Code Coverage](https://codecov.io/gh/ietf-tools/datatracker/branch/feat/bs5/graph/badge.svg?token=V4DXB0Q28C)](https://codecov.io/gh/ietf-tools/datatracker) -[![Python Version](https://img.shields.io/badge/python-3.9-blue?logo=python&logoColor=white)](#prerequisites) +[![Python Version](https://img.shields.io/badge/python-3.12-blue?logo=python&logoColor=white)](#prerequisites) [![Django Version](https://img.shields.io/badge/django-4.x-51be95?logo=django&logoColor=white)](#prerequisites) [![Node Version](https://img.shields.io/badge/node.js-16.x-green?logo=node.js&logoColor=white)](#prerequisites) -[![MariaDB Version](https://img.shields.io/badge/postgres-14-blue?logo=postgresql&logoColor=white)](#prerequisites) +[![MariaDB Version](https://img.shields.io/badge/postgres-17-blue?logo=postgresql&logoColor=white)](#prerequisites) ##### The day-to-day front-end to the IETF database for people who work on IETF standards. @@ -44,6 +44,7 @@ This project is following the standard **Git Feature Workflow** development model. 
Learn about all the various steps of the development workflow, from creating a fork to submitting a pull request, in the [Contributing](https://github.com/ietf-tools/.github/blob/main/CONTRIBUTING.md) guide. +> [!TIP] > Make sure to read the [Styleguides](https://github.com/ietf-tools/.github/blob/main/CONTRIBUTING.md#styleguides) section to ensure a cohesive code format across the project. You can submit bug reports, enhancement and new feature requests in the [discussions](https://github.com/ietf-tools/datatracker/discussions) area. Accepted tickets will be converted to issues. @@ -52,7 +53,8 @@ You can submit bug reports, enhancement and new feature requests in the [discuss Click the Fork button in the top-right corner of the repository to create a personal copy that you can work on. -> Note that some GitHub Actions might be enabled by default in your fork. You should disable them by going to **Settings** > **Actions** > **General** and selecting **Disable actions** (then Save). +> [!NOTE] +> Some GitHub Actions might be enabled by default in your fork. You should disable them by going to **Settings** > **Actions** > **General** and selecting **Disable actions** (then Save). #### Git Cloning Tips @@ -81,7 +83,7 @@ Many developers are using [VS Code](https://code.visualstudio.com/) and taking a If VS Code is not available to you, in your clone, type `cd docker; ./run` -Once the containers are started, run the tests to make sure your checkout is a good place to start from (all tests should pass - if any fail, ask for help at tools-develop@). Inside the app container's shell type: +Once the containers are started, run the tests to make sure your checkout is a good place to start from (all tests should pass - if any fail, ask for help at tools-help@). Inside the app container's shell type: ```sh ietf/manage.py test --settings=settings_test ``` @@ -104,7 +106,25 @@ Read the [Docker Dev Environment](docker/README.md) guide to get started. Nightly database dumps of the datatracker are available as Docker images: `ghcr.io/ietf-tools/datatracker-db:latest` -> Note that to update the database in your dev environment to the latest version, you should run the `docker/cleandb` script. +> [!TIP] +> In order to update the database in your dev environment to the latest version, you should run the `docker/cleandb` script. + +### Blob storage for dev/test + +The dev and test environments use [minio](https://github.com/minio/minio) to provide local blob storage. See the settings files for how the app container communicates with the blobstore container. If you need to work with minio directly from outside the containers (to interact with its api or console), use `docker compose` from the top level directory of your clone to expose it at an ephemeral port. + +``` +$ docker compose port blobstore 9001 +0.0.0.0: + +$ curl -I http://localhost: +HTTP/1.1 200 OK +... +``` + + +The minio container exposes the minio api at port 9000 and the minio console at port 9001 + ### Frontend Development @@ -122,7 +142,7 @@ Pages will gradually be updated to Vue 3 components. These components are locate Each Vue 3 app has its own sub-directory. For example, the agenda app is located under `/client/agenda`. -The datatracker makes use of the Django-Vite plugin to point to either the Vite.js server or the precompiled production files. The `DJANGO_VITE_DEV_MODE` flag, found in the `ietf/settings_local.py` file determines whether the Vite.js server is used or not. 
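For quick manual checks against the dev blobstore described above, a minimal Python sketch along the following lines can be handy once the api port is mapped. Everything specific here is an illustrative assumption (minio's default credentials, localhost endpoint, and the use of boto3), not the project's actual configuration; consult the settings files for the real values.

```python
# Minimal sketch: poke the local minio blobstore over its S3-compatible API.
# Assumptions: the minio api is reachable on localhost:9000 and still uses
# minio's default credentials; substitute the values from your settings/compose setup.
import boto3

s3 = boto3.client(
    "s3",
    endpoint_url="http://localhost:9000",    # minio api port from the compose mapping
    aws_access_key_id="minioadmin",           # assumption: minio default access key
    aws_secret_access_key="minioadmin",       # assumption: minio default secret key
)

# List whatever buckets the dev blobstore currently holds.
for bucket in s3.list_buckets()["Buckets"]:
    print(bucket["Name"])
```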
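The hunk that follows swaps the old `DJANGO_VITE_DEV_MODE` boolean for the dict-style `DJANGO_VITE["default"]["dev_mode"]` key used by newer django-vite releases. As a hedged sketch only (the surrounding settings are an assumption, not the project's actual `settings_local.py`), the relevant fragment might look like:

```python
# ietf/settings_local.py -- illustrative fragment only; all other settings omitted.
# dev_mode=True points templates at the running Vite.js dev server;
# dev_mode=False serves the precompiled production assets instead.
DJANGO_VITE = {
    "default": {
        "dev_mode": True,
    }
}
```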
+The datatracker makes use of the Django-Vite plugin to point to either the Vite.js server or the precompiled production files. The `DJANGO_VITE["default"]["dev_mode"]` flag, found in the `ietf/settings_local.py` file determines whether the Vite.js server is used or not. In development mode, you must start the Vite.js development server, in addition to the usual Datatracker server: @@ -231,6 +251,7 @@ From a datatracker container, run the command: ./ietf/manage.py test --settings=settings_test ``` +> [!TIP] > You can limit the run to specific tests using the `--pattern` argument. ### Frontend Tests @@ -240,11 +261,13 @@ Frontend tests are done via Playwright. There're 2 different type of tests: - Tests that test Vue pages / components and run natively without any external dependency. - Tests that require a running datatracker instance to test against (usually legacy views). +> [!IMPORTANT] > Make sure you have Node.js 16.x or later installed on your machine. #### Run Vue Tests -> :warning: All commands below **MUST** be run from the `./playwright` directory, unless noted otherwise. +> [!WARNING] +> All commands below **MUST** be run from the `./playwright` directory, unless noted otherwise. 1. Run **once** to install dependencies on your system: ```sh @@ -277,7 +300,8 @@ Frontend tests are done via Playwright. There're 2 different type of tests: First, you need to start a datatracker instance (dev or prod), ideally from a docker container, exposing the 8000 port. -> :warning: All commands below **MUST** be run from the `./playwright` directory. +> [!WARNING] +> All commands below **MUST** be run from the `./playwright` directory. 1. Run **once** to install dependencies on your system: ```sh @@ -290,6 +314,7 @@ npm run install-deps npm run test:legacy ``` + ### Diff Tool To compare 2 different datatracker instances and look for diff, read the [diff tool instructions](dev/diff). diff --git a/bin/check-copyright b/bin/check-copyright deleted file mode 100755 index 13cbcd8582..0000000000 --- a/bin/check-copyright +++ /dev/null @@ -1,261 +0,0 @@ -#!/usr/bin/env python3.7 -# -*- mode: python; coding: utf-8 -*- -# Copyright The IETF Trust 2019, All Rights Reserved -""" -NAME - $program - Check for current copyright notice in given files - -SYNOPSIS - $program [OPTIONS] ARGS - -DESCRIPTION - Given a list of files or filename wildcard patterns, check all for - an IETF Trust copyright notice with the current year. Optionally - generate a diff on standard out which can be used by 'patch'. - - An invocation similar to the following can be particularly useful with - a set of changed version-controlled files, as it will fix up the - Copyright statements of any python files with pending changes: - - $ check-copyright -p $(svn st | cut -c 9- | grep '\.py$' ) | patch -p0 - - -%(options)s - -AUTHOR - Written by Henrik Levkowetz, - -COPYRIGHT - Copyright 2019 the IETF Trust - - This program is free software; you can redistribute it and/or modify - it under the terms of the Simplified BSD license as published by the - Open Source Initiative at http://opensource.org/licenses/BSD-2-Clause. 
- -""" - - -import datetime -import os -import sys -import time - -path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -if not path in sys.path: - sys.path.insert(0, path) - -import getopt -import re -import pytz -import tzparse -import debug - -version = "1.0.0" -program = os.path.basename(sys.argv[0]) -progdir = os.path.dirname(sys.argv[0]) - -debug.debug = True - -# ---------------------------------------------------------------------- -# Parse options - -options = "" -for line in re.findall("\n +(if|elif) +opt in \[(.+)\]:\s+#(.+)\n", open(sys.argv[0]).read()): - if not options: - options += "OPTIONS\n" - options += " %-16s %s\n" % (line[1].replace('"', ''), line[2]) -options = options.strip() - -# with ' < 1:' on the next line, this is a no-op: -if len(sys.argv) < 1: - print(__doc__ % locals()) - sys.exit(1) - -try: - opts, files = getopt.gnu_getopt(sys.argv[1:], "hC:pvV", ["help", "copyright=", "patch", "version", "verbose",]) -except Exception as e: - print( "%s: %s" % (program, e)) - sys.exit(1) - -# ---------------------------------------------------------------------- -# Handle options - -# set default values, if any -opt_verbose = 0 -opt_patch = False -opt_copyright = "Copyright The IETF Trust {years}, All Rights Reserved" - -# handle individual options -for opt, value in opts: - if opt in ["-h", "--help"]: # Output this help, then exit - print( __doc__ % locals() ) - sys.exit(1) - elif opt in ["-p", "--patch"]: # Generate patch output rather than error messages - opt_patch = True - elif opt in ["-C", "--copyright"]: # Copyright line pattern using {years} for years - opt_copyright = value - elif opt in ["-V", "--version"]: # Output version information, then exit - print( program, version ) - sys.exit(0) - elif opt in ["-v", "--verbose"]: # Be more verbose - opt_verbose += 1 - -# ---------------------------------------------------------------------- -def say(s): - sys.stderr.write("%s\n" % (s)) - -# ---------------------------------------------------------------------- -def note(s): - if opt_verbose: - sys.stderr.write("%s\n" % (s)) - -# ---------------------------------------------------------------------- -def die(s, error=1): - sys.stderr.write("\n%s: Error: %s\n\n" % (program, s)) - sys.exit(error) - -# ---------------------------------------------------------------------- - -def pipe(cmd, inp=None): - import shlex - from subprocess import Popen, PIPE - args = shlex.split(cmd) - bufsize = 4096 - stdin = PIPE if inp else None - pipe = Popen(args, stdin=stdin, stdout=PIPE, stderr=PIPE, bufsize=bufsize, encoding='utf-8', universal_newlines=True) - out, err = pipe.communicate(inp) - code = pipe.returncode - if code != 0: - raise OSError(err) - return out - -# ---------------------------------------------------------------------- -def split_loginfo(line): - try: - parts = line.split() - rev = parts[0][1:] - who = parts[2] - date = parts[4] - time = parts[5] - tz = parts[6] - when = tzparse.tzparse(" ".join(parts[4:7]), "%Y-%m-%d %H:%M:%S %Z") - when = when.astimezone(pytz.utc) - except ValueError as e: - sys.stderr.write("Bad log line format: %s\n %s\n" % (line, e)) - - return rev, who, when - -# ---------------------------------------------------------------------- -def get_first_commit(path): - note("Getting first commit for '%s'" % path) - cmd = 'svn log %s' % path - if opt_verbose > 1: - note("Running '%s' ..." 
% cmd) - try: - commit_log = pipe(cmd) - commit_log = commit_log.splitlines() - commit_log.reverse() - for line in commit_log: - if re.search(loginfo_format, line): - rev, who, when = split_loginfo(line) - break - else: - pass - except OSError: - rev, who, when = None, None, datetime.datetime.now(datetime.timezone.utc) - return { path: { 'rev': rev, 'who': who, 'date': when.strftime('%Y-%m-%d %H:%M:%S'), }, } - - -# ---------------------------------------------------------------------- -# The program itself - -import os -import json - -cwd = os.getcwd() - -# Get current initinfo from cache and svn -cachefn = os.path.join(os.environ.get('HOME', '.'), '.initinfo') - -if os.path.exists(cachefn): - note("Reading initinfo cache file %s" % cachefn) - with open(cachefn, "r") as file: - cache = json.load(file) -else: - sys.stderr.write("No initinfo cache file found -- will have to extract all information from SVN.\n"+ - "This may take some time.\n\n") - cache = {} -initinfo = cache - -merged_revs = {} -write_cache = False -loginfo_format = r'^r[0-9]+ \| [^@]+@[^@]+ \| \d\d\d\d-\d\d-\d\d ' - -year = time.strftime('%Y') -copyright_re = "(?i)"+opt_copyright.format(years=r"(\d+-)?\d+") -for path in files: - try: - if not os.path.exists(path): - note("File does not exist: %s" % path) - continue - note("Checking path %s" % path) - if not path in initinfo: - initinfo.update(get_first_commit(path)) - write_cache = True - date = initinfo[path]['date'] - init = date[:4] - - copyright_year_re = "(?i)"+opt_copyright.format(years=r"({init}-)?{year}".format(init=init, year=year)) - with open(path) as file: - try: - chunk = file.read(4000) - except UnicodeDecodeError as e: - sys.stderr.write(f'Error when reading {file.name}: {e}\n') - raise - if os.path.basename(path) == '__init__.py' and len(chunk)==0: - continue - if not re.search(copyright_year_re, chunk): - if year == init: - copyright = opt_copyright.format(years=year) - else: - copyright = opt_copyright.format(years=f"{init}-{year}") - if opt_patch: - print(f"--- {file.name}\t(original)") - print(f"+++ {file.name}\t(modified)") - if not re.search(copyright_re, chunk): - # Simple case, just insert copyright at the top - print( "@@ -1,3 +1,4 @@") - print(f"+# {copyright}") - for i, line in list(enumerate(chunk.splitlines()))[:3]: - print(f" {line}") - else: - # Find old copyright, then emit preceding lines, - # change, and following lines. - pos = None - for i, line in enumerate(chunk.splitlines(), start=1): - if re.search(copyright_re, line): - pos = i - break - if not pos: - raise RuntimeError("Unexpected state: Expected a copyright line, but found none") - print(f"@@ -1,{pos+3} +1,{pos+3} @@") - for i, line in list(enumerate(chunk.splitlines(), start=1))[:pos+3]: - if i == pos: - print(f"-{line}") - print(f"+# {copyright}") - else: - print(f" {line}") - else: - sys.stderr.write(f"{path}(1): Error: Missing or bad copyright. 
Expected: {copyright}") - except Exception: - if write_cache: - cache = initinfo - with open(cachefn, "w") as file: - json.dump(cache, file, indent=2, sort_keys=True) - raise - -if write_cache: - cache = initinfo - with open(cachefn, "w") as file: - json.dump(cache, file, indent=2, sort_keys=True) - diff --git a/bin/count.c b/bin/count.c deleted file mode 100644 index 786f15eb97..0000000000 --- a/bin/count.c +++ /dev/null @@ -1,26 +0,0 @@ -#include - -int main( void ) -{ - int c; - int count = 0; - - //turn off buffering - setvbuf(stdin, NULL, _IONBF, 0); - setvbuf(stdout, NULL, _IONBF, 0); - setvbuf(stderr, NULL, _IONBF, 0); - - c = fgetc(stdin); - while(c != EOF) - { - if (c=='.' || c=='E' || c=='F' || c=='s') count++; else count=0; - fputc(c, stdout); - fflush(stdout); - if (count && count % 76 == 0) { - fprintf(stderr, "%4d\n", count); - fflush(stderr); - } - c = fgetc(stdin); - } - return 0; -} diff --git a/bin/daily b/bin/daily deleted file mode 100755 index c65ab56043..0000000000 --- a/bin/daily +++ /dev/null @@ -1,68 +0,0 @@ -#!/bin/bash - -# Nightly datatracker jobs. -# -# This script is expected to be triggered by cron from -# /etc/cron.d/datatracker -export LANG=en_US.UTF-8 -export PYTHONIOENCODING=utf-8 - -# Make sure we stop if something goes wrong: -program=${0##*/} -trap 'echo "$program($LINENO): Command failed with error code $? ([$$] $0 $*)"; exit 1' ERR - -# Datatracker directory -DTDIR=/a/www/ietf-datatracker/web -cd $DTDIR/ - -logger -p user.info -t cron "Running $DTDIR/bin/daily" - -# Run the hourly jobs first -$DTDIR/bin/hourly - -# Set up the virtual environment -source $DTDIR/env/bin/activate - - -# Update our information about the current version of some commands we use -$DTDIR/ietf/manage.py update_external_command_info - -# Get IANA-registered yang models -#YANG_IANA_DIR=$(python -c 'import ietf.settings; print ietf.settings.SUBMIT_YANG_IANA_MODEL_DIR') -# Hardcode the rsync target to avoid any unwanted deletes: -# rsync -avzq --delete rsync.ietf.org::iana/yang-parameters/ /a/www/ietf-ftp/yang/ianamod/ -rsync -avzq --delete /a/www/ietf-ftp/iana/yang-parameters/ /a/www/ietf-ftp/yang/ianamod/ - -# Get Yang models from Yangcatalog. 
-rsync -avzq rsync://rsync.yangcatalog.org:10873/yangdeps /a/www/ietf-ftp/yang/catalogmod/ - -# Populate the yang repositories -$DTDIR/ietf/manage.py populate_yang_model_dirs -v0 - -# Re-run yang checks on active documents -$DTDIR/ietf/manage.py run_yang_model_checks -v0 - -# Expire Internet-Drafts -# Enable when removed from /a/www/ietf-datatracker/scripts/Cron-runner: -$DTDIR/ietf/bin/expire-ids - -# Send nomcom reminders about nomination acceptance and questionnaires -$DTDIR/ietf/manage.py send_reminders - -# Expire last calls -# Enable when removed from /a/www/ietf-datatracker/scripts/Cron-runner: -$DTDIR/ietf/bin/expire-last-calls - -# Run an extended version of the rfc editor update, to catch changes -# with backdated timestamps -# Enable when removed from /a/www/ietf-datatracker/scripts/Cron-runner: -$DTDIR/ietf/bin/rfc-editor-index-updates -d 1969-01-01 - -# Fetch meeting attendance data from ietf.org/registration/attendees -$DTDIR/ietf/manage.py fetch_meeting_attendance --latest 2 - -# Send reminders originating from the review app -$DTDIR/ietf/bin/send-review-reminders - -# Purge older PersonApiKeyEvents -$DTDIR/ietf/manage.py purge_old_personal_api_key_events 14 diff --git a/bin/drop-new-tables b/bin/drop-new-tables deleted file mode 100755 index ec1594ae26..0000000000 --- a/bin/drop-new-tables +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -# Drop tables which don't exist in the database dump. - -[ -n "$1" ] || { echo -e "\nUsage: $0 DUMPFILE\n\nError: No database dump file given"; exit 1; } - -zcat $1 | head | grep "Database: ietf_utf8" || { echo "Is this a database dump? Expected to see 'Database: ietf_utf8' "; exit 1; } - -echo -e "\nSQL commands:\n" - -diff <(zcat $1 | grep '^DROP TABLE IF EXISTS' | tr -d '`;' | field 5) <(ietf/manage.py dbshell <<< 'show tables;' | tail -n +2) | grep '^>' | awk '{print "drop table if exists", $2, ";";}' | tee /dev/stderr | ietf/manage.py dbshell - -echo -e "\nDone" diff --git a/bin/dump-to-names-json b/bin/dump-to-names-json deleted file mode 100644 index 9c7dfac07d..0000000000 --- a/bin/dump-to-names-json +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -# This script provides a limited selected dump of database content with the -# purpose of generating a test fixture that provides the test data needed -# by the test suite. -# -# The generated data fixture is sorted and normalized in order to produce -# minimal commit diffs which reflect only actual changes in the fixture data, -# without apparent changes resulting only from ordering changes. 
- -set -x -ietf/manage.py dumpdata --indent 1 doc.State doc.BallotType doc.StateType \ - mailtrigger.MailTrigger mailtrigger.Recipient name utils.VersionInfo \ - group.GroupFeatures stats.CountryAlias dbtemplate.DBTemplate \ - | jq --sort-keys "sort_by(.model, .pk)" \ - | jq '[.[] | select(.model!="dbtemplate.dbtemplate" or .pk==354)]' > ietf/name/fixtures/names.json diff --git a/bin/every15m b/bin/every15m deleted file mode 100755 index c0b0752f6a..0000000000 --- a/bin/every15m +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash - -# datatracker jobs to run every 15 minutes -# -# This script is expected to be triggered by cron from -# /etc/cron.d/datatracker - -export LANG=en_US.UTF-8 -export PYTHONIOENCODING=utf-8 - -DTDIR=/a/www/ietf-datatracker/web -cd $DTDIR/ - -# Set up the virtual environment -source $DTDIR/env/bin/activate - -logger -p user.info -t cron "Running $DTDIR/bin/every15m" - -# Send mail scheduled to go out at certain times -$DTDIR/ietf/bin/send-scheduled-mail all - -# Reparse the last _year_ of RFC index entries -# (which is the default if -d is not provided) -# until https://github.com/ietf-tools/datatracker/issues/3734 -# is addressed. -# This takes about 20s on production as of 2022-08-11 -$DTDIR/ietf/bin/rfc-editor-index-updates - diff --git a/bin/hourly b/bin/hourly deleted file mode 100755 index 77310302ce..0000000000 --- a/bin/hourly +++ /dev/null @@ -1,101 +0,0 @@ -#!/bin/bash - -# Hourly datatracker jobs -# -# This script is expected to be triggered by cron from -# /etc/cron.d/datatracker -export LANG=en_US.UTF-8 -export PYTHONIOENCODING=utf-8 - -# Make sure we stop if something goes wrong: -program=${0##*/} -trap 'echo "$program($LINENO): Command failed with error code $? ([$$] $0 $*)"; exit 1' ERR - -DTDIR=/a/www/ietf-datatracker/web -cd $DTDIR/ - -# Set up the virtual environment -source $DTDIR/env/bin/activate - -logger -p user.info -t cron "Running $DTDIR/bin/hourly" - -# *** Enable when removed from /a/www/ietf-datatracker/scripts/Cron-runner: *** - -# # Update community lists. 
Remove once the community rewrite (will be around 6.20.0 ) -# $DTDIR/ietf/manage.py update_community_lists -# -# # Polling backup for iana and rfc-editory post APIs -$DTDIR/ietf/bin/iana-changes-updates -$DTDIR/ietf/bin/iana-protocols-updates -# $DTDIR/ietf/bin/rfc-editor-index-updates -# $DTDIR/ietf/bin/rfc-editor-queue-updates -# -# # Generate alias and virtual files for draft email aliases -# $DTDIR/ietf/bin/generate-draft-aliases && \ -# ( cd /a/postfix; /usr/sbin/postalias -o draft-aliases; ) && \ -# ( cd /a/postfix; /usr/sbin/postmap -o draft-virtual; ) -# -# # Generate alias and virtual files for group email aliases -# $DTDIR/ietf/bin/generate-wg-aliases && \ -# ( cd /a/postfix; /usr/sbin/postalias -o group-aliases; ) && \ -# ( cd /a/postfix; /usr/sbin/postmap -o group-virtual; ) -# -# Generate some static files -ID=/a/ietfdata/doc/draft/repository -DERIVED=/a/ietfdata/derived -DOWNLOAD=/a/www/www6s/download - -export TMPDIR=/a/tmp - -TMPFILE1=`mktemp` || exit 1 -TMPFILE2=`mktemp` || exit 1 -TMPFILE3=`mktemp` || exit 1 -TMPFILE4=`mktemp` || exit 1 -TMPFILE5=`mktemp` || exit 1 -TMPFILE6=`mktemp` || exit 1 -TMPFILE7=`mktemp` || exit 1 -TMPFILE8=`mktemp` || exit 1 -TMPFILE9=`mktemp` || exit 1 -TMPFILEA=`mktemp` || exit 1 -TMPFILEB=`mktemp` || exit 1 - -chmod a+r $TMPFILE1 $TMPFILE2 $TMPFILE3 $TMPFILE4 $TMPFILE5 $TMPFILE6 $TMPFILE7 $TMPFILE8 $TMPFILE9 $TMPFILEA $TMPFILEB - -python -m ietf.idindex.generate_all_id_txt >> $TMPFILE1 -python -m ietf.idindex.generate_id_index_txt >> $TMPFILE2 -python -m ietf.idindex.generate_id_abstracts_txt >> $TMPFILE3 -cp $TMPFILE1 $TMPFILE4 -cp $TMPFILE2 $TMPFILE5 -cp $TMPFILE3 $TMPFILE6 -cp $TMPFILE1 $TMPFILE8 -cp $TMPFILE2 $TMPFILE9 -cp $TMPFILE3 $TMPFILEA -python -m ietf.idindex.generate_all_id2_txt >> $TMPFILE7 -cp $TMPFILE7 $TMPFILEB - -mv $TMPFILE1 $ID/all_id.txt -mv $TMPFILE2 $ID/1id-index.txt -mv $TMPFILE3 $ID/1id-abstracts.txt -mv $TMPFILE4 $DOWNLOAD/id-all.txt -mv $TMPFILE5 $DOWNLOAD/id-index.txt -mv $TMPFILE6 $DOWNLOAD/id-abstract.txt -mv $TMPFILE7 $ID/all_id2.txt -mv $TMPFILE8 $DERIVED/all_id.txt -mv $TMPFILE9 $DERIVED/1id-index.txt -mv $TMPFILEA $DERIVED/1id-abstracts.txt -mv $TMPFILEB $DERIVED/all_id2.txt - -$DTDIR/ietf/manage.py generate_idnits2_rfc_status -$DTDIR/ietf/manage.py generate_idnits2_rfcs_obsoleted - -CHARTER=/a/www/ietf-ftp/charter -wget -q https://datatracker.ietf.org/wg/1wg-charters-by-acronym.txt -O $CHARTER/1wg-charters-by-acronym.txt -wget -q https://datatracker.ietf.org/wg/1wg-charters.txt -O $CHARTER/1wg-charters.txt - -# Regenerate the last week of bibxml-ids -$DTDIR/ietf/manage.py generate_draft_bibxml_files - -# Create and update group wikis -#$DTDIR/ietf/manage.py create_group_wikis - -# exit 0 diff --git a/bin/mkdiagram b/bin/mkdiagram deleted file mode 100755 index 4f015c0abe..0000000000 --- a/bin/mkdiagram +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash - -# assume we're in bin/, sibling to ietf/ - -cd ${0%/*}/../ietf || { echo "CD to ietf directory failed, bailing out"; exit; } - -trap 'echo "$program($LINENO): Command failed with error code $? 
($0 $*)"; exit 1' ERR - -if [ "$*" ]; then apps="$@"; graph="${1%.*}"; else apps=$(ls */models.py | sed 's!/models.py!!'); graph="models"; fi - -newapps="doc group meeting message person name" -legacyapps="announcements idindex idrfc idtracker iesg ietfauth ipr liaisons mailinglists proceedings redirects submit wgcharter wginfo" - -proxy="$(grep ^class */proxy.py | tr '()' ' ' | awk '{printf $2 ","}')" -names="$(grep ^class name/models.py | tr '()' ' ' | awk '{printf $2 ","}')" -legacy="$(for app in $legacyapps; do grep ^class $app/models.py | tr '()' ' '; done | grep -v ' Meeting\\(' | awk '{printf $2 ","}')" -events="$(egrep '^class .+DocEvent' doc/models.py | tr '()' ' ' | awk '{printf $2 ","}')" - -echo -e "proxy: $proxy\n" -echo -e "names: $names\n" -echo -e "legacy:$legacy\n" -echo -e "events:$events\n" - -exclude="--exclude=$proxy,$names,$legacy" - -export PYTHONPATH="$PWD/.." - -echo "Validating..." -./manage.py validate - -export PYTHONPATH=`dirname $PWD` -module=${PWD##*/} -export DJANGO_SETTINGS_MODULE=$module.settings -export graph -export title - -echo "Generate model graph" -graph="models-with-names-and-events" -title="New IETF Database schema" -${0%/*}/../ietf/manage.py graph_models --exclude="$proxy,$legacy" --title "$title" $apps > $graph.dot && dot -Tpng $graph.dot > $graph.png - -echo "Generate new model without names" -graph="models-with-names" -title="New IETF Database schema, without name tables" -modelviz.py --exclude="$proxy,$legacy,$names" --title "$title" $apps > $graph.dot && dot -Tpng $graph.dot > $graph.png - -echo "Generate new model without names and subevents" -graph="models" -title="New IETF Database schema, without name tables and subevents" -modelviz.py --exclude="$proxy,$legacy,$names,$events" --title "$title" $apps > $graph.dot && dot -Tpng $graph.dot > $graph.png diff --git a/bin/mm_hourly b/bin/mm_hourly deleted file mode 100755 index 0d1da2e572..0000000000 --- a/bin/mm_hourly +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash - -# Hourly datatracker jobs, ***run as mailman*** -# -# This script is expected to be triggered by cron from -# $DTDIR/etc/cron.d/datatracker which should be symlinked from -# /etc/cron.d/ - -export LANG=en_US.UTF-8 -export PYTHONIOENCODING=utf-8 - -# Make sure we stop if something goes wrong: -program=${0##*/} -trap 'echo "$program($LINENO): Command failed with error code $? ([$$] $0 $*)"; exit 1' ERR - -DTDIR=/a/www/ietf-datatracker/web -cd $DTDIR/ - -# Set up the virtual environment -source $DTDIR/env/bin/activate - -logger -p user.info -t cron "Running $DTDIR/bin/mm_hourly" - -$DTDIR/ietf/manage.py import_mailman_listinfo diff --git a/bin/monthly b/bin/monthly deleted file mode 100755 index 1d36abc210..0000000000 --- a/bin/monthly +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -# Weekly datatracker jobs. 
-# -# This script is expected to be triggered by cron from -# /etc/cron.d/datatracker -export LANG=en_US.UTF-8 -export PYTHONIOENCODING=utf-8 - -DTDIR=/a/www/ietf-datatracker/web -cd $DTDIR/ - -# Set up the virtual environment -source $DTDIR/env/bin/activate - -logger -p user.info -t cron "Running $DTDIR/bin/monthly" - diff --git a/bin/release-coverage b/bin/release-coverage deleted file mode 100755 index 22177c17a6..0000000000 --- a/bin/release-coverage +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/bash -zcat release-coverage.json.gz | jq 'to_entries[] | [.value.time, .key, .value.code.coverage, .value.template.coverage, .value.url.coverage] ' 2>/dev/null | tr "\n][" " \n" | tr -d ' "Z' | tr ",T" " " | sort -n | cut -c 2- | sed -n '/2015-03-10/,$p' diff --git a/bin/update b/bin/update deleted file mode 100755 index bcb6e8b129..0000000000 --- a/bin/update +++ /dev/null @@ -1,229 +0,0 @@ -#!/bin/bash - -version="0.34" -program=$(basename $0) - -NEW="" # If there are more than $NEW % new lines, skip update -OLD="" # If there are more than $OLD % deleted lines, skip update -FILE="" -verbose="" -silent="" - -# ---------------------------------------------------------------------- -function usage() { -cat < -EOF -exit -} - - -# ---------------------------------------------------------------------- -function note() { - if [ -n "$verbose" ]; then - echo -e "$program: $*" - fi -} - -# ---------------------------------------------------------------------- -function warn() { - [ "$QUIET" ] || echo -e "$program: $*" -} - -# ---------------------------------------------------------------------- -function err() { - echo -e "$program: $*" > /dev/stderr -} - -# ----------------------------------------------------------------------------- -function leave() { - errcode=$1; shift - if [ "$errcode" -ge "2" ]; then warn "$*"; else note "$*"; fi - if [ -f "$tempfile" ]; then rm $tempfile; fi - if [ -f "$difffile" ]; then rm $difffile; fi - if [ "$errcode" = "1" -a "$RESULT" = "0" ]; then exit 0; else exit $errcode; fi -} - -# ---------------------------------------------------------------------- -# Set up error trap -trap 'leave 127 "$program($LINENO): Command failed with error code $? while processing '$origfile'."' ERR - -# exit with a message if a command fails -set -e - -# ---------------------------------------------------------------------- -# Get any options -# - -# Default values -PAT="\$path\$base.%Y-%m-%d_%H%M" -RESULT="0" -QUIET="" - -# Based on the sample code in /usr/share/doc/util-linux/examples/parse.bash.gz -if [ "$(uname)" = "Linux" ]; then - GETOPT_RESULT=$(getopt -o bc:ef:hn:o:p:qrvV --long backup,maxchg:,empty,file:,help,maxnew:,maxold:,prefix:,report,quiet,verbose,version -n "$program" -- "$@") -else - GETOPT_RESULT=$(getopt bc:ef:hn:o:p:qrvV "$@") -fi - -if [ $? != 0 ] ; then echo "Terminating..." 
>&2 ; exit 1 ; fi - -note "GETOPT_RESULT: $GETOPT_RESULT" -eval set -- "$GETOPT_RESULT" - -while true ; do - case "$1" in - -b|--backup) backup=1; shift ;; # Back up earlier versions by creating a backup file - -c|--maxchg) CHG="$2"; shift 2 ;; # Limit on percentage of changed lines - -e|--empty) empty=1; shift ;; # Permit the update to be empty (default: discard) - -f|--file) FILE="$2"; shift 2 ;; # Read input from FILE instead of standard input - -h|--help) usage; shift ;; # Show this text and exit - -n|--maxnew) NEW="$2"; shift 2 ;; # Limit on percentage of new (added) lines - -o|--maxold) OLD="$2"; shift 2 ;; # Limit on percentage of old (deleted) lines - -p|--pat*) PAT="$2"; shift 2 ;; # Backup name base ('$path$base.%Y%m%d_%H%M') - -q|--quiet) QUIET=1; shift;; # Be less verbose - -r|--result) RESULT=1; shift ;; # Return 1 if update not done - -v|--verbose) verbose=1; shift ;; # Be more verbose about what's happening - -V|--version) echo -e "$program\t$version"; exit;; # Show version and exit - --) shift ; break ;; - *) echo "$program: Internal error, inconsistent option specification." ; exit 1 ;; - esac -done - -if [ $CHG ]; then OLD=$CHG; NEW=$CHG; fi - -if [ $# -lt 1 ]; then echo -e "$program: Missing output filename\n"; usage; fi - -origfile=$1 -tempfile=$(mktemp) -difffile=$(mktemp) - -if [ -e "$origfile" ]; then - cp -p $origfile $tempfile # For ownership and permissions - cat $FILE > $tempfile - [ "$FILE" ] && touch -r $FILE $tempfile - # This won't work if we don't have sufficient privileges: - #chown --reference=$origfile $tempfile - #chmod --reference=$origfile $tempfile -else - cat $FILE > $origfile - [ "$FILE" ] && touch -r $FILE $tempfile - leave 0 "Created file '$origfile'" -fi - -origlen=$(wc -c < $origfile) -newlen=$(wc -c < $tempfile) - -if [ $origlen = 0 -a $newlen = 0 ]; then - rm $tempfile - leave 1 "New content is identical (and void) - not updating '$origfile'." -fi -if [ $newlen = 0 -a -z "$empty" ]; then - leave 1 "New content is void - not updating '$origfile'." -fi - -diff $origfile $tempfile > $difffile || [ $? -le 1 ] && true # suppress the '1' error code on differences -difflen=$(wc -l < $difffile) -if [ $difflen = 0 ]; then - leave 1 "New content is identical - not updating '$origfile'." -fi - -if [ "$OLD" -o "$NEW" ]; then - - if [ "$NEW" ]; then maxnew=$(( $origlen * $NEW / 100 )); fi - if [ "$OLD" ]; then maxdel=$(( $origlen * $OLD / 100 )); fi - - newcount=$(grep "^> " $difffile | wc -c) - outcount=$(grep "^< " $difffile | wc -c) - delcount=$(grep "^! " $difffile | wc -c) - delcount=$(( $outcount + $delcount )) - rm $difffile - - if [ "$OLD" ]; then - if [ "$delcount" -ge "$maxdel" ]; then - cp $tempfile $origfile.update - leave 2 "New content has too many removed lines ($delcount/$origlen)\n - not updating '$origfile'.\nNew content placed in '$origfile.update' instead" - fi - fi - if [ "$NEW" ]; then - if [ "$newcount" -ge "$maxnew" ]; then - cp $tempfile $origfile.update - leave 2 "New content has too many added lines ($newcount/$origlen)\n - not updating '$origfile'.\nNew content placed in '$origfile.update' instead" - fi - fi -fi - -if [ "$backup" ]; then - - path=${origfile%/*} - name=${origfile##*/} - base=${name%.*} - ext=${origfile##*.} - - if [ "$ext" = "$origfile" ]; then - ext="" - elif [ ! 
"${ext%/*}" = "$ext" ]; then - ext="" - else - ext=".$ext" - fi - - if [ "$path" = "$origfile" ]; then - path="" - else - path="$path/" - fi - - ver=1 - backfile=$(eval date +"$PAT") - backpath="${backfile%/*}" - if [ "$backpath" = "$backfile" ]; then - backpath="." - fi - if [ ! -d $backpath ]; then - if [ -e $backpath ]; then - leave 3 "The backup path '$backpath' exists but isn't a directory" - else - mkdir -p $backpath - fi - fi - while [ -e "$backfile,$ver$ext" ]; do - ver=$(( $ver+1 )) - done - note "Saving backup: $backfile,$ver$ext" - cp -p "$origfile" "$backfile,$ver$ext" - chmod -w "$backfile,$ver$ext" || true -fi - -if ! mv $tempfile $origfile; then cp -p $tempfile $origfile; fi -leave 0 "Updated file '$origfile'" diff --git a/bin/weekly b/bin/weekly deleted file mode 100755 index cca8403fd4..0000000000 --- a/bin/weekly +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash - -# Weekly datatracker jobs. -# -# This script is expected to be triggered by cron from -# /etc/cron.d/datatracker -export LANG=en_US.UTF-8 -export PYTHONIOENCODING=utf-8 - -DTDIR=/a/www/ietf-datatracker/web -cd $DTDIR/ - -# Set up the virtual environment -source $DTDIR/env/bin/activate - -logger -p user.info -t cron "Running $DTDIR/bin/weekly" - - -# Send out weekly summaries of apikey usage - -$DTDIR/ietf/manage.py send_apikey_usage_emails - -# Send notifications about coming expirations -$DTDIR/ietf/bin/notify-expirations - diff --git a/client/App.vue b/client/App.vue index 2a6c5e6e98..7750674296 100644 --- a/client/App.vue +++ b/client/App.vue @@ -30,17 +30,24 @@ const appContainer = ref(null) // Set user theme // -------------------------------------------------------------------- -const desiredTheme = window.localStorage?.getItem('theme') -if (desiredTheme === 'dark') { - siteStore.theme = 'dark' -} else if (desiredTheme === 'light') { - siteStore.theme = 'light' -} else if (window.matchMedia("(prefers-color-scheme: dark)").matches) { - siteStore.theme = 'dark' -} else { - siteStore.theme = 'light' +function updateTheme() { + const desiredTheme = window.localStorage?.getItem('theme') + if (desiredTheme === 'dark') { + siteStore.theme = 'dark' + } else if (desiredTheme === 'light') { + siteStore.theme = 'light' + } else if (window.matchMedia("(prefers-color-scheme: dark)").matches) { + siteStore.theme = 'dark' + } else { + siteStore.theme = 'light' + } } +updateTheme() + +// this change event fires for either light or dark changes +window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', updateTheme) + // -------------------------------------------------------------------- // Handle browser resize // -------------------------------------------------------------------- diff --git a/client/Embedded.vue b/client/Embedded.vue index a0f0d2831e..80b105dc15 100644 --- a/client/Embedded.vue +++ b/client/Embedded.vue @@ -1,12 +1,13 @@ + diff --git a/client/components/Status.vue b/client/components/Status.vue new file mode 100644 index 0000000000..4fded5bbe4 --- /dev/null +++ b/client/components/Status.vue @@ -0,0 +1,80 @@ + diff --git a/client/embedded.js b/client/embedded.js index f3b01f68f5..0509c0aecf 100644 --- a/client/embedded.js +++ b/client/embedded.js @@ -1,5 +1,12 @@ import { createApp } from 'vue' +import piniaPersist from 'pinia-plugin-persist' import Embedded from './Embedded.vue' +import { createPiniaSingleton } from './shared/create-pinia-singleton' + +// Initialize store (Pinia) + +const pinia = createPiniaSingleton() +pinia.use(piniaPersist) // Mount App @@ -9,5 +16,6 @@ for (const mnt 
of mountEls) { componentName: mnt.dataset.component, componentId: mnt.dataset.componentId }) + app.use(pinia) app.mount(mnt) } diff --git a/client/index.html b/client/index.html index 740c994329..75d6f77727 100644 --- a/client/index.html +++ b/client/index.html @@ -12,6 +12,7 @@ +
@@ -20,5 +21,6 @@
+ diff --git a/client/main.js b/client/main.js index 0dc5cf32e0..3fbad907b1 100644 --- a/client/main.js +++ b/client/main.js @@ -1,14 +1,14 @@ import { createApp } from 'vue' -import { createPinia } from 'pinia' import piniaPersist from 'pinia-plugin-persist' import App from './App.vue' import router from './router' +import { createPiniaSingleton } from './shared/create-pinia-singleton' const app = createApp(App, {}) // Initialize store (Pinia) -const pinia = createPinia() +const pinia = createPiniaSingleton() pinia.use(piniaPersist) app.use(pinia) diff --git a/client/shared/create-pinia-singleton.js b/client/shared/create-pinia-singleton.js new file mode 100644 index 0000000000..f0013245a1 --- /dev/null +++ b/client/shared/create-pinia-singleton.js @@ -0,0 +1,6 @@ +import { createPinia } from 'pinia' + +export function createPiniaSingleton(){ + window.pinia = window.pinia ?? createPinia() + return window.pinia +} diff --git a/client/shared/json-wrapper.js b/client/shared/json-wrapper.js new file mode 100644 index 0000000000..e080b5a479 --- /dev/null +++ b/client/shared/json-wrapper.js @@ -0,0 +1,20 @@ +export const JSONWrapper = { + parse(jsonString, defaultValue) { + if(typeof jsonString !== "string") { + return defaultValue + } + try { + return JSON.parse(jsonString); + } catch (e) { + console.error(e); + } + return defaultValue + }, + stringify(data) { + try { + return JSON.stringify(data); + } catch (e) { + console.error(e) + } + }, +} diff --git a/client/shared/local-storage-wrapper.js b/client/shared/local-storage-wrapper.js new file mode 100644 index 0000000000..88cd3dc589 --- /dev/null +++ b/client/shared/local-storage-wrapper.js @@ -0,0 +1,42 @@ + +/* + * DEVELOPER NOTE + * + * Some browsers can block storage (localStorage, sessionStorage) + * access for privacy reasons, and all browsers can have storage + * that's full, and then they throw exceptions. + * + * See https://michalzalecki.com/why-using-localStorage-directly-is-a-bad-idea/ + * + * Exceptions can even be thrown when testing if localStorage + * even exists. This can throw: + * + * if (window.localStorage) + * + * Also localStorage/sessionStorage can be enabled after DOMContentLoaded + * so we handle it gracefully. 
+ * + * 1) we need to wrap all usage in try/catch + * 2) we need to defer actual usage of these until + * necessary, + * + */ + +export const localStorageWrapper = { + getItem: (key) => { + try { + return localStorage.getItem(key) + } catch (e) { + console.error(e); + } + return null; + }, + setItem: (key, value) => { + try { + return localStorage.setItem(key, value) + } catch (e) { + console.error(e); + } + return; + }, +} diff --git a/client/shared/status-common.js b/client/shared/status-common.js new file mode 100644 index 0000000000..6503bfbf63 --- /dev/null +++ b/client/shared/status-common.js @@ -0,0 +1,5 @@ +// Used in Playwright Status and components + +export const STATUS_STORAGE_KEY = "status-dismissed" + +export const generateStatusTestId = (id) => `status-${id}` diff --git a/client/shared/urls.json b/client/shared/urls.json index 285caa07d2..15410d68df 100644 --- a/client/shared/urls.json +++ b/client/shared/urls.json @@ -1,5 +1,6 @@ { "bofDefinition": "https://www.ietf.org/how/bofs/", + "hackathonWiki": "https://wiki.ietf.org/meeting/{meetingNumber}/hackathon", "meetingCalIcs": "/meeting/{meetingNumber}/agenda.ics", "meetingDetails": "/meeting/{meetingNumber}/session/{eventAcronym}/", "meetingMaterialsPdf": "/meeting/{meetingNumber}/agenda/{eventAcronym}-drafts.pdf", diff --git a/client/shared/xslugify.js b/client/shared/xslugify.js index daf0bdf2ba..e1ac556ddf 100644 --- a/client/shared/xslugify.js +++ b/client/shared/xslugify.js @@ -1,5 +1,5 @@ import slugify from 'slugify' export default (str) => { - return slugify(str.replace('/', '-'), { lower: true }) + return slugify(str.replaceAll('/', '-').replaceAll(/['&]/g, ''), { lower: true }) } diff --git a/debug.py b/debug.py index bf34367cce..4f0d64bae2 100644 --- a/debug.py +++ b/debug.py @@ -3,15 +3,7 @@ import sys import time as timeutils import inspect -from typing import Callable -try: - import syslog - logger = syslog.syslog # type: Callable -except ImportError: # import syslog will fail on Windows boxes - import logging - logging.basicConfig(filename='tracker.log',level=logging.INFO) - logger = logging.info try: from pprint import pformat @@ -55,7 +47,7 @@ def fix(s,n=64): if len(s) > n+3: s = s[:n]+"..." return s - def wrap(fn, *params,**kwargs): + def wrap(*params,**kwargs): call = wrap.callcount = wrap.callcount + 1 indent = ' ' * _report_indent[0] @@ -81,8 +73,8 @@ def wrap(fn, *params,**kwargs): return ret wrap.callcount = 0 if debug: - from decorator import decorator - return decorator(wrap, fn) + from functools import update_wrapper + return update_wrapper(wrap, fn) else: return fn @@ -119,7 +111,7 @@ def clock(s): def time(fn): """Decorator to print timing information about a function call. 
""" - def wrap(fn, *params,**kwargs): + def wrap(*params,**kwargs): indent = ' ' * _report_indent[0] fc = "%s.%s()" % (fn.__module__, fn.__name__,) @@ -132,8 +124,8 @@ def wrap(fn, *params,**kwargs): return ret wrap.callcount = 0 if debug: - from decorator import decorator - return decorator(wrap, fn) + from functools import update_wrapper + return update_wrapper(wrap, fn) else: return fn @@ -155,13 +147,6 @@ def showpos(name): indent = ' ' * (_report_indent[0]) sys.stderr.write("%s%s:%s: %s: '%s'\n" % (indent, fn, line, name, value)) -def log(name): - if debug: - frame = inspect.stack()[1][0] - value = eval(name, frame.f_globals, frame.f_locals) - indent = ' ' * (_report_indent[0]) - logger("%s%s: %s" % (indent, name, value)) - def pprint(name): if debug: frame = inspect.stack()[1][0] @@ -190,7 +175,7 @@ def type(name): value = eval(name, frame.f_globals, frame.f_locals) indent = ' ' * (_report_indent[0]) sys.stderr.write("%s%s: %s\n" % (indent, name, value)) - + def say(s): if debug: indent = ' ' * (_report_indent[0]) @@ -205,11 +190,11 @@ def wrapper(*args, **kwargs): prof.dump_stats(datafn) return retval if debug: - from decorator import decorator - return decorator(wrapper, fn) + from functools import update_wrapper + return update_wrapper(wrapper, fn) else: return fn - + def traceback(levels=None): if debug: indent = ' ' * (_report_indent[0]) diff --git a/dev/INSTALL b/dev/INSTALL deleted file mode 100644 index 15c7472972..0000000000 --- a/dev/INSTALL +++ /dev/null @@ -1,155 +0,0 @@ -============================================================================== - IETF Datatracker -============================================================================== - ------------------------------------------------------------------------------- - Installation Instructions ------------------------------------------------------------------------------- - -General Instructions for Deployment of a New Release -==================================================== - - 0. Prepare to hold different roles at different stages of the instructions below. - You will need to be root, wwwrun, and some user in group docker. - Consider using separate shells for the wwwrun and other roles. These instructions - are written assuming you will only use one shell. - - 1. Make a directory to hold the new release as wwwrun:: - sudo su - -s /bin/bash wwwrun - mkdir /a/www/ietf-datatracker/${releasenumber} - cd /a/www/ietf-datatracker/${releasenumber} - - 2. Fetch the release tarball from github - (see https://github.com/ietf-tools/datatracker/releases):: - - wget https://github.com/ietf-tools/datatracker/releases/download/${releasenumber}/release.tar.gz - tar xzvf release.tar.gz - - 3. Copy ietf/settings_local.py from previous release:: - - cp ../web/ietf/settings_local.py ietf/ - - 4. Setup a new virtual environment and install requirements:: - - python3.9 -mvenv env - source env/bin/activate - pip install -r requirements.txt - pip freeze > frozen-requirements.txt - - (The pip freeze command records the exact versions of the Python libraries that pip installed. - This is used by the celery docker container to ensure it uses the same library versions as - the datatracker service.) - - 5. Move static files into place for CDN (/a/www/www6s/lib/dt): - - ietf/manage.py collectstatic - - 6. Run system checks (which patches the just installed modules):: - - ietf/manage.py check - - 7. 
Switch to the docker directory and update images as a user in group docker: - - exit - cd /a/docker/datatracker - docker image tag ghcr.io/ietf-tools/datatracker-celery:latest datatracker-celery-fallback - docker image tag ghcr.io/ietf-tools/datatracker-mq:latest datatracker-mq-fallback - docker-compose pull - - 8. Stop and remove the async task containers: - Wait for this to finish cleanly. Usually this will only be a few seconds, but it may take up - to about 10 minutes for the 'down' command to complete if a long-running task is in progress. - - docker-compose down - - 9. Stop the datatracker - - sudo systemctl stop datatracker.socket datatracker.service - - 10. Return to the release directory and run migrations as wwwrun: - - sudo su - -s /bin/bash wwwrun - cd /a/www/ietf-datatracker/${releasenumber} - ietf/manage.py migrate - - Take note if any migrations were executed. - - 11. Back out one directory level, then re-point the 'web' symlink:: - - cd .. - rm ./web; ln -s ${releasenumber} web - - 12. Start the datatracker service (it is no longer necessary to restart apache) :: - - exit - sudo systemctl start datatracker.service datatracker.socket - - 13. Start async task worker and message broker: - - cd /a/docker/datatracker - bash startcommand - - 14. Verify operation: - - http://datatracker.ietf.org/ - - 15. If install failed and there were no migrations at step 9, revert web symlink and docker update and repeat the - restart in steps 11 and 12. To revert the docker update: - - cd /a/docker/datatracker - docker-compose down - docker image rm ghcr.io/ietf-tools/datatracker-celery:latest ghcr.io/ietf-tools/datatracker-mq:latest - docker image tag datatracker-celery-fallback ghcr.io/ietf-tools/datatracker-celery:latest - docker image tag datatracker-mq-fallback ghcr.io/ietf-tools/datatracker-mq:latest - cd - - - If there were migrations at step 10, they will need to be reversed before the restart at step 12. - If it's not obvious what to do to reverse the migrations, contact the dev team. - - -Patching a Production Release -============================= - -Sometimes it can prove necessary to patch an existing release. -The following process should be used: - - 1. Code and test the patch on an copy of the release with any - previously applied patches put in place. - - 2. Produce a patch file, named with date and subject:: - - $ git diff > 2013-03-25-ballot-calculation.patch - - 3. Move the patch file to the production server, and place it in - '/a/www/ietf-datatracker/patches/' - - 4. Make a recursive copy of the production code to a new directory, named with a patch number. - - /a/www/ietf-datatracker $ rsync -a web/ ${releasenumber}.p1/ - - 5. Apply the patch:: - - /a/www/ietf-datatracker $ cd ${releasenumber}.p1/ - /a/www/ietf-datatracker/${releasnumber}.p1 $ patch -p1 \ - < ../patches/2013-03-25-ballot-calculation.patch - - This must not produce any messages about failing to apply any chunks; - if it does, go back to 1. and figure out why. - - 6. Edit ``.../ietf/__init__.py`` in the new patched release to indicate the patch - version in the ``__patch__`` string. - - 7. Stop the async task container (this may take a few minutes if tasks are in progress): - - cd /a/docker/datatracker - docker-compose down - - 8. Change the 'web' symlink, reload etc. as described in - `General Instructions for Deployment of a New Release`_. - - 9. 
Start async task worker: - - cd /a/docker/datatracker - bash startcommand - - diff --git a/dev/build/Dockerfile b/dev/build/Dockerfile new file mode 100644 index 0000000000..e57fecd5f2 --- /dev/null +++ b/dev/build/Dockerfile @@ -0,0 +1,41 @@ +FROM ghcr.io/ietf-tools/datatracker-app-base:20260410T1557 +LABEL maintainer="IETF Tools Team " + +ENV DEBIAN_FRONTEND=noninteractive + +# uid 498 = wwwrun and gid 496 = www on ietfa +RUN groupadd -g 1000 datatracker && \ + useradd -c "Datatracker User" -u 1000 -g datatracker -m -s /bin/false datatracker + +RUN apt-get purge -y imagemagick imagemagick-6-common + +# Install libreoffice (needed via PPT2PDF_COMMAND) +RUN apt-get update && \ + apt-get -qy install libreoffice-nogui + +COPY . . +COPY ./dev/build/start.sh ./start.sh +COPY ./dev/build/datatracker-start.sh ./datatracker-start.sh +COPY ./dev/build/migration-start.sh ./migration-start.sh +COPY ./dev/build/celery-start.sh ./celery-start.sh +COPY ./dev/build/gunicorn.conf.py ./gunicorn.conf.py + +RUN pip3 --disable-pip-version-check --no-cache-dir install -r requirements.txt && \ + echo '# empty' > ietf/settings_local.py && \ + ietf/manage.py patch_libraries && \ + rm -f ietf/settings_local.py + +RUN chmod +x start.sh && \ + chmod +x datatracker-start.sh && \ + chmod +x migration-start.sh && \ + chmod +x celery-start.sh && \ + chmod +x docker/scripts/app-create-dirs.sh && \ + sh ./docker/scripts/app-create-dirs.sh + +RUN mkdir -p /a + +VOLUME [ "/a" ] + +EXPOSE 8000 + +CMD ["./start.sh"] diff --git a/dev/build/TARGET_BASE b/dev/build/TARGET_BASE new file mode 100644 index 0000000000..f430037c09 --- /dev/null +++ b/dev/build/TARGET_BASE @@ -0,0 +1 @@ +20260410T1557 diff --git a/dev/build/celery-start.sh b/dev/build/celery-start.sh new file mode 100644 index 0000000000..69dcd7bbda --- /dev/null +++ b/dev/build/celery-start.sh @@ -0,0 +1,52 @@ +#!/bin/bash -e +# +# Run a celery worker +# +echo "Running Datatracker checks..." +./ietf/manage.py check + +# Check whether the blobdb database exists - inspectdb will return a false +# status if not. +if ietf/manage.py inspectdb --database blobdb > /dev/null 2>&1; then + HAVE_BLOBDB="yes" +fi + +migrations_applied_for () { + local DATABASE=${1:-default} + ietf/manage.py migrate --check --database "$DATABASE" +} + +migrations_all_applied () { + if [[ "$HAVE_BLOBDB" == "yes" ]]; then + migrations_applied_for default && migrations_applied_for blobdb + else + migrations_applied_for default + fi +} + +if ! migrations_all_applied; then + echo "Unapplied migrations found, waiting to start..." + sleep 5 + while ! migrations_all_applied ; do + echo "... still waiting for migrations..." + sleep 5 + done +fi + +echo "Starting Celery..." + +cleanup () { + # Cleanly terminate the celery app by sending it a TERM, then waiting for it to exit. + if [[ -n "${celery_pid}" ]]; then + echo "Gracefully terminating celery worker. This may take a few minutes if tasks are in progress..." + kill -TERM "${celery_pid}" + wait "${celery_pid}" + fi +} + +trap 'trap "" TERM; cleanup' TERM + +# start celery in the background so we can trap the TERM signal +celery "$@" & +celery_pid=$! 
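+# The final `wait` below blocks until the worker exits. If a TERM signal arrives
+# while waiting, the trap above runs cleanup(), which forwards TERM to the celery
+# worker and waits for it to finish before the script itself exits.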
+wait "${celery_pid}" diff --git a/dev/build/collectstatics.sh b/dev/build/collectstatics.sh new file mode 100644 index 0000000000..44f1c608a9 --- /dev/null +++ b/dev/build/collectstatics.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +# Copy temp local settings +cp dev/build/settings_local_collectstatics.py ietf/settings_local.py + +# Install Python dependencies +pip --disable-pip-version-check --no-cache-dir install -r requirements.txt + +# Collect statics +ietf/manage.py collectstatic + +# Delete temp local settings +rm ietf/settings_local.py \ No newline at end of file diff --git a/dev/build/datatracker-start.sh b/dev/build/datatracker-start.sh new file mode 100644 index 0000000000..012a563412 --- /dev/null +++ b/dev/build/datatracker-start.sh @@ -0,0 +1,50 @@ +#!/bin/bash -e + +echo "Running Datatracker checks..." +./ietf/manage.py check + +# Check whether the blobdb database exists - inspectdb will return a false +# status if not. +if ietf/manage.py inspectdb --database blobdb > /dev/null 2>&1; then + HAVE_BLOBDB="yes" +fi + +migrations_applied_for () { + local DATABASE=${1:-default} + ietf/manage.py migrate --check --database "$DATABASE" +} + +migrations_all_applied () { + if [[ "$HAVE_BLOBDB" == "yes" ]]; then + migrations_applied_for default && migrations_applied_for blobdb + else + migrations_applied_for default + fi +} + +if ! migrations_all_applied; then + echo "Unapplied migrations found, waiting to start..." + sleep 5 + while ! migrations_all_applied ; do + echo "... still waiting for migrations..." + sleep 5 + done +fi + +echo "Starting Datatracker..." + +# trap TERM and shut down gunicorn +cleanup () { + if [[ -n "${gunicorn_pid}" ]]; then + echo "Terminating gunicorn..." + kill -TERM "${gunicorn_pid}" + wait "${gunicorn_pid}" + fi +} + +trap 'trap "" TERM; cleanup' TERM + +# start gunicorn in the background so we can trap the TERM signal +gunicorn -c /workspace/gunicorn.conf.py ${DATATRACKER_GUNICORN_EXTRA_ARGS} ietf.wsgi:application & +gunicorn_pid=$! +wait "${gunicorn_pid}" diff --git a/dev/deploy/exclude-patterns.txt b/dev/build/exclude-patterns.txt similarity index 100% rename from dev/deploy/exclude-patterns.txt rename to dev/build/exclude-patterns.txt diff --git a/dev/build/gunicorn.conf.py b/dev/build/gunicorn.conf.py new file mode 100644 index 0000000000..be8808ec48 --- /dev/null +++ b/dev/build/gunicorn.conf.py @@ -0,0 +1,181 @@ +# Copyright The IETF Trust 2024-2026, All Rights Reserved + +import os +import ietf +from opentelemetry import trace +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.instrumentation.django import DjangoInstrumentor +from opentelemetry.instrumentation.psycopg2 import Psycopg2Instrumentor +from opentelemetry.instrumentation.pymemcache import PymemcacheInstrumentor +from opentelemetry.instrumentation.requests import RequestsInstrumentor + +# Bind all ipv4 interfaces and ipv6 loopback interface. Would prefer to bind all +# ipv6 as well, but something conflicts with [::]:8000. 
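+# gunicorn accepts a list of addresses here and opens a listening socket for each one.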
+bind = ["0.0.0.0:8000", "[::1]:8000"] + +# Disable control socket +control_socket_disable = True + +# Settings configurable via environment +workers = int(os.environ.get("DATATRACKER_GUNICORN_WORKERS", "9")) +max_requests = int(os.environ.get("DATATRACKER_GUNICORN_MAX_REQUESTS", "32768")) +timeout = int(os.environ.get("DATATRACKER_GUNICORN_TIMEOUT", "180")) +loglevel = os.environ.get("DATATRACKER_GUNICORN_LOG_LEVEL", "info") + +# Logging / stdout capture +capture_output = True +accesslog = "-" + +# Configure security scheme headers for forwarded requests. Cloudflare sets X-Forwarded-Proto +# for us. Don't trust any of the other similar headers. Only trust the header if it's coming +# from localhost, as all legitimate traffic will reach gunicorn via co-located nginx. +secure_scheme_headers = {"X-FORWARDED-PROTO": "https"} +forwarded_allow_ips = "127.0.0.1, ::1" # this is the default + +# Log as JSON on stdout (to distinguish from Django's logs on stderr) +# +# This is applied as an update to gunicorn's glogging.CONFIG_DEFAULTS. +logconfig_dict = { + "version": 1, + "disable_existing_loggers": False, + "root": {"level": "INFO", "handlers": ["console"]}, + "loggers": { + "gunicorn.error": { + "level": "INFO", + "handlers": ["console"], + "propagate": False, + "qualname": "gunicorn.error", + }, + "gunicorn.access": { + "level": "INFO", + "handlers": ["access_console"], + "propagate": False, + "qualname": "gunicorn.access", + }, + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "formatter": "json", + "stream": "ext://sys.stdout", + }, + "access_console": { + "class": "logging.StreamHandler", + "formatter": "access_json", + "stream": "ext://sys.stdout", + }, + }, + "formatters": { + "json": { + "class": "ietf.utils.jsonlogger.DatatrackerJsonFormatter", + "style": "{", + "format": "{asctime}{levelname}{message}{name}{process}", + }, + "access_json": { + "class": "ietf.utils.jsonlogger.GunicornRequestJsonFormatter", + "style": "{", + "format": "{asctime}{levelname}{message}{name}{process}", + }, + }, +} + +# Track in-flight requests and emit a list of what was happening when a worker is terminated. +# For the default sync worker, there will only be one request per PID, but allow for the +# possibility of multiple requests in case we switch to a different worker class. +# +# This dict is only visible within a single worker, but key by pid to guarantee no conflicts. +# +# Use a list rather than a set to allow for the possibility of overlapping identical requests. +in_flight_by_pid: dict[str, list[str]] = {} # pid -> list of in-flight requests + + +def _describe_request(req): + """Generate a consistent description of a request + + The return value is used to identify in-flight requests, so it must not vary between the + start and end of handling a request. E.g., do not include a timestamp.
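+
+    Example of the resulting format (values are illustrative only):
+    "GET /some/path?foo=bar (client_ip=203.0.113.7, asn=64496, cf_ray=8a1b2c3d4e5f0001)"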
+ """ + client_ip = "-" + asn = "-" + cf_ray = "-" + for header, value in req.headers: + header = header.lower() + if header == "cf-connecting-ip": + client_ip = value + elif header == "x-ip-src-asnum": + asn = value + elif header == "cf-ray": + cf_ray = value + if req.query: + path = f"{req.path}?{req.query}" + else: + path = req.path + return f"{req.method} {path} (client_ip={client_ip}, asn={asn}, cf_ray={cf_ray})" + + +def pre_request(worker, req): + """Log the start of a request and add it to the in-flight list""" + request_description = _describe_request(req) + worker.log.info(f"gunicorn starting to process {request_description}") + in_flight = in_flight_by_pid.setdefault(worker.pid, []) + in_flight.append(request_description) + + +def worker_abort(worker): + """Emit an error log if any requests were in-flight""" + in_flight = in_flight_by_pid.get(worker.pid, []) + if len(in_flight) > 0: + worker.log.error( + f"Aborted worker {worker.pid} with in-flight requests: {', '.join(in_flight)}" + ) + + +def worker_int(worker): + """Emit an error log if any requests were in-flight""" + in_flight = in_flight_by_pid.get(worker.pid, []) + if len(in_flight) > 0: + worker.log.error( + f"Interrupted worker {worker.pid} with in-flight requests: {', '.join(in_flight)}" + ) + + +def post_request(worker, req, environ, resp): + """Remove request from in-flight list when we finish handling it""" + request_description = _describe_request(req) + in_flight = in_flight_by_pid.get(worker.pid, []) + if request_description in in_flight: + in_flight.remove(request_description) + +def post_fork(server, worker): + server.log.info("Worker spawned (pid: %s)", worker.pid) + + # Setting DATATRACKER_OPENTELEMETRY_ENABLE=all in the environment will enable all + # opentelemetry instrumentations. Individual instrumentations can be selected by + # using a space-separated list. See the code below for available instrumentations. + telemetry_env = os.environ.get("DATATRACKER_OPENTELEMETRY_ENABLE", "").strip() + if telemetry_env != "": + enabled_telemetry = [tok.strip().lower() for tok in telemetry_env.split()] + resource = Resource.create(attributes={ + "service.name": "datatracker", + "service.version": ietf.__version__, + "service.instance.id": worker.pid, + "service.namespace": "datatracker", + "deployment.environment.name": os.environ.get("DATATRACKER_SERVICE_ENV", "dev") + }) + trace.set_tracer_provider(TracerProvider(resource=resource)) + otlp_exporter = OTLPSpanExporter(endpoint="https://heimdall-otlp.ietf.org/v1/traces") + + trace.get_tracer_provider().add_span_processor(BatchSpanProcessor(otlp_exporter)) + + # Instrumentations + if "all" in enabled_telemetry or "django" in enabled_telemetry: + DjangoInstrumentor().instrument() + if "all" in enabled_telemetry or "psycopg2" in enabled_telemetry: + Psycopg2Instrumentor().instrument() + if "all" in enabled_telemetry or "pymemcache" in enabled_telemetry: + PymemcacheInstrumentor().instrument() + if "all" in enabled_telemetry or "requests" in enabled_telemetry: + RequestsInstrumentor().instrument() diff --git a/dev/build/migration-start.sh b/dev/build/migration-start.sh new file mode 100644 index 0000000000..578daf5cef --- /dev/null +++ b/dev/build/migration-start.sh @@ -0,0 +1,13 @@ +#!/bin/bash -e + +echo "Running Datatracker migrations..." +./ietf/manage.py migrate --settings=settings_local + +# Check whether the blobdb database exists - inspectdb will return a false +# status if not. 
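+# If that check fails, the blobdb migration step below is skipped and only the
+# default database is migrated.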
+if ./ietf/manage.py inspectdb --database blobdb > /dev/null 2>&1; then + echo "Running Blobdb migrations ..." + ./ietf/manage.py migrate --settings=settings_local --database=blobdb +fi + +echo "Done!" diff --git a/dev/build/settings_local_collectstatics.py b/dev/build/settings_local_collectstatics.py new file mode 100644 index 0000000000..ccb4b33979 --- /dev/null +++ b/dev/build/settings_local_collectstatics.py @@ -0,0 +1,8 @@ +# Copyright The IETF Trust 2007-2019, All Rights Reserved +# -*- coding: utf-8 -*- + +from ietf import __version__ +from ietf.settings import * # pyflakes:ignore + +STATIC_URL = "https://static.ietf.org/dt/%s/"%__version__ +STATIC_ROOT = os.path.abspath(BASE_DIR + "/../static/") diff --git a/dev/build/start.sh b/dev/build/start.sh new file mode 100644 index 0000000000..3b03637068 --- /dev/null +++ b/dev/build/start.sh @@ -0,0 +1,29 @@ +#!/bin/bash +# +# Environment config: +# +# CONTAINER_ROLE - datatracker, celery, beat, migrations, or replicator (defaults to datatracker) +# +case "${CONTAINER_ROLE:-datatracker}" in + auth) + exec ./datatracker-start.sh + ;; + beat) + exec ./celery-start.sh --app=ietf beat + ;; + celery) + exec ./celery-start.sh --app=ietf worker + ;; + datatracker) + exec ./datatracker-start.sh + ;; + migrations) + exec ./migration-start.sh + ;; + replicator) + exec ./celery-start.sh --app=ietf worker --queues=blobdb --concurrency=1 + ;; + *) + echo "Unknown role '${CONTAINER_ROLE}'" + exit 255 +esac diff --git a/dev/celery/Dockerfile b/dev/celery/Dockerfile index 12eb15eb81..e69de29bb2 100644 --- a/dev/celery/Dockerfile +++ b/dev/celery/Dockerfile @@ -1,23 +0,0 @@ -# Dockerfile for celery worker -# -FROM ghcr.io/ietf-tools/datatracker-app-base:latest -LABEL maintainer="IETF Tools Team " - -ENV DEBIAN_FRONTEND=noninteractive - -RUN apt-get purge -y imagemagick imagemagick-6-common - -# Copy the startup file -COPY dev/celery/docker-init.sh /docker-init.sh -RUN sed -i 's/\r$//' /docker-init.sh && \ - chmod +x /docker-init.sh - -# Install current datatracker python dependencies -COPY requirements.txt /tmp/pip-tmp/ -RUN pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt -RUN rm -rf /tmp/pip-tmp - -# Add watchmedo utility for dev containers -RUN pip3 --disable-pip-version-check --no-cache-dir install watchdog[watchmedo] - -ENTRYPOINT [ "/docker-init.sh" ] diff --git a/dev/coverage-action/action.yml b/dev/coverage-action/action.yml index b8d732a534..60c8de2d92 100644 --- a/dev/coverage-action/action.yml +++ b/dev/coverage-action/action.yml @@ -35,7 +35,7 @@ outputs: changelog: description: Changelog with headers prepended and coverage stats + chart appended runs: - using: 'node16' + using: 'node20' main: 'index.js' branding: icon: layers diff --git a/dev/coverage-action/index.js b/dev/coverage-action/index.js index 57249bfdb1..5a1c690be3 100644 --- a/dev/coverage-action/index.js +++ b/dev/coverage-action/index.js @@ -5,20 +5,20 @@ const find = require('lodash/find') const round = require('lodash/round') const fs = require('fs/promises') const { DateTime } = require('luxon') -const isPlainObject = require('lodash/isPlainObject') +// const isPlainObject = require('lodash/isPlainObject') const dec = new TextDecoder() async function main () { const token = core.getInput('token') - const tokenCommon = core.getInput('tokenCommon') + // const tokenCommon = core.getInput('tokenCommon') const inputCovPath = core.getInput('coverageResultsPath') // 'data/coverage-raw.json' const outputCovPath = 
core.getInput('coverageResultsPath') // 'data/coverage.json' const outputHistPath = core.getInput('histCoveragePath') // 'data/historical-coverage.json' const relVersionRaw = core.getInput('version') // 'v7.47.0' const relVersion = relVersionRaw.indexOf('v') === 0 ? relVersionRaw.substring(1) : relVersionRaw const gh = github.getOctokit(token) - const ghCommon = github.getOctokit(tokenCommon) + // const ghCommon = github.getOctokit(tokenCommon) const owner = github.context.repo.owner // 'ietf-tools' const repo = github.context.repo.repo // 'datatracker' const sender = github.context.payload.sender.login // 'rjsparks' @@ -116,137 +116,137 @@ async function main () { } // -> Coverage Chart - if (chartsDirListing.some(c => c.name === `${newRelease.id}.svg`)) { - console.info(`Chart SVG already exists for ${newRelease.name}, skipping...`) - } else { - console.info(`Generating chart SVG for ${newRelease.name}...`) + // if (chartsDirListing.some(c => c.name === `${newRelease.id}.svg`)) { + // console.info(`Chart SVG already exists for ${newRelease.name}, skipping...`) + // } else { + // console.info(`Generating chart SVG for ${newRelease.name}...`) - const { ChartJSNodeCanvas } = require('chartjs-node-canvas') - const chartJSNodeCanvas = new ChartJSNodeCanvas({ type: 'svg', width: 850, height: 300, backgroundColour: '#FFFFFF' }) + // const { ChartJSNodeCanvas } = require('chartjs-node-canvas') + // const chartJSNodeCanvas = new ChartJSNodeCanvas({ type: 'svg', width: 850, height: 300, backgroundColour: '#FFFFFF' }) - // -> Reorder versions - const versions = [] - for (const [key, value] of Object.entries(covData)) { - if (isPlainObject(value)) { - const vRel = find(releases, r => r.tag_name === key || r.tag_name === `v${key}`) - if (!vRel) { - continue - } - versions.push({ - tag: key, - time: vRel.created_at, - stats: { - code: round(value.code * 100, 2), - template: round(value.template * 100, 2), - url: round(value.url * 100, 2) - } - }) - } - } - const roVersions = orderBy(versions, ['time', 'tag'], ['asc', 'asc']) + // // -> Reorder versions + // const versions = [] + // for (const [key, value] of Object.entries(covData)) { + // if (isPlainObject(value)) { + // const vRel = find(releases, r => r.tag_name === key || r.tag_name === `v${key}`) + // if (!vRel) { + // continue + // } + // versions.push({ + // tag: key, + // time: vRel.created_at, + // stats: { + // code: round(value.code * 100, 2), + // template: round(value.template * 100, 2), + // url: round(value.url * 100, 2) + // } + // }) + // } + // } + // const roVersions = orderBy(versions, ['time', 'tag'], ['asc', 'asc']) - // -> Fill axis + data points - const labels = [] - const datasetCode = [] - const datasetTemplate = [] - const datasetUrl = [] + // // -> Fill axis + data points + // const labels = [] + // const datasetCode = [] + // const datasetTemplate = [] + // const datasetUrl = [] - for (const ver of roVersions) { - labels.push(ver.tag) - datasetCode.push(ver.stats.code) - datasetTemplate.push(ver.stats.template) - datasetUrl.push(ver.stats.url) - } + // for (const ver of roVersions) { + // labels.push(ver.tag) + // datasetCode.push(ver.stats.code) + // datasetTemplate.push(ver.stats.template) + // datasetUrl.push(ver.stats.url) + // } - // -> Generate chart SVG - const outputStream = chartJSNodeCanvas.renderToBufferSync({ - type: 'line', - options: { - borderColor: '#CCC', - layout: { - padding: 20 - }, - plugins: { - legend: { - position: 'bottom', - labels: { - font: { - size: 11 - } - } - } - }, - scales: { - x: { - 
ticks: { - font: { - size: 10 - } - } - }, - y: { - ticks: { - callback: (value) => { - return `${value}%` - }, - font: { - size: 10 - } - } - } - } - }, - data: { - labels, - datasets: [ - { - label: 'Code', - data: datasetCode, - borderWidth: 2, - borderColor: '#E53935', - backgroundColor: '#C6282833', - fill: false, - cubicInterpolationMode: 'monotone', - tension: 0.4, - pointRadius: 0 - }, - { - label: 'Templates', - data: datasetTemplate, - borderWidth: 2, - borderColor: '#039BE5', - backgroundColor: '#0277BD33', - fill: false, - cubicInterpolationMode: 'monotone', - tension: 0.4, - pointRadius: 0 - }, - { - label: 'URLs', - data: datasetUrl, - borderWidth: 2, - borderColor: '#7CB342', - backgroundColor: '#558B2F33', - fill: false, - cubicInterpolationMode: 'monotone', - tension: 0.4, - pointRadius: 0 - } - ] - } - }, 'image/svg+xml') - const svg = Buffer.from(outputStream).toString('base64') + // // -> Generate chart SVG + // const outputStream = chartJSNodeCanvas.renderToBufferSync({ + // type: 'line', + // options: { + // borderColor: '#CCC', + // layout: { + // padding: 20 + // }, + // plugins: { + // legend: { + // position: 'bottom', + // labels: { + // font: { + // size: 11 + // } + // } + // } + // }, + // scales: { + // x: { + // ticks: { + // font: { + // size: 10 + // } + // } + // }, + // y: { + // ticks: { + // callback: (value) => { + // return `${value}%` + // }, + // font: { + // size: 10 + // } + // } + // } + // } + // }, + // data: { + // labels, + // datasets: [ + // { + // label: 'Code', + // data: datasetCode, + // borderWidth: 2, + // borderColor: '#E53935', + // backgroundColor: '#C6282833', + // fill: false, + // cubicInterpolationMode: 'monotone', + // tension: 0.4, + // pointRadius: 0 + // }, + // { + // label: 'Templates', + // data: datasetTemplate, + // borderWidth: 2, + // borderColor: '#039BE5', + // backgroundColor: '#0277BD33', + // fill: false, + // cubicInterpolationMode: 'monotone', + // tension: 0.4, + // pointRadius: 0 + // }, + // { + // label: 'URLs', + // data: datasetUrl, + // borderWidth: 2, + // borderColor: '#7CB342', + // backgroundColor: '#558B2F33', + // fill: false, + // cubicInterpolationMode: 'monotone', + // tension: 0.4, + // pointRadius: 0 + // } + // ] + // } + // }, 'image/svg+xml') + // const svg = Buffer.from(outputStream).toString('base64') - // -> Upload to common repo - console.info(`Uploading chart SVG for ${newRelease.name}...`) - await ghCommon.rest.repos.createOrUpdateFileContents({ - owner, - repo: repoCommon, - path: `assets/graphs/datatracker/${newRelease.id}.svg`, - message: `chore: update datatracker release chart for release ${newRelease.name}`, - content: svg - }) - } + // // -> Upload to common repo + // console.info(`Uploading chart SVG for ${newRelease.name}...`) + // await ghCommon.rest.repos.createOrUpdateFileContents({ + // owner, + // repo: repoCommon, + // path: `assets/graphs/datatracker/${newRelease.id}.svg`, + // message: `chore: update datatracker release chart for release ${newRelease.name}`, + // content: svg + // }) + // } // -> Add to changelog body let formattedBody = '' @@ -265,7 +265,7 @@ async function main () { formattedBody += `![](https://img.shields.io/badge/Code-${covInfo.code}%25-${getCoverageColor(covInfo.code)}?style=flat-square)` formattedBody += `![](https://img.shields.io/badge/Templates-${covInfo.template}%25-${getCoverageColor(covInfo.template)}?style=flat-square)` formattedBody += 
`![](https://img.shields.io/badge/URLs-${covInfo.url}%25-${getCoverageColor(covInfo.url)}?style=flat-square)\n\n` - formattedBody += `![chart](https://raw.githubusercontent.com/${owner}/${repoCommon}/main/assets/graphs/datatracker/${newRelease.id}.svg)` + // formattedBody += `![chart](https://raw.githubusercontent.com/${owner}/${repoCommon}/main/assets/graphs/datatracker/${newRelease.id}.svg)` core.setOutput('changelog', formattedBody) } diff --git a/dev/coverage-action/package-lock.json b/dev/coverage-action/package-lock.json index 864cfdbed8..09570ee0e4 100644 --- a/dev/coverage-action/package-lock.json +++ b/dev/coverage-action/package-lock.json @@ -9,49 +9,42 @@ "version": "1.0.0", "license": "BSD-3-Clause", "dependencies": { - "@actions/core": "1.10.1", - "@actions/github": "6.0.0", - "chart.js": "3.5.1", - "chartjs-node-canvas": "4.1.6", + "@actions/core": "1.11.1", + "@actions/github": "6.0.1", "lodash": "4.17.21", - "luxon": "3.4.3" - }, - "devDependencies": { - "eslint": "8.51.0", - "eslint-config-standard": "17.1.0", - "eslint-plugin-import": "2.28.1", - "eslint-plugin-node": "11.1.0", - "eslint-plugin-promise": "6.1.1", - "npm-check-updates": "16.14.6" + "luxon": "3.7.1" } }, - "node_modules/@aashutoshrathi/word-wrap": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", - "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", - "dev": true, - "engines": { - "node": ">=0.10.0" + "node_modules/@actions/core": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz", + "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==", + "dependencies": { + "@actions/exec": "^1.1.1", + "@actions/http-client": "^2.0.1" } }, - "node_modules/@actions/core": { - "version": "1.10.1", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.1.tgz", - "integrity": "sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==", + "node_modules/@actions/exec": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", + "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", "dependencies": { - "@actions/http-client": "^2.0.1", - "uuid": "^8.3.2" + "@actions/io": "^1.0.1" } }, "node_modules/@actions/github": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.0.tgz", - "integrity": "sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.1.tgz", + "integrity": "sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw==", + "license": "MIT", "dependencies": { "@actions/http-client": "^2.2.0", "@octokit/core": "^5.0.1", - "@octokit/plugin-paginate-rest": "^9.0.0", - "@octokit/plugin-rest-endpoint-methods": "^10.0.0" + "@octokit/plugin-paginate-rest": "^9.2.2", + "@octokit/plugin-rest-endpoint-methods": "^10.4.0", + "@octokit/request": "^8.4.1", + "@octokit/request-error": "^5.1.1", + "undici": "^5.28.5" } }, "node_modules/@actions/http-client": { @@ -63,71 +56,10 @@ "undici": "^5.25.4" } }, - "node_modules/@colors/colors": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", - "integrity": 
"sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", - "dev": true, - "optional": true, - "engines": { - "node": ">=0.1.90" - } - }, - "node_modules/@eslint-community/eslint-utils": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.2.0.tgz", - "integrity": "sha512-gB8T4H4DEfX2IV9zGDJPOBgP1e/DbfCPDTtEqUMckpvzS1OYtva8JdFYBqMwYk7xAQ429WGF/UPqn8uQ//h2vQ==", - "dev": true, - "dependencies": { - "eslint-visitor-keys": "^3.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" - } - }, - "node_modules/@eslint-community/regexpp": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.8.0.tgz", - "integrity": "sha512-JylOEEzDiOryeUnFbQz+oViCXS0KsvR1mvHkoMiu5+UiBvy+RYX7tzlIIIEstF/gVa2tj9AQXk3dgnxv6KxhFg==", - "dev": true, - "engines": { - "node": "^12.0.0 || ^14.0.0 || >=16.0.0" - } - }, - "node_modules/@eslint/eslintrc": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.2.tgz", - "integrity": "sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g==", - "dev": true, - "dependencies": { - "ajv": "^6.12.4", - "debug": "^4.3.2", - "espree": "^9.6.0", - "globals": "^13.19.0", - "ignore": "^5.2.0", - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.1.2", - "strip-json-comments": "^3.1.1" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/@eslint/js": { - "version": "8.51.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.51.0.tgz", - "integrity": "sha512-HxjQ8Qn+4SI3/AFv6sOrDB+g6PpUTDwSJiQqOrnneEk8L71161srI9gjzzZvYVbzHiVg/BvcH95+cK/zfIt4pg==", - "dev": true, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } + "node_modules/@actions/io": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", + "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==" }, "node_modules/@fastify/busboy": { "version": "2.0.0", @@ -137,316 +69,26 @@ "node": ">=14" } }, - "node_modules/@humanwhocodes/config-array": { - "version": "0.11.11", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.11.tgz", - "integrity": "sha512-N2brEuAadi0CcdeMXUkhbZB84eskAc8MEX1By6qEchoVywSgXPIjou4rYsl0V3Hj0ZnuGycGCjdNgockbzeWNA==", - "dev": true, - "dependencies": { - "@humanwhocodes/object-schema": "^1.2.1", - "debug": "^4.1.1", - "minimatch": "^3.0.5" - }, - "engines": { - "node": ">=10.10.0" - } - }, - "node_modules/@humanwhocodes/module-importer": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", - "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", - "dev": true, - "engines": { - "node": ">=12.22" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" - } - }, - "node_modules/@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", - "dev": true - }, - 
"node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dev": true, - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@isaacs/cliui/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "node_modules/@isaacs/cliui/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@isaacs/cliui/node_modules/strip-ansi": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", - "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", - "dev": true, - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/@mapbox/node-pre-gyp": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.8.tgz", - "integrity": "sha512-CMGKi28CF+qlbXh26hDe6NxCd7amqeAzEqnS6IHeO6LoaKyM/n+Xw3HT1COdq8cuioOdlKdqn/hCmqPUOMOywg==", - "dependencies": { - "detect-libc": "^1.0.3", - "https-proxy-agent": "^5.0.0", - "make-dir": "^3.1.0", - "node-fetch": "^2.6.5", - "nopt": "^5.0.0", - "npmlog": "^5.0.1", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.11" - }, - "bin": { - "node-pre-gyp": "bin/node-pre-gyp" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - 
"resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@npmcli/fs": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.0.tgz", - "integrity": "sha512-7kZUAaLscfgbwBQRbvdMYaZOWyMEcPTH/tJjnyAWJ/dvvs9Ef+CERx/qJb9GExJpl1qipaDGn7KqHnFGGixd0w==", - "dev": true, - "dependencies": { - "semver": "^7.3.5" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/git": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-4.1.0.tgz", - "integrity": "sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ==", - "dev": true, - "dependencies": { - "@npmcli/promise-spawn": "^6.0.0", - "lru-cache": "^7.4.4", - "npm-pick-manifest": "^8.0.0", - "proc-log": "^3.0.0", - "promise-inflight": "^1.0.1", - "promise-retry": "^2.0.1", - "semver": "^7.3.5", - "which": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/git/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/@npmcli/git/node_modules/which": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz", - "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/which.js" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/installed-package-contents": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.0.2.tgz", - "integrity": "sha512-xACzLPhnfD51GKvTOOuNX2/V4G4mz9/1I2MfDoye9kBM3RYe5g2YbscsaGoTlaWqkxeiapBWyseULVKpSVHtKQ==", - "dev": true, - "dependencies": { - "npm-bundled": "^3.0.0", - "npm-normalize-package-bin": "^3.0.0" - }, - "bin": { - "installed-package-contents": "lib/index.js" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/node-gyp": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-3.0.0.tgz", - "integrity": "sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA==", - "dev": true, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/promise-spawn": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz", - "integrity": "sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==", - "dev": true, - "dependencies": { - "which": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/promise-spawn/node_modules/which": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz", - "integrity": 
"sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/which.js" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/run-script": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-6.0.2.tgz", - "integrity": "sha512-NCcr1uQo1k5U+SYlnIrbAh3cxy+OQT1VtqiAbxdymSlptbzBb62AjH2xXgjNCoP073hoa1CfCAcwoZ8k96C4nA==", - "dev": true, - "dependencies": { - "@npmcli/node-gyp": "^3.0.0", - "@npmcli/promise-spawn": "^6.0.0", - "node-gyp": "^9.0.0", - "read-package-json-fast": "^3.0.0", - "which": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/run-script/node_modules/which": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz", - "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/which.js" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, "node_modules/@octokit/auth-token": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==", + "license": "MIT", "engines": { "node": ">= 18" } }, "node_modules/@octokit/core": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.0.1.tgz", - "integrity": "sha512-lyeeeZyESFo+ffI801SaBKmCfsvarO+dgV8/0gD8u1d87clbEdWsP5yC+dSj3zLhb2eIf5SJrn6vDz9AheETHw==", + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.1.tgz", + "integrity": "sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==", + "license": "MIT", "dependencies": { "@octokit/auth-token": "^4.0.0", - "@octokit/graphql": "^7.0.0", - "@octokit/request": "^8.0.2", - "@octokit/request-error": "^5.0.0", - "@octokit/types": "^12.0.0", + "@octokit/graphql": "^7.1.0", + "@octokit/request": "^8.4.1", + "@octokit/request-error": "^5.1.1", + "@octokit/types": "^13.0.0", "before-after-hook": "^2.2.0", "universal-user-agent": "^6.0.0" }, @@ -455,12 +97,12 @@ } }, "node_modules/@octokit/endpoint": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.1.tgz", - "integrity": "sha512-hRlOKAovtINHQPYHZlfyFwaM8OyetxeoC81lAkBy34uLb8exrZB50SQdeW3EROqiY9G9yxQTpp5OHTV54QD+vA==", + "version": "9.0.6", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz", + "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==", + "license": "MIT", "dependencies": { - "@octokit/types": "^12.0.0", - "is-plain-object": "^5.0.0", + "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" }, "engines": { @@ -468,12 +110,13 @@ } }, "node_modules/@octokit/graphql": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.0.2.tgz", - "integrity": "sha512-OJ2iGMtj5Tg3s6RaXH22cJcxXRi7Y3EBqbHTBRq+PQAqfaS8f/236fUrWhfSn8P4jovyzqucxme7/vWSSZBX2Q==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz", + "integrity": 
"sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==", + "license": "MIT", "dependencies": { - "@octokit/request": "^8.0.1", - "@octokit/types": "^12.0.0", + "@octokit/request": "^8.4.1", + "@octokit/types": "^13.0.0", "universal-user-agent": "^6.0.0" }, "engines": { @@ -481,47 +124,80 @@ } }, "node_modules/@octokit/openapi-types": { - "version": "19.0.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-19.0.0.tgz", - "integrity": "sha512-PclQ6JGMTE9iUStpzMkwLCISFn/wDeRjkZFIKALpvJQNBGwDoYYi2fFvuHwssoQ1rXI5mfh6jgTgWuddeUzfWw==" + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", + "license": "MIT" }, "node_modules/@octokit/plugin-paginate-rest": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.0.0.tgz", - "integrity": "sha512-oIJzCpttmBTlEhBmRvb+b9rlnGpmFgDtZ0bB6nq39qIod6A5DP+7RkVLMOixIgRCYSHDTeayWqmiJ2SZ6xgfdw==", + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.2.tgz", + "integrity": "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ==", + "license": "MIT", "dependencies": { - "@octokit/types": "^12.0.0" + "@octokit/types": "^12.6.0" }, "engines": { "node": ">= 18" }, "peerDependencies": { - "@octokit/core": ">=5" + "@octokit/core": "5" + } + }, + "node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==", + "license": "MIT" + }, + "node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^20.0.0" } }, "node_modules/@octokit/plugin-rest-endpoint-methods": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.0.1.tgz", - "integrity": "sha512-fgS6HPkPvJiz8CCliewLyym9qAx0RZ/LKh3sATaPfM41y/O2wQ4Z9MrdYeGPVh04wYmHFmWiGlKPC7jWVtZXQA==", + "version": "10.4.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz", + "integrity": "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==", + "license": "MIT", "dependencies": { - "@octokit/types": "^12.0.0" + "@octokit/types": "^12.6.0" }, "engines": { "node": ">= 18" }, "peerDependencies": { - "@octokit/core": ">=5" + "@octokit/core": "5" } }, - "node_modules/@octokit/request": { - "version": "8.1.4", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.1.4.tgz", - "integrity": "sha512-M0aaFfpGPEKrg7XoA/gwgRvc9MSXHRO2Ioki1qrPDbl1e9YhjIwVoHE7HIKmv/m3idzldj//xBujcFNqGX6ENA==", + "node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": 
"https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==", + "license": "MIT" + }, + "node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "license": "MIT", "dependencies": { - "@octokit/endpoint": "^9.0.0", - "@octokit/request-error": "^5.0.0", - "@octokit/types": "^12.0.0", - "is-plain-object": "^5.0.0", + "@octokit/openapi-types": "^20.0.0" + } + }, + "node_modules/@octokit/request": { + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.1.tgz", + "integrity": "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==", + "license": "MIT", + "dependencies": { + "@octokit/endpoint": "^9.0.6", + "@octokit/request-error": "^5.1.1", + "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" }, "engines": { @@ -529,11 +205,12 @@ } }, "node_modules/@octokit/request-error": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.0.1.tgz", - "integrity": "sha512-X7pnyTMV7MgtGmiXBwmO6M5kIPrntOXdyKZLigNfQWSEQzVxR4a4vo49vJjTWX70mPndj8KhfT4Dx+2Ng3vnBQ==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", + "license": "MIT", "dependencies": { - "@octokit/types": "^12.0.0", + "@octokit/types": "^13.1.0", "deprecation": "^2.0.0", "once": "^1.4.0" }, @@ -542,9668 +219,276 @@ } }, "node_modules/@octokit/types": { - "version": "12.0.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.0.0.tgz", - "integrity": "sha512-EzD434aHTFifGudYAygnFlS1Tl6KhbTynEWELQXIbTY8Msvb5nEqTZIm7sbPEt4mQYLZwu3zPKVdeIrw0g7ovg==", + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", + "license": "MIT", "dependencies": { - "@octokit/openapi-types": "^19.0.0" - } - }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "dev": true, - "optional": true, - "engines": { - "node": ">=14" + "@octokit/openapi-types": "^24.2.0" } }, - "node_modules/@pnpm/config.env-replace": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@pnpm/config.env-replace/-/config.env-replace-1.0.0.tgz", - "integrity": "sha512-ZVPVDi1E8oeXlYqkGRtX0CkzLTwE2zt62bjWaWKaAvI8NZqHzlMvGeSNDpW+JB3+aKanYb4UETJOF1/CxGPemA==", - "dev": true, - "engines": { - "node": ">=12.22.0" - } + "node_modules/before-after-hook": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==", + "license": "Apache-2.0" }, - "node_modules/@pnpm/network.ca-file": { - "version": "1.0.2", - "resolved": 
"https://registry.npmjs.org/@pnpm/network.ca-file/-/network.ca-file-1.0.2.tgz", - "integrity": "sha512-YcPQ8a0jwYU9bTdJDpXjMi7Brhkr1mXsXrUJvjqM2mQDgkRiz8jFaQGOdaLxgjtUfQgZhKy/O3cG/YwmgKaxLA==", - "dev": true, - "dependencies": { - "graceful-fs": "4.2.10" - }, - "engines": { - "node": ">=12.22.0" - } + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==", + "license": "ISC" }, - "node_modules/@pnpm/network.ca-file/node_modules/graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", - "dev": true + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, - "node_modules/@pnpm/npm-conf": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@pnpm/npm-conf/-/npm-conf-2.1.0.tgz", - "integrity": "sha512-Oe6ntvgsMTE3hDIqy6sajqHF+MnzJrOF06qC2QSiUEybLL7cp6tjoKUa32gpd9+KPVl4QyMs3E3nsXrx/Vdnlw==", - "dev": true, - "dependencies": { - "@pnpm/config.env-replace": "^1.0.0", - "@pnpm/network.ca-file": "^1.0.1", - "config-chain": "^1.1.11" - }, + "node_modules/luxon": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.7.1.tgz", + "integrity": "sha512-RkRWjA926cTvz5rAb1BqyWkKbbjzCGchDUIKMCUvNi17j6f6j8uHGDV82Aqcqtzd+icoYpELmG3ksgGiFNNcNg==", "engines": { "node": ">=12" } }, - "node_modules/@sigstore/bundle": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-1.0.0.tgz", - "integrity": "sha512-yLvrWDOh6uMOUlFCTJIZEnwOT9Xte7NPXUqVexEKGSF5XtBAuSg5du0kn3dRR0p47a4ah10Y0mNt8+uyeQXrBQ==", - "dev": true, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", "dependencies": { - "@sigstore/protobuf-specs": "^0.2.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "wrappy": "1" } }, - "node_modules/@sigstore/protobuf-specs": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.0.tgz", - "integrity": "sha512-8ZhZKAVfXjIspDWwm3D3Kvj0ddbJ0HqDZ/pOs5cx88HpT8mVsotFrg7H1UMnXOuDHz6Zykwxn4mxG3QLuN+RUg==", - "dev": true, + "node_modules/tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" } }, - "node_modules/@sigstore/tuf": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-1.0.3.tgz", - "integrity": "sha512-2bRovzs0nJZFlCN3rXirE4gwxCn97JNjMmwpecqlbgV9WcxX7WRuIrgzx/X7Ib7MYRbyUTpBYE0s2x6AmZXnlg==", - "dev": true, + "node_modules/undici": { + "version": "5.29.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", + "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", + 
"license": "MIT", "dependencies": { - "@sigstore/protobuf-specs": "^0.2.0", - "tuf-js": "^1.1.7" + "@fastify/busboy": "^2.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@sindresorhus/is": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-5.3.0.tgz", - "integrity": "sha512-CX6t4SYQ37lzxicAqsBtxA3OseeoVrh9cSJ5PFYam0GksYlupRfy1A+Q4aYD3zvcfECLc0zO2u+ZnR2UYKvCrw==", - "dev": true, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sindresorhus/is?sponsor=1" + "node": ">=14.0" } }, - "node_modules/@szmarczak/http-timer": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-5.0.1.tgz", - "integrity": "sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==", - "dev": true, - "dependencies": { - "defer-to-connect": "^2.0.1" - }, - "engines": { - "node": ">=14.16" - } + "node_modules/universal-user-agent": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", + "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==", + "license": "ISC" }, - "node_modules/@tootallnate/once": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", - "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", - "dev": true, - "engines": { - "node": ">= 10" + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + } + }, + "dependencies": { + "@actions/core": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz", + "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==", + "requires": { + "@actions/exec": "^1.1.1", + "@actions/http-client": "^2.0.1" } }, - "node_modules/@tufjs/canonical-json": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-1.0.0.tgz", - "integrity": "sha512-QTnf++uxunWvG2z3UFNzAoQPHxnSXOwtaI3iJ+AohhV+5vONuArPjJE7aPXPVXfXJsqrVbZBu9b81AJoSd09IQ==", - "dev": true, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "@actions/exec": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", + "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", + "requires": { + "@actions/io": "^1.0.1" } }, - "node_modules/@tufjs/models": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-1.0.4.tgz", - "integrity": "sha512-qaGV9ltJP0EO25YfFUPhxRVK0evXFIAGicsVXuRim4Ed9cjPxYhNnNJ49SFmbeLgtxpslIkX317IgpfcHPVj/A==", - "dev": true, - "dependencies": { - "@tufjs/canonical-json": "1.0.0", - "minimatch": "^9.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "@actions/github": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.1.tgz", + "integrity": "sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw==", + "requires": { + "@actions/http-client": "^2.2.0", + "@octokit/core": "^5.0.1", + 
"@octokit/plugin-paginate-rest": "^9.2.2", + "@octokit/plugin-rest-endpoint-methods": "^10.4.0", + "@octokit/request": "^8.4.1", + "@octokit/request-error": "^5.1.1", + "undici": "^5.28.5" } }, - "node_modules/@tufjs/models/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" + "@actions/http-client": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.0.tgz", + "integrity": "sha512-q+epW0trjVUUHboliPb4UF9g2msf+w61b32tAkFEwL/IwP0DQWgbCMM0Hbe3e3WXSKz5VcUXbzJQgy8Hkra/Lg==", + "requires": { + "tunnel": "^0.0.6", + "undici": "^5.25.4" } }, - "node_modules/@tufjs/models/node_modules/minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } + "@actions/io": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", + "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==" }, - "node_modules/@types/http-cache-semantics": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz", - "integrity": "sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==", - "dev": true + "@fastify/busboy": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.0.0.tgz", + "integrity": "sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ==" }, - "node_modules/@types/json5": { - "version": "0.0.29", - "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", - "dev": true + "@octokit/auth-token": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", + "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==" }, - "node_modules/abbrev": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" + "@octokit/core": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.1.tgz", + "integrity": "sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==", + "requires": { + "@octokit/auth-token": "^4.0.0", + "@octokit/graphql": "^7.1.0", + "@octokit/request": "^8.4.1", + "@octokit/request-error": "^5.1.1", + "@octokit/types": "^13.0.0", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + } }, - "node_modules/acorn": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", - "integrity": 
"sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", - "dev": true, - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" + "@octokit/endpoint": { + "version": "9.0.6", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz", + "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==", + "requires": { + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" } }, - "node_modules/acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true, - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" - } - }, - "node_modules/agent-base": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", - "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", - "dependencies": { - "debug": "4" - }, - "engines": { - "node": ">= 6.0.0" - } - }, - "node_modules/agentkeepalive": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.3.0.tgz", - "integrity": "sha512-7Epl1Blf4Sy37j4v9f9FjICCh4+KAQOyXgHEwlyBiAQLbhKdq/i2QQU3amQalS/wPhdPzDXPL5DMR5bkn+YeWg==", - "dev": true, - "dependencies": { - "debug": "^4.1.0", - "depd": "^2.0.0", - "humanize-ms": "^1.2.1" - }, - "engines": { - "node": ">= 8.0.0" - } - }, - "node_modules/aggregate-error": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "dev": true, - "dependencies": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ansi-align": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", - "integrity": "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==", - "dev": true, - "dependencies": { - "string-width": "^4.1.0" - } - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/aproba": { - "version": 
"2.0.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", - "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" - }, - "node_modules/are-we-there-yet": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", - "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", - "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true - }, - "node_modules/array-buffer-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", - "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "is-array-buffer": "^3.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array-includes": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.6.tgz", - "integrity": "sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", - "is-string": "^1.0.7" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/array.prototype.findlastindex": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.2.tgz", - "integrity": "sha512-tb5thFFlUcp7NdNF6/MpDk/1r/4awWG1FIz3YqDf+/zJSTezBb+/5WViH41obXULHVpDzoiCLpJ/ZO9YbJMsdw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.1.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array.prototype.flat": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz", - "integrity": "sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array.prototype.flatmap": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz", - "integrity": 
"sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/available-typed-arrays": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", - "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - }, - "node_modules/before-after-hook": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", - "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" - }, - "node_modules/boxen": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/boxen/-/boxen-7.0.2.tgz", - "integrity": "sha512-1Z4UJabXUP1/R9rLpoU3O2lEMnG3pPLAs/ZD2lF3t2q7qD5lM8rqbtnvtvm4N0wEyNlE+9yZVTVAGmd1V5jabg==", - "dev": true, - "dependencies": { - "ansi-align": "^3.0.1", - "camelcase": "^7.0.0", - "chalk": "^5.0.1", - "cli-boxes": "^3.0.0", - "string-width": "^5.1.2", - "type-fest": "^2.13.0", - "widest-line": "^4.0.1", - "wrap-ansi": "^8.0.1" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/boxen/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/boxen/node_modules/chalk": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.2.0.tgz", - "integrity": "sha512-ree3Gqw/nazQAPuJJEy+avdl7QfZMcUvmHIKgEZkGL+xOBzRvup5Hxo6LHuMceSxOabuJLJm5Yp/92R9eMmMvA==", - "dev": true, - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/boxen/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "node_modules/boxen/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/boxen/node_modules/strip-ansi": { - "version": "7.0.1", 
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", - "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", - "dev": true, - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/boxen/node_modules/type-fest": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", - "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", - "dev": true, - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/buffer-from": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", - "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", - "dev": true - }, - "node_modules/builtins": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz", - "integrity": "sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ==", - "dev": true, - "dependencies": { - "semver": "^7.0.0" - } - }, - "node_modules/cacache": { - "version": "17.1.3", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.3.tgz", - "integrity": "sha512-jAdjGxmPxZh0IipMdR7fK/4sDSrHMLUV0+GvVUsjwyGNKHsh79kW/otg+GkbXwl6Uzvy9wsvHOX4nUoWldeZMg==", - "dev": true, - "dependencies": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^7.7.1", - "minipass": "^5.0.0", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/cacache/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/cacache/node_modules/fs-minipass": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.2.tgz", - "integrity": "sha512-2GAfyfoaCDRrM6jaOS3UsBts8yJ55VioXdWcOL7dK9zdAuKT71+WBA4ifnNYqVjYv+4SsPxjK0JT4yIIn4cA/g==", - "dev": true, - "dependencies": { - "minipass": "^5.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/cacache/node_modules/glob": { - "version": "10.3.3", - "resolved": 
"https://registry.npmjs.org/glob/-/glob-10.3.3.tgz", - "integrity": "sha512-92vPiMb/iqpmEgsOoIDvTjc50wf9CCCvMzsi6W0JLPeUKE8TWP1a73PgqSrqy7iAZxaSD1YdzU7QZR5LF51MJw==", - "dev": true, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.0.3", - "minimatch": "^9.0.1", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", - "path-scurry": "^1.10.1" - }, - "bin": { - "glob": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/cacache/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/cacache/node_modules/minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/cacache/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/cacheable-lookup": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-7.0.0.tgz", - "integrity": "sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w==", - "dev": true, - "engines": { - "node": ">=14.16" - } - }, - "node_modules/cacheable-request": { - "version": "10.2.8", - "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-10.2.8.tgz", - "integrity": "sha512-IDVO5MJ4LItE6HKFQTqT2ocAQsisOoCTUDu1ddCmnhyiwFQjXNPp4081Xj23N4tO+AFEFNzGuNEf/c8Gwwt15A==", - "dev": true, - "dependencies": { - "@types/http-cache-semantics": "^4.0.1", - "get-stream": "^6.0.1", - "http-cache-semantics": "^4.1.1", - "keyv": "^4.5.2", - "mimic-response": "^4.0.0", - "normalize-url": "^8.0.0", - "responselike": "^3.0.0" - }, - "engines": { - "node": ">=14.16" - } - }, - "node_modules/cacheable-request/node_modules/mimic-response": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-4.0.0.tgz", - "integrity": "sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==", - "dev": true, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": 
"sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/camelcase": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-7.0.1.tgz", - "integrity": "sha512-xlx1yCK2Oc1APsPXDL2LdlNP6+uu8OCDdhOBSVT279M/S+y75O30C2VuD8T2ogdePBBl7PfPF4504tnLgX3zfw==", - "dev": true, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/canvas": { - "version": "2.9.1", - "resolved": "https://registry.npmjs.org/canvas/-/canvas-2.9.1.tgz", - "integrity": "sha512-vSQti1uG/2gjv3x6QLOZw7TctfufaerTWbVe+NSduHxxLGB+qf3kFgQ6n66DSnuoINtVUjrLLIK2R+lxrBG07A==", - "hasInstallScript": true, - "dependencies": { - "@mapbox/node-pre-gyp": "^1.0.0", - "nan": "^2.15.0", - "simple-get": "^3.0.3" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/chart.js": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-3.5.1.tgz", - "integrity": "sha512-m5kzt72I1WQ9LILwQC4syla/LD/N413RYv2Dx2nnTkRS9iv/ey1xLTt0DnPc/eWV4zI+BgEgDYBIzbQhZHc/PQ==" - }, - "node_modules/chartjs-node-canvas": { - "version": "4.1.6", - "resolved": "https://registry.npmjs.org/chartjs-node-canvas/-/chartjs-node-canvas-4.1.6.tgz", - "integrity": "sha512-UQJbPWrvqB/FoLclGA9BaLQmZbzSYlujF4w8NZd6Xzb+sqgACBb2owDX6m7ifCXLjUW5Nz0Qx0qqrTtQkkSoYw==", - "dependencies": { - "canvas": "^2.8.0", - "tslib": "^2.3.1" - }, - "peerDependencies": { - "chart.js": "^3.5.1" - } - }, - "node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "engines": { - "node": ">=10" - } - }, - "node_modules/ci-info": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.8.0.tgz", - "integrity": "sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/sibiraj-s" - } - ], - "engines": { - "node": ">=8" - } - }, - "node_modules/clean-stack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/cli-boxes": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz", - "integrity": "sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-table3": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.3.tgz", - "integrity": 
"sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg==", - "dev": true, - "dependencies": { - "string-width": "^4.2.0" - }, - "engines": { - "node": "10.* || >= 12.*" - }, - "optionalDependencies": { - "@colors/colors": "1.5.0" - } - }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/color-support": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", - "bin": { - "color-support": "bin.js" - } - }, - "node_modules/commander": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz", - "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", - "dev": true, - "engines": { - "node": ">=14" - } - }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" - }, - "node_modules/config-chain": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz", - "integrity": "sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==", - "dev": true, - "dependencies": { - "ini": "^1.3.4", - "proto-list": "~1.2.1" - } - }, - "node_modules/config-chain/node_modules/ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", - "dev": true - }, - "node_modules/configstore": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/configstore/-/configstore-6.0.0.tgz", - "integrity": "sha512-cD31W1v3GqUlQvbBCGcXmd2Nj9SvLDOP1oQ0YFuLETufzSPaKp11rYBsSOm7rCsW3OnIRAFM3OxRhceaXNYHkA==", - "dev": true, - "dependencies": { - "dot-prop": "^6.0.1", - "graceful-fs": "^4.2.6", - "unique-string": "^3.0.0", - "write-file-atomic": "^3.0.3", - "xdg-basedir": "^5.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/yeoman/configstore?sponsor=1" - } - }, - "node_modules/console-control-strings": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=" - }, - "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/crypto-random-string": { - "version": "4.0.0", - 
"resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-4.0.0.tgz", - "integrity": "sha512-x8dy3RnvYdlUcPOjkEHqozhiwzKNSq7GcPuXFbnyMOCHxX8V3OgIg/pYuabl2sbUPfIJaeAQB7PMOK8DFIdoRA==", - "dev": true, - "dependencies": { - "type-fest": "^1.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/crypto-random-string/node_modules/type-fest": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz", - "integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/decompress-response": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-4.2.1.tgz", - "integrity": "sha512-jOSne2qbyE+/r8G1VU+G/82LBs2Fs4LAsTiLSHOCOMZQl2OKZ6i8i4IyHemTe+/yIXOtTcRQMzPcgyhoFlqPkw==", - "dependencies": { - "mimic-response": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/deep-extend": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", - "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", - "dev": true, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true - }, - "node_modules/defer-to-connect": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", - "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", - "dev": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/define-properties": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.0.tgz", - "integrity": "sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==", - "dev": true, - "dependencies": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/delegates": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" - }, - "node_modules/depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "dev": true, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": 
"sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "node_modules/detect-libc": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", - "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=", - "bin": { - "detect-libc": "bin/detect-libc.js" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/dot-prop": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-6.0.1.tgz", - "integrity": "sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==", - "dev": true, - "dependencies": { - "is-obj": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "dev": true - }, - "node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "node_modules/encoding": { - "version": "0.1.13", - "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", - "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", - "optional": true, - "dependencies": { - "iconv-lite": "^0.6.2" - } - }, - "node_modules/env-paths": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", - "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/err-code": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", - "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", - "dev": true - }, - "node_modules/es-abstract": { - "version": "1.21.2", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.21.2.tgz", - "integrity": "sha512-y/B5POM2iBnIxCiernH1G7rC9qQoM77lLIMQLuob0zhp8C56Po81+2Nj0WFKnd0pNReDTnkYryc+zhOzpEIROg==", - "dev": true, - "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "es-set-tostringtag": "^2.0.1", - "es-to-primitive": "^1.2.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.2.0", - "get-symbol-description": "^1.0.0", - "globalthis": "^1.0.3", - "gopd": "^1.0.1", - "has": "^1.0.3", - "has-property-descriptors": "^1.0.0", - 
"has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", - "is-array-buffer": "^3.0.2", - "is-callable": "^1.2.7", - "is-negative-zero": "^2.0.2", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", - "is-string": "^1.0.7", - "is-typed-array": "^1.1.10", - "is-weakref": "^1.0.2", - "object-inspect": "^1.12.3", - "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.4.3", - "safe-regex-test": "^1.0.0", - "string.prototype.trim": "^1.2.7", - "string.prototype.trimend": "^1.0.6", - "string.prototype.trimstart": "^1.0.6", - "typed-array-length": "^1.0.4", - "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.9" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/es-set-tostringtag": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", - "integrity": "sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==", - "dev": true, - "dependencies": { - "get-intrinsic": "^1.1.3", - "has": "^1.0.3", - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-shim-unscopables": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", - "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", - "dev": true, - "dependencies": { - "has": "^1.0.3" - } - }, - "node_modules/es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", - "dev": true, - "dependencies": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/escape-goat": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-goat/-/escape-goat-4.0.0.tgz", - "integrity": "sha512-2Sd4ShcWxbx6OY1IHyla/CVNwvg7XwZVoXZHcSu9w9SReNP1EzzD5T8NWKIR38fIqEns9kDWKUQTXXAmlDrdPg==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint": { - "version": "8.51.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.51.0.tgz", - "integrity": "sha512-2WuxRZBrlwnXi+/vFSJyjMqrNjtJqiasMzehF0shoLaW7DzS3/9Yvrmq5JiT66+pNjiX4UBnLDiKHcWAr/OInA==", - "dev": true, - "dependencies": { - "@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.6.1", - "@eslint/eslintrc": "^2.1.2", - "@eslint/js": "8.51.0", - "@humanwhocodes/config-array": "^0.11.11", - "@humanwhocodes/module-importer": "^1.0.1", - "@nodelib/fs.walk": "^1.2.8", - "ajv": "^6.12.4", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", - "debug": "^4.3.2", - "doctrine": "^3.0.0", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.2.2", - 
"eslint-visitor-keys": "^3.4.3", - "espree": "^9.6.1", - "esquery": "^1.4.2", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^6.0.1", - "find-up": "^5.0.0", - "glob-parent": "^6.0.2", - "globals": "^13.19.0", - "graphemer": "^1.4.0", - "ignore": "^5.2.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "is-path-inside": "^3.0.3", - "js-yaml": "^4.1.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.4.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.1.2", - "natural-compare": "^1.4.0", - "optionator": "^0.9.3", - "strip-ansi": "^6.0.1", - "text-table": "^0.2.0" - }, - "bin": { - "eslint": "bin/eslint.js" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint-config-standard": { - "version": "17.1.0", - "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.1.0.tgz", - "integrity": "sha512-IwHwmaBNtDK4zDHQukFDW5u/aTb8+meQWZvNFWkiGmbWjD6bqyuSSBxxXKkCftCUzc1zwCH2m/baCNDLGmuO5Q==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "engines": { - "node": ">=12.0.0" - }, - "peerDependencies": { - "eslint": "^8.0.1", - "eslint-plugin-import": "^2.25.2", - "eslint-plugin-n": "^15.0.0 || ^16.0.0 ", - "eslint-plugin-promise": "^6.0.0" - } - }, - "node_modules/eslint-import-resolver-node": { - "version": "0.3.7", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz", - "integrity": "sha512-gozW2blMLJCeFpBwugLTGyvVjNoeo1knonXAcatC6bjPBZitotxdWf7Gimr25N4c0AAOo4eOUfaG82IJPDpqCA==", - "dev": true, - "dependencies": { - "debug": "^3.2.7", - "is-core-module": "^2.11.0", - "resolve": "^1.22.1" - } - }, - "node_modules/eslint-import-resolver-node/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-module-utils": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz", - "integrity": "sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw==", - "dev": true, - "dependencies": { - "debug": "^3.2.7" - }, - "engines": { - "node": ">=4" - }, - "peerDependenciesMeta": { - "eslint": { - "optional": true - } - } - }, - "node_modules/eslint-module-utils/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-plugin-es": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz", - "integrity": "sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ==", - "dev": true, - "dependencies": { - "eslint-utils": "^2.0.0", - "regexpp": "^3.0.0" - }, - "engines": { - "node": ">=8.10.0" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - }, - "peerDependencies": { 
- "eslint": ">=4.19.1" - } - }, - "node_modules/eslint-plugin-import": { - "version": "2.28.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.28.1.tgz", - "integrity": "sha512-9I9hFlITvOV55alzoKBI+K9q74kv0iKMeY6av5+umsNwayt59fz692daGyjR+oStBQgx6nwR9rXldDev3Clw+A==", - "dev": true, - "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.findlastindex": "^1.2.2", - "array.prototype.flat": "^1.3.1", - "array.prototype.flatmap": "^1.3.1", - "debug": "^3.2.7", - "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.7", - "eslint-module-utils": "^2.8.0", - "has": "^1.0.3", - "is-core-module": "^2.13.0", - "is-glob": "^4.0.3", - "minimatch": "^3.1.2", - "object.fromentries": "^2.0.6", - "object.groupby": "^1.0.0", - "object.values": "^1.1.6", - "semver": "^6.3.1", - "tsconfig-paths": "^3.14.2" - }, - "engines": { - "node": ">=4" - }, - "peerDependencies": { - "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" - } - }, - "node_modules/eslint-plugin-import/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-plugin-import/node_modules/doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/eslint-plugin-import/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/eslint-plugin-n": { - "version": "15.6.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-15.6.1.tgz", - "integrity": "sha512-R9xw9OtCRxxaxaszTQmQAlPgM+RdGjaL1akWuY/Fv9fRAi8Wj4CUKc6iYVG8QNRjRuo8/BqVYIpfqberJUEacA==", - "dev": true, - "peer": true, - "dependencies": { - "builtins": "^5.0.1", - "eslint-plugin-es": "^4.1.0", - "eslint-utils": "^3.0.0", - "ignore": "^5.1.1", - "is-core-module": "^2.11.0", - "minimatch": "^3.1.2", - "resolve": "^1.22.1", - "semver": "^7.3.8" - }, - "engines": { - "node": ">=12.22.0" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - }, - "peerDependencies": { - "eslint": ">=7.0.0" - } - }, - "node_modules/eslint-plugin-n/node_modules/eslint-plugin-es": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz", - "integrity": "sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ==", - "dev": true, - "peer": true, - "dependencies": { - "eslint-utils": "^2.0.0", - "regexpp": "^3.0.0" - }, - "engines": { - "node": ">=8.10.0" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - }, - "peerDependencies": { - "eslint": ">=4.19.1" - } - }, - "node_modules/eslint-plugin-n/node_modules/eslint-plugin-es/node_modules/eslint-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", - "integrity": 
"sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", - "dev": true, - "peer": true, - "dependencies": { - "eslint-visitor-keys": "^1.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - } - }, - "node_modules/eslint-plugin-n/node_modules/eslint-plugin-es/node_modules/eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/eslint-plugin-n/node_modules/eslint-utils": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", - "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", - "dev": true, - "peer": true, - "dependencies": { - "eslint-visitor-keys": "^2.0.0" - }, - "engines": { - "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - }, - "peerDependencies": { - "eslint": ">=5" - } - }, - "node_modules/eslint-plugin-n/node_modules/eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/eslint-plugin-node": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz", - "integrity": "sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==", - "dev": true, - "dependencies": { - "eslint-plugin-es": "^3.0.0", - "eslint-utils": "^2.0.0", - "ignore": "^5.1.1", - "minimatch": "^3.0.4", - "resolve": "^1.10.1", - "semver": "^6.1.0" - }, - "engines": { - "node": ">=8.10.0" - }, - "peerDependencies": { - "eslint": ">=5.16.0" - } - }, - "node_modules/eslint-plugin-node/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/eslint-plugin-promise": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.1.1.tgz", - "integrity": "sha512-tjqWDwVZQo7UIPMeDReOpUgHCmCiH+ePnVT+5zVapL0uuHnegBUs2smM13CzOs2Xb5+MHMRFTs9v24yjba4Oig==", - "dev": true, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "peerDependencies": { - "eslint": "^7.0.0 || ^8.0.0" - } - }, - "node_modules/eslint-scope": { - "version": "7.2.2", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", - "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", - "dev": true, - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", - 
"integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", - "dev": true, - "dependencies": { - "eslint-visitor-keys": "^1.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - } - }, - "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/eslint-visitor-keys": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", - "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", - "dev": true, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint/node_modules/glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dev": true, - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/espree": { - "version": "9.6.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", - "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", - "dev": true, - "dependencies": { - "acorn": "^8.9.0", - "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/esquery": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", - "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", - "dev": true, - "dependencies": { - "estraverse": "^5.1.0" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "dev": true, - "dependencies": { - "estraverse": "^5.2.0" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/exponential-backoff": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.1.tgz", - "integrity": "sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==", - "dev": true - }, - "node_modules/fast-deep-equal": { - 
"version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true - }, - "node_modules/fast-glob": { - "version": "3.2.12", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", - "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", - "dev": true, - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true - }, - "node_modules/fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", - "dev": true - }, - "node_modules/fast-memoize": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/fast-memoize/-/fast-memoize-2.5.2.tgz", - "integrity": "sha512-Ue0LwpDYErFbmNnZSF0UH6eImUwDmogUO1jyE+JbN2gsQz/jICm1Ve7t9QT0rNSsfJt+Hs4/S3GnsDVjL4HVrw==", - "dev": true - }, - "node_modules/fastq": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", - "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", - "dev": true, - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/file-entry-cache": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", - "dev": true, - "dependencies": { - "flat-cache": "^3.0.4" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "dev": true, - "dependencies": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/flat-cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", - "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", - "dev": true, - "dependencies": { - "flatted": "^3.1.0", - "rimraf": "^3.0.2" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/flatted": { - "version": "3.2.5", - "resolved": 
"https://registry.npmjs.org/flatted/-/flatted-3.2.5.tgz", - "integrity": "sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg==", - "dev": true - }, - "node_modules/for-each": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", - "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", - "dev": true, - "dependencies": { - "is-callable": "^1.1.3" - } - }, - "node_modules/foreground-child": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", - "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", - "dev": true, - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/foreground-child/node_modules/signal-exit": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.0.2.tgz", - "integrity": "sha512-MY2/qGx4enyjprQnFaZsHib3Yadh3IXyV2C321GY0pjGfVBu4un0uDJkwgdxqO+Rdx8JMT8IfJIRwbYVz3Ob3Q==", - "dev": true, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/form-data-encoder": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-2.1.4.tgz", - "integrity": "sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==", - "dev": true, - "engines": { - "node": ">= 14.17" - } - }, - "node_modules/fp-and-or": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/fp-and-or/-/fp-and-or-0.1.4.tgz", - "integrity": "sha512-+yRYRhpnFPWXSly/6V4Lw9IfOV26uu30kynGJ03PW+MnjOEQe45RZ141QcS0aJehYBYA50GfCDnsRbFJdhssRw==", - "dev": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" - }, - "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true - }, - "node_modules/function.prototype.name": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", - "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.0", - "functions-have-names": "^1.2.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/functions-have-names": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", - "integrity": 
"sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/gauge": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", - "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", - "dependencies": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.2", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.1", - "object-assign": "^4.1.1", - "signal-exit": "^3.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.2" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/get-intrinsic": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", - "integrity": "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-stdin": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-8.0.0.tgz", - "integrity": "sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/glob": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", - "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/global-dirs": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.1.tgz", - "integrity": "sha512-NBcGGFbBA9s1VzD41QXDG+3++t9Mn5t1FpLdhESY6oKY4gYTFpX4wO3sqGUa0Srjtbfj3szX0RnemmrVRUdULA==", - "dev": true, - 
"dependencies": { - "ini": "2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/global-dirs/node_modules/ini": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", - "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", - "dev": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/globals": { - "version": "13.21.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.21.0.tgz", - "integrity": "sha512-ybyme3s4yy/t/3s35bewwXKOf7cvzfreG2lH0lZl0JB7I4GxRP2ghxOK/Nb9EkRXdbBXZLfq/p/0W2JUONB/Gg==", - "dev": true, - "dependencies": { - "type-fest": "^0.20.2" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/globalthis": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", - "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", - "dev": true, - "dependencies": { - "define-properties": "^1.1.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dev": true, - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/gopd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", - "dev": true, - "dependencies": { - "get-intrinsic": "^1.1.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/got": { - "version": "12.6.0", - "resolved": "https://registry.npmjs.org/got/-/got-12.6.0.tgz", - "integrity": "sha512-WTcaQ963xV97MN3x0/CbAriXFZcXCfgxVp91I+Ze6pawQOa7SgzwSx2zIJJsX+kTajMnVs0xcFD1TxZKFqhdnQ==", - "dev": true, - "dependencies": { - "@sindresorhus/is": "^5.2.0", - "@szmarczak/http-timer": "^5.0.1", - "cacheable-lookup": "^7.0.0", - "cacheable-request": "^10.2.8", - "decompress-response": "^6.0.0", - "form-data-encoder": "^2.1.2", - "get-stream": "^6.0.1", - "http2-wrapper": "^2.1.10", - "lowercase-keys": "^3.0.0", - "p-cancelable": "^3.0.0", - "responselike": "^3.0.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sindresorhus/got?sponsor=1" - } - }, - "node_modules/got/node_modules/decompress-response": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", - "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", - "dev": true, - "dependencies": { - "mimic-response": "^3.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/got/node_modules/mimic-response": { - "version": "3.1.0", - "resolved": 
"https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", - "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true - }, - "node_modules/graphemer": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", - "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", - "dev": true - }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/has-bigints": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", - "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", - "dev": true, - "dependencies": { - "get-intrinsic": "^1.1.1" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", - "dev": true, - "dependencies": { - "has-symbols": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-unicode": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": 
"sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" - }, - "node_modules/has-yarn": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-yarn/-/has-yarn-3.0.0.tgz", - "integrity": "sha512-IrsVwUHhEULx3R8f/aA8AHuEzAorplsab/v8HBzEiIukwq5i/EC+xmOW+HfP1OaDP+2JkgT1yILHN2O3UFIbcA==", - "dev": true, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/hosted-git-info": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.2.1.tgz", - "integrity": "sha512-xIcQYMnhcx2Nr4JTjsFmwwnr9vldugPy9uVm0o87bjqqWMv9GaqsTeT+i99wTl0mk1uLxJtHxLb8kymqTENQsw==", - "dev": true, - "dependencies": { - "lru-cache": "^7.5.1" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/hosted-git-info/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/http-cache-semantics": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", - "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", - "dev": true - }, - "node_modules/http-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", - "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", - "dev": true, - "dependencies": { - "@tootallnate/once": "2", - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/http2-wrapper": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-2.2.0.tgz", - "integrity": "sha512-kZB0wxMo0sh1PehyjJUWRFEd99KC5TLjZ2cULC4f9iqJBAmKQQXEICjxl5iPJRwP40dpeHFqqhm7tYCvODpqpQ==", - "dev": true, - "dependencies": { - "quick-lru": "^5.1.1", - "resolve-alpn": "^1.2.0" - }, - "engines": { - "node": ">=10.19.0" - } - }, - "node_modules/https-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", - "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", - "dependencies": { - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/humanize-ms": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", - "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", - "dev": true, - "dependencies": { - "ms": "^2.0.0" - } - }, - "node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "optional": true, - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/ignore": { - "version": "5.2.4", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", - "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==", - "dev": true, - 
"engines": { - "node": ">= 4" - } - }, - "node_modules/ignore-walk": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-6.0.3.tgz", - "integrity": "sha512-C7FfFoTA+bI10qfeydT8aZbvr91vAEU+2W5BZUlzPec47oNb07SsOfwYrtxuvOYdUApPP/Qlh4DtAO51Ekk2QA==", - "dev": true, - "dependencies": { - "minimatch": "^9.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/ignore-walk/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/ignore-walk/node_modules/minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "dev": true, - "dependencies": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/import-lazy": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-4.0.0.tgz", - "integrity": "sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", - "dev": true, - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "node_modules/ini": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.1.tgz", - "integrity": "sha512-QQnnxNyfvmHFIsj7gkPcYymR8Jdw/o7mp5ZFihxn6h8Ci6fh3Dx4E1gPjpQEpIuPo9XVNY/ZUwh4BPMjGyL01g==", - "dev": true, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/internal-slot": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz", - "integrity": 
"sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==", - "dev": true, - "dependencies": { - "get-intrinsic": "^1.2.0", - "has": "^1.0.3", - "side-channel": "^1.0.4" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/ip": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", - "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==", - "dev": true - }, - "node_modules/is-array-buffer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", - "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.0", - "is-typed-array": "^1.1.10" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-bigint": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", - "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", - "dev": true, - "dependencies": { - "has-bigints": "^1.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-boolean-object": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", - "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-callable": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-ci": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", - "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", - "dev": true, - "dependencies": { - "ci-info": "^3.2.0" - }, - "bin": { - "is-ci": "bin.js" - } - }, - "node_modules/is-core-module": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.0.tgz", - "integrity": "sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ==", - "dev": true, - "dependencies": { - "has": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-date-object": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", - "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", - "dev": true, - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "dev": true, - "engines": { - "node": 
">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-installed-globally": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", - "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", - "dev": true, - "dependencies": { - "global-dirs": "^3.0.0", - "is-path-inside": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-lambda": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", - "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==", - "dev": true - }, - "node_modules/is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-npm": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-6.0.0.tgz", - "integrity": "sha512-JEjxbSmtPSt1c8XTkVrlujcXdKV1/tvuQ7GwKcAlyiVLeYFQ2VHat8xfrDJsIkhCdF/tZ7CiIR3sy141c6+gPQ==", - "dev": true, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-number-object": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", - "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", - "dev": true, - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-obj": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", - "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-path-inside": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", - "dev": true, - "engines": { - "node": ">=8" - } - 
}, - "node_modules/is-plain-object": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-string": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", - "dev": true, - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-symbol": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", - "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", - "dev": true, - "dependencies": { - "has-symbols": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-typed-array": { - "version": "1.1.10", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz", - "integrity": "sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==", - "dev": true, - "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==", - "dev": true - }, - "node_modules/is-weakref": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", - "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-yarn-global": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.4.1.tgz", - "integrity": "sha512-/kppl+R+LO5VmhYSEWARUFjodS25D68gvj8W7z0I7OWhUla5xWu8KL6CtB2V0R6yqhnRgbcaREMr4EEM6htLPQ==", - "dev": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/isexe": { - "version": 
"2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "node_modules/jackspeak": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", - "integrity": "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==", - "dev": true, - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, - "node_modules/jju": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/jju/-/jju-1.4.0.tgz", - "integrity": "sha512-8wb9Yw966OSxApiCt0K3yNJL8pnNeIv+OEq2YMidz4FKP6nonSRoOXc80iXY4JaN2FC11B9qsNmDsm+ZOfMROA==", - "dev": true - }, - "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/json-buffer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", - "dev": true - }, - "node_modules/json-parse-even-better-errors": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.0.tgz", - "integrity": "sha512-iZbGHafX/59r39gPwVPRBGw0QQKnA7tte5pSMrhWOW7swGsVvVTjmfyAV9pNqk8YGT7tRCdxRu8uzcgZwoDooA==", - "dev": true, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/json-parse-helpfulerror": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/json-parse-helpfulerror/-/json-parse-helpfulerror-1.0.3.tgz", - "integrity": "sha512-XgP0FGR77+QhUxjXkwOMkC94k3WtqEBfcnjWqhRd82qTat4SWKRE+9kUnynz/shm3I4ea2+qISvTIeGTNU7kJg==", - "dev": true, - "dependencies": { - "jju": "^1.1.0" - } - }, - "node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "node_modules/json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", - "dev": true - }, - "node_modules/json5": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", - "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", - "dev": true, - "dependencies": { - "minimist": "^1.2.0" - }, - "bin": { - "json5": "lib/cli.js" - } - }, - "node_modules/jsonlines": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsonlines/-/jsonlines-0.1.1.tgz", - "integrity": "sha512-ekDrAGso79Cvf+dtm+mL8OBI2bmAOt3gssYs833De/C9NmIpWDWyUO4zPgB5x2/OhY366dkhgfPMYfwZF7yOZA==", - "dev": true - }, - "node_modules/jsonparse": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", - "integrity": 
"sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==", - "dev": true, - "engines": [ - "node >= 0.2.0" - ] - }, - "node_modules/keyv": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.2.tgz", - "integrity": "sha512-5MHbFaKn8cNSmVW7BYnijeAVlE4cYA/SVkifVgrh7yotnfhKmjuXpDKjrABLnT0SfHWV21P8ow07OGfRrNDg8g==", - "dev": true, - "dependencies": { - "json-buffer": "3.0.1" - } - }, - "node_modules/kleur": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", - "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/latest-version": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-7.0.0.tgz", - "integrity": "sha512-KvNT4XqAMzdcL6ka6Tl3i2lYeFDgXNCuIX+xNx6ZMVR1dFq+idXd9FLKNMOIx0t9mJ9/HudyX4oZWXZQ0UJHeg==", - "dev": true, - "dependencies": { - "package-json": "^8.1.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "dev": true, - "dependencies": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "dev": true, - "dependencies": { - "p-locate": "^5.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" - }, - "node_modules/lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true - }, - "node_modules/lowercase-keys": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz", - "integrity": "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==", - "dev": true, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/luxon": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.3.tgz", - "integrity": "sha512-tFWBiv3h7z+T/tDaoxA8rqTxy1CHV6gHS//QdaH4pulbq/JuBSGgQspQQqcgnwdAx6pNI7cmvz5Sv/addzHmUg==", - "engines": { - "node": ">=12" - } - }, - "node_modules/make-dir": { - "version": "3.1.0", - "resolved": 
"https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", - "dependencies": { - "semver": "^6.0.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/make-dir/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/make-fetch-happen": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz", - "integrity": "sha512-rLWS7GCSTcEujjVBs2YqG7Y4643u8ucvCJeSRqiLYhesrDuzeuFIk37xREzAsfQaqzl8b9rNCE4m6J8tvX4Q8w==", - "dev": true, - "dependencies": { - "agentkeepalive": "^4.2.1", - "cacache": "^17.0.0", - "http-cache-semantics": "^4.1.1", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", - "is-lambda": "^1.0.1", - "lru-cache": "^7.7.1", - "minipass": "^5.0.0", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "promise-retry": "^2.0.1", - "socks-proxy-agent": "^7.0.0", - "ssri": "^10.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/make-fetch-happen/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/make-fetch-happen/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", - "dev": true, - "dependencies": { - "braces": "^3.0.2", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/mimic-response": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-2.1.0.tgz", - "integrity": "sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA==", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/minimist": { - "version": "1.2.6", - "resolved": 
"https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", - "dev": true - }, - "node_modules/minipass": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", - "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-collect": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-1.0.2.tgz", - "integrity": "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==", - "dev": true, - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minipass-fetch": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.3.tgz", - "integrity": "sha512-n5ITsTkDqYkYJZjcRWzZt9qnZKCT7nKCosJhHoj7S7zD+BP4jVbWs+odsniw5TA3E0sLomhTKOKjF86wf11PuQ==", - "dev": true, - "dependencies": { - "minipass": "^5.0.0", - "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "optionalDependencies": { - "encoding": "^0.1.13" - } - }, - "node_modules/minipass-fetch/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-flush": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", - "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", - "dev": true, - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minipass-json-stream": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/minipass-json-stream/-/minipass-json-stream-1.0.1.tgz", - "integrity": "sha512-ODqY18UZt/I8k+b7rl2AENgbWE8IDYam+undIJONvigAz8KR5GWblsFTEfQs0WODsjbSXWlm+JHEv8Gr6Tfdbg==", - "dev": true, - "dependencies": { - "jsonparse": "^1.3.1", - "minipass": "^3.0.0" - } - }, - "node_modules/minipass-pipeline": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", - "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", - "dev": true, - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-sized": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", - "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", - "dev": true, - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/mkdirp": { - 
"version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node_modules/nan": { - "version": "2.15.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.15.0.tgz", - "integrity": "sha512-8ZtvEnA2c5aYCZYd1cvgdnU6cqwixRoYg70xPLWUws5ORTa/lnw+u4amixRS/Ac5U5mQVgp9pnlSUnbNWFaWZQ==" - }, - "node_modules/natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", - "dev": true - }, - "node_modules/negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", - "dev": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/node-gyp": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-9.4.0.tgz", - "integrity": "sha512-dMXsYP6gc9rRbejLXmTbVRYjAHw7ppswsKyMxuxJxxOHzluIO1rGp9TOQgjFJ+2MCqcOcQTOPB/8Xwhr+7s4Eg==", - "dev": true, - "dependencies": { - "env-paths": "^2.2.0", - "exponential-backoff": "^3.1.1", - "glob": "^7.1.4", - "graceful-fs": "^4.2.6", - "make-fetch-happen": "^11.0.3", - "nopt": "^6.0.0", - "npmlog": "^6.0.0", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.2", - "which": "^2.0.2" - }, - "bin": { - "node-gyp": "bin/node-gyp.js" - }, - "engines": { - "node": "^12.13 || ^14.13 || >=16" - } - }, - "node_modules/node-gyp/node_modules/are-we-there-yet": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", - "integrity": "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==", - "dev": true, - "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/node-gyp/node_modules/gauge": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", - "integrity": "sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==", - "dev": true, - "dependencies": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.3", - "console-control-strings": "^1.1.0", - "has-unicode": "^2.0.1", - "signal-exit": "^3.0.7", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.5" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/node-gyp/node_modules/nopt": { - "version": "6.0.0", - "resolved": 
"https://registry.npmjs.org/nopt/-/nopt-6.0.0.tgz", - "integrity": "sha512-ZwLpbTgdhuZUnZzjd7nb1ZV+4DoiC6/sfiVKok72ym/4Tlf+DFdlHYmT2JPmcNNWV6Pi3SDf1kT+A4r9RTuT9g==", - "dev": true, - "dependencies": { - "abbrev": "^1.0.0" - }, - "bin": { - "nopt": "bin/nopt.js" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/node-gyp/node_modules/npmlog": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz", - "integrity": "sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==", - "dev": true, - "dependencies": { - "are-we-there-yet": "^3.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^4.0.3", - "set-blocking": "^2.0.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/nopt": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", - "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", - "dependencies": { - "abbrev": "1" - }, - "bin": { - "nopt": "bin/nopt.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/normalize-package-data": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-5.0.0.tgz", - "integrity": "sha512-h9iPVIfrVZ9wVYQnxFgtw1ugSvGEMOlyPWWtm8BMJhnwyEL/FLbYbTY3V3PpjI/BUK67n9PEWDu6eHzu1fB15Q==", - "dev": true, - "dependencies": { - "hosted-git-info": "^6.0.0", - "is-core-module": "^2.8.1", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/normalize-package-data/node_modules/hosted-git-info": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz", - "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==", - "dev": true, - "dependencies": { - "lru-cache": "^7.5.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/normalize-package-data/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/normalize-url": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-8.0.0.tgz", - "integrity": "sha512-uVFpKhj5MheNBJRTiMZ9pE/7hD1QTeEvugSJW/OmLzAp78PB5O6adfMNTvmfKhXBkvCzC+rqifWcVYpGFwTjnw==", - "dev": true, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/npm-bundled": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-3.0.0.tgz", - "integrity": "sha512-Vq0eyEQy+elFpzsKjMss9kxqb9tG3YHg4dsyWuUENuzvSUWe1TCnW/vV9FkhvBk/brEDoDiVd+M1Btosa6ImdQ==", - "dev": true, - "dependencies": { - "npm-normalize-package-bin": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-check-updates": { - "version": "16.14.6", - "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.6.tgz", - "integrity": "sha512-sJ6w4AmSDP7YzBXah94Ul2JhiIbjBDfx9XYgib15um2wtiQkOyjE7Lov3MNUSQ84Ry7T81mE4ynMbl/mGbK4HQ==", - "dev": true, - "dependencies": { - "chalk": 
"^5.3.0", - "cli-table3": "^0.6.3", - "commander": "^10.0.1", - "fast-memoize": "^2.5.2", - "find-up": "5.0.0", - "fp-and-or": "^0.1.4", - "get-stdin": "^8.0.0", - "globby": "^11.0.4", - "hosted-git-info": "^5.1.0", - "ini": "^4.1.1", - "js-yaml": "^4.1.0", - "json-parse-helpfulerror": "^1.0.3", - "jsonlines": "^0.1.1", - "lodash": "^4.17.21", - "make-fetch-happen": "^11.1.1", - "minimatch": "^9.0.3", - "p-map": "^4.0.0", - "pacote": "15.2.0", - "parse-github-url": "^1.0.2", - "progress": "^2.0.3", - "prompts-ncu": "^3.0.0", - "rc-config-loader": "^4.1.3", - "remote-git-tags": "^3.0.0", - "rimraf": "^5.0.5", - "semver": "^7.5.4", - "semver-utils": "^1.1.4", - "source-map-support": "^0.5.21", - "spawn-please": "^2.0.2", - "strip-ansi": "^7.1.0", - "strip-json-comments": "^5.0.1", - "untildify": "^4.0.0", - "update-notifier": "^6.0.2" - }, - "bin": { - "ncu": "build/src/bin/cli.js", - "npm-check-updates": "build/src/bin/cli.js" - }, - "engines": { - "node": ">=14.14" - } - }, - "node_modules/npm-check-updates/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/npm-check-updates/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/npm-check-updates/node_modules/chalk": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", - "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", - "dev": true, - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/npm-check-updates/node_modules/glob": { - "version": "10.3.10", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.10.tgz", - "integrity": "sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g==", - "dev": true, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.3.5", - "minimatch": "^9.0.1", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", - "path-scurry": "^1.10.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/npm-check-updates/node_modules/minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/npm-check-updates/node_modules/minipass": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.4.tgz", - "integrity": "sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==", - "dev": true, - 
"engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/npm-check-updates/node_modules/rimraf": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.5.tgz", - "integrity": "sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A==", - "dev": true, - "dependencies": { - "glob": "^10.3.7" - }, - "bin": { - "rimraf": "dist/esm/bin.mjs" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/npm-check-updates/node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dev": true, - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/npm-check-updates/node_modules/strip-json-comments": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-5.0.1.tgz", - "integrity": "sha512-0fk9zBqO67Nq5M/m45qHCJxylV/DhBlIOVExqgOMiCCrzrhU6tCibRXNqE3jwJLftzE9SNuZtYbpzcO+i9FiKw==", - "dev": true, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/npm-install-checks": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-6.1.1.tgz", - "integrity": "sha512-dH3GmQL4vsPtld59cOn8uY0iOqRmqKvV+DLGwNXV/Q7MDgD2QfOADWd/mFXcIE5LVhYYGjA3baz6W9JneqnuCw==", - "dev": true, - "dependencies": { - "semver": "^7.1.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-normalize-package-bin": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz", - "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==", - "dev": true, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-package-arg": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz", - "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==", - "dev": true, - "dependencies": { - "hosted-git-info": "^6.0.0", - "proc-log": "^3.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^5.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-package-arg/node_modules/hosted-git-info": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz", - "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==", - "dev": true, - "dependencies": { - "lru-cache": "^7.5.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-package-arg/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/npm-packlist": { - "version": "7.0.4", - "resolved": 
"https://registry.npmjs.org/npm-packlist/-/npm-packlist-7.0.4.tgz", - "integrity": "sha512-d6RGEuRrNS5/N84iglPivjaJPxhDbZmlbTwTDX2IbcRHG5bZCdtysYMhwiPvcF4GisXHGn7xsxv+GQ7T/02M5Q==", - "dev": true, - "dependencies": { - "ignore-walk": "^6.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-pick-manifest": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz", - "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==", - "dev": true, - "dependencies": { - "npm-install-checks": "^6.0.0", - "npm-normalize-package-bin": "^3.0.0", - "npm-package-arg": "^10.0.0", - "semver": "^7.3.5" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-registry-fetch": { - "version": "14.0.5", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-14.0.5.tgz", - "integrity": "sha512-kIDMIo4aBm6xg7jOttupWZamsZRkAqMqwqqbVXnUqstY5+tapvv6bkH/qMR76jdgV+YljEUCyWx3hRYMrJiAgA==", - "dev": true, - "dependencies": { - "make-fetch-happen": "^11.0.0", - "minipass": "^5.0.0", - "minipass-fetch": "^3.0.0", - "minipass-json-stream": "^1.0.1", - "minizlib": "^2.1.2", - "npm-package-arg": "^10.0.0", - "proc-log": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-registry-fetch/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/npmlog": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", - "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", - "dependencies": { - "are-we-there-yet": "^2.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^3.0.0", - "set-blocking": "^2.0.0" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-inspect": { - "version": "1.12.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", - "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "has-symbols": "^1.0.3", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.fromentries": { - 
"version": "2.0.6", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.6.tgz", - "integrity": "sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.groupby": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.0.tgz", - "integrity": "sha512-70MWG6NfRH9GnbZOikuhPPYzpUpof9iW2J9E4dW7FXTqPNb6rllE6u39SKwwiNh8lCwX3DDb5OgcKGiEBrTTyw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.21.2", - "get-intrinsic": "^1.2.1" - } - }, - "node_modules/object.values": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz", - "integrity": "sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/optionator": { - "version": "0.9.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", - "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", - "dev": true, - "dependencies": { - "@aashutoshrathi/word-wrap": "^1.2.3", - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/p-cancelable": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-3.0.0.tgz", - "integrity": "sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==", - "dev": true, - "engines": { - "node": ">=12.20" - } - }, - "node_modules/p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": true, - "dependencies": { - "yocto-queue": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "dev": true, - "dependencies": { - "p-limit": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-map": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", - "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", - "dev": true, - "dependencies": { - "aggregate-error": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - 
"funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/package-json": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/package-json/-/package-json-8.1.0.tgz", - "integrity": "sha512-hySwcV8RAWeAfPsXb9/HGSPn8lwDnv6fabH+obUZKX169QknRkRhPxd1yMubpKDskLFATkl3jHpNtVtDPFA0Wg==", - "dev": true, - "dependencies": { - "got": "^12.1.0", - "registry-auth-token": "^5.0.1", - "registry-url": "^6.0.0", - "semver": "^7.3.7" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/pacote": { - "version": "15.2.0", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-15.2.0.tgz", - "integrity": "sha512-rJVZeIwHTUta23sIZgEIM62WYwbmGbThdbnkt81ravBplQv+HjyroqnLRNH2+sLJHcGZmLRmhPwACqhfTcOmnA==", - "dev": true, - "dependencies": { - "@npmcli/git": "^4.0.0", - "@npmcli/installed-package-contents": "^2.0.1", - "@npmcli/promise-spawn": "^6.0.1", - "@npmcli/run-script": "^6.0.0", - "cacache": "^17.0.0", - "fs-minipass": "^3.0.0", - "minipass": "^5.0.0", - "npm-package-arg": "^10.0.0", - "npm-packlist": "^7.0.0", - "npm-pick-manifest": "^8.0.0", - "npm-registry-fetch": "^14.0.0", - "proc-log": "^3.0.0", - "promise-retry": "^2.0.1", - "read-package-json": "^6.0.0", - "read-package-json-fast": "^3.0.0", - "sigstore": "^1.3.0", - "ssri": "^10.0.0", - "tar": "^6.1.11" - }, - "bin": { - "pacote": "lib/bin.js" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/pacote/node_modules/fs-minipass": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.2.tgz", - "integrity": "sha512-2GAfyfoaCDRrM6jaOS3UsBts8yJ55VioXdWcOL7dK9zdAuKT71+WBA4ifnNYqVjYv+4SsPxjK0JT4yIIn4cA/g==", - "dev": true, - "dependencies": { - "minipass": "^5.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/pacote/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "dev": true, - "dependencies": { - "callsites": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/parse-github-url": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/parse-github-url/-/parse-github-url-1.0.2.tgz", - "integrity": "sha512-kgBf6avCbO3Cn6+RnzRGLkUsv4ZVqv/VfAYkRsyBcgkshNvVBkRn1FEZcW0Jb+npXQWm2vHPnnOqFteZxRRGNw==", - "dev": true, - "bin": { - "parse-github-url": "cli.js" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - 
"resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true - }, - "node_modules/path-scurry": { - "version": "1.10.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.1.tgz", - "integrity": "sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==", - "dev": true, - "dependencies": { - "lru-cache": "^9.1.1 || ^10.0.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/path-scurry/node_modules/lru-cache": { - "version": "9.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-9.1.1.tgz", - "integrity": "sha512-65/Jky17UwSb0BuB9V+MyDpsOtXKmYwzhyl+cOa9XUiI4uV2Ouy/2voFP3+al0BjZbJgMBD8FojMpAf+Z+qn4A==", - "dev": true, - "engines": { - "node": "14 || >=16.14" - } - }, - "node_modules/path-scurry/node_modules/minipass": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-6.0.2.tgz", - "integrity": "sha512-MzWSV5nYVT7mVyWCwn2o7JH13w2TBRmmSqSRCKzTw+lmft9X4z+3wjvs06Tzijo5z4W/kahUCDpRXTF+ZrmF/w==", - "dev": true, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", - "dev": true, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/proc-log": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-3.0.0.tgz", - "integrity": "sha512-++Vn7NS4Xf9NacaU9Xq3URUuqZETPsf8L4j5/ckhaRYsfPeRyzGw+iDjFhV/Jr3uNmTvvddEJFWh5R1gRgUH8A==", - "dev": true, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/progress": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", - "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", - "dev": true, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/promise-inflight": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", - "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==", - "dev": true - }, - 
"node_modules/promise-retry": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", - "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", - "dev": true, - "dependencies": { - "err-code": "^2.0.2", - "retry": "^0.12.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/prompts-ncu": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/prompts-ncu/-/prompts-ncu-3.0.0.tgz", - "integrity": "sha512-qyz9UxZ5MlPKWVhWrCmSZ1ahm2GVYdjLb8og2sg0IPth1KRuhcggHGuijz0e41dkx35p1t1q3GRISGH7QGALFA==", - "dev": true, - "dependencies": { - "kleur": "^4.0.1", - "sisteransi": "^1.0.5" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/proto-list": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", - "integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==", - "dev": true - }, - "node_modules/punycode": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/pupa": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/pupa/-/pupa-3.1.0.tgz", - "integrity": "sha512-FLpr4flz5xZTSJxSeaheeMKN/EDzMdK7b8PTOC6a5PYFKTucWbdqjgqaEyH0shFiSJrVB1+Qqi4Tk19ccU6Aug==", - "dev": true, - "dependencies": { - "escape-goat": "^4.0.0" - }, - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/quick-lru": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", - "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/rc": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", - "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", - "dev": true, - "dependencies": { - "deep-extend": "^0.6.0", - "ini": "~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - }, - "bin": { - "rc": "cli.js" - } - }, - "node_modules/rc-config-loader": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/rc-config-loader/-/rc-config-loader-4.1.3.tgz", - "integrity": "sha512-kD7FqML7l800i6pS6pvLyIE2ncbk9Du8Q0gp/4hMPhJU6ZxApkoLcGD8ZeqgiAlfwZ6BlETq6qqe+12DUL207w==", - "dev": true, - "dependencies": { - "debug": "^4.3.4", - "js-yaml": "^4.1.0", - "json5": "^2.2.2", - "require-from-string": "^2.0.2" - } - }, - "node_modules/rc-config-loader/node_modules/json5": { - "version": "2.2.3", - "resolved": 
"https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "dev": true, - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/rc/node_modules/ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", - "dev": true - }, - "node_modules/rc/node_modules/strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/read-package-json": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-6.0.4.tgz", - "integrity": "sha512-AEtWXYfopBj2z5N5PbkAOeNHRPUg5q+Nen7QLxV8M2zJq1ym6/lCz3fYNTCXe19puu2d06jfHhrP7v/S2PtMMw==", - "dev": true, - "dependencies": { - "glob": "^10.2.2", - "json-parse-even-better-errors": "^3.0.0", - "normalize-package-data": "^5.0.0", - "npm-normalize-package-bin": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/read-package-json-fast": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-3.0.2.tgz", - "integrity": "sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==", - "dev": true, - "dependencies": { - "json-parse-even-better-errors": "^3.0.0", - "npm-normalize-package-bin": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/read-package-json/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/read-package-json/node_modules/glob": { - "version": "10.3.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.3.tgz", - "integrity": "sha512-92vPiMb/iqpmEgsOoIDvTjc50wf9CCCvMzsi6W0JLPeUKE8TWP1a73PgqSrqy7iAZxaSD1YdzU7QZR5LF51MJw==", - "dev": true, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.0.3", - "minimatch": "^9.0.1", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", - "path-scurry": "^1.10.1" - }, - "bin": { - "glob": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/read-package-json/node_modules/minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/read-package-json/node_modules/minipass": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.2.tgz", - "integrity": 
"sha512-eL79dXrE1q9dBbDCLg7xfn/vl7MS4F1gvJAgjJrQli/jbQWdUttuVawphqpffoIYfRdq78LHx6GP4bU/EQ2ATA==", - "dev": true, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/regexp.prototype.flags": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", - "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "functions-have-names": "^1.2.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", - "dev": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - } - }, - "node_modules/registry-auth-token": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-5.0.2.tgz", - "integrity": "sha512-o/3ikDxtXaA59BmZuZrJZDJv8NMDGSj+6j6XaeBmHw8eY1i1qd9+6H+LjVvQXx3HN6aRCGa1cUdJ9RaJZUugnQ==", - "dev": true, - "dependencies": { - "@pnpm/npm-conf": "^2.1.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/registry-url": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-6.0.1.tgz", - "integrity": "sha512-+crtS5QjFRqFCoQmvGduwYWEBng99ZvmFvF+cUJkGYF1L1BfU8C6Zp9T7f5vPAwyLkUExpvK+ANVZmGU49qi4Q==", - "dev": true, - "dependencies": { - "rc": "1.2.8" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/remote-git-tags": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/remote-git-tags/-/remote-git-tags-3.0.0.tgz", - "integrity": "sha512-C9hAO4eoEsX+OXA4rla66pXZQ+TLQ8T9dttgQj18yuKlPMTVkIkdYXvlMC55IuUsIkV6DpmQYi10JKFLaU+l7w==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/require-from-string": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", - "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/resolve": { - "version": "1.22.4", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.4.tgz", - "integrity": "sha512-PXNdCiPqDqeUou+w1C2eTQbNfxKSuMxqTCuvlmmMsk1NWHL5fRrhY6Pl0qEYYc6+QqGClco1Qj8XnjPego4wfg==", - "dev": true, - "dependencies": { - "is-core-module": "^2.13.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/resolve-alpn": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", - 
"integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", - "dev": true - }, - "node_modules/resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/responselike": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/responselike/-/responselike-3.0.0.tgz", - "integrity": "sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==", - "dev": true, - "dependencies": { - "lowercase-keys": "^3.0.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/retry": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", - "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", - "dev": true, - "engines": { - "node": ">= 4" - } - }, - "node_modules/reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true, - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/safe-regex-test": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", - "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", - "is-regex": "^1.1.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": 
"sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "optional": true - }, - "node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/semver-diff": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-4.0.0.tgz", - "integrity": "sha512-0Ju4+6A8iOnpL/Thra7dZsSlOHYAHIeMxfhWQRI1/VLcT3WDBZKKtQt/QkBOsiIN9ZpuvHE6cGZ0x4glCMmfiA==", - "dev": true, - "dependencies": { - "semver": "^7.3.5" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/semver-utils": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/semver-utils/-/semver-utils-1.1.4.tgz", - "integrity": "sha512-EjnoLE5OGmDAVV/8YDoN5KiajNadjzIp9BAHOhYeQHt7j0UWxjmgsx4YD48wp4Ue1Qogq38F1GNUJNqF1kKKxA==", - "dev": true - }, - "node_modules/set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" - }, - "node_modules/sigstore": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-1.8.0.tgz", - "integrity": "sha512-ogU8qtQ3VFBawRJ8wjsBEX/vIFeHuGs1fm4jZtjWQwjo8pfAt7T/rh+udlAN4+QUe0IzA8qRSc/YZ7dHP6kh+w==", - "dev": true, - "dependencies": { - "@sigstore/bundle": "^1.0.0", - "@sigstore/protobuf-specs": "^0.2.0", - "@sigstore/tuf": "^1.0.3", - "make-fetch-happen": "^11.0.1" - }, - "bin": { - "sigstore": "bin/sigstore.js" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/simple-concat": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", - "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", - "funding": [ - { - 
"type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/simple-get": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-3.1.1.tgz", - "integrity": "sha512-CQ5LTKGfCpvE1K0n2us+kuMPbk/q0EKl82s4aheV9oXjFEz6W/Y7oQFVJuU6QG77hRT4Ghb5RURteF5vnWjupA==", - "dependencies": { - "decompress-response": "^4.2.0", - "once": "^1.3.1", - "simple-concat": "^1.0.0" - } - }, - "node_modules/sisteransi": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", - "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", - "dev": true - }, - "node_modules/slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/smart-buffer": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", - "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", - "dev": true, - "engines": { - "node": ">= 6.0.0", - "npm": ">= 3.0.0" - } - }, - "node_modules/socks": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.7.1.tgz", - "integrity": "sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==", - "dev": true, - "dependencies": { - "ip": "^2.0.0", - "smart-buffer": "^4.2.0" - }, - "engines": { - "node": ">= 10.13.0", - "npm": ">= 3.0.0" - } - }, - "node_modules/socks-proxy-agent": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-7.0.0.tgz", - "integrity": "sha512-Fgl0YPZ902wEsAyiQ+idGd1A7rSFx/ayC1CQVMw5P+EQx2V0SgpGtf6OKFhVjPflPUl9YMmEOnmfjCdMUsygww==", - "dev": true, - "dependencies": { - "agent-base": "^6.0.2", - "debug": "^4.3.3", - "socks": "^2.6.2" - }, - "engines": { - "node": ">= 10" - } - }, - "node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map-support": { - "version": "0.5.21", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", - "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", - "dev": true, - "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "node_modules/spawn-please": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/spawn-please/-/spawn-please-2.0.2.tgz", - "integrity": "sha512-KM8coezO6ISQ89c1BzyWNtcn2V2kAVtwIXd3cN/V5a0xPYc1F/vydrRc01wsKFEQ/p+V1a4sw4z2yMITIXrgGw==", - "dev": true, - "dependencies": { - "cross-spawn": "^7.0.3" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/spdx-correct": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", - "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", - "dev": true, - 
"dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", - "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", - "dev": true - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", - "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", - "dev": true, - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.13", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.13.tgz", - "integrity": "sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w==", - "dev": true - }, - "node_modules/ssri": { - "version": "10.0.4", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.4.tgz", - "integrity": "sha512-12+IR2CB2C28MMAw0Ncqwj5QbTcs0nGIhgJzYWzDkb21vWmfNI83KS4f3Ci6GI98WreIfG7o9UXp3C0qbpA8nQ==", - "dev": true, - "dependencies": { - "minipass": "^5.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/ssri/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string.prototype.trim": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz", - "integrity": "sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/string.prototype.trimend": { - "version": "1.0.6", - "resolved": 
"https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz", - "integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/string.prototype.trimstart": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz", - "integrity": "sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/tar": { - "version": "6.1.11", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.11.tgz", - "integrity": "sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==", - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^3.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 10" - } - }, - "node_modules/text-table": { 
- "version": "0.2.0", - "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", - "dev": true - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=" - }, - "node_modules/tsconfig-paths": { - "version": "3.14.2", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz", - "integrity": "sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==", - "dev": true, - "dependencies": { - "@types/json5": "^0.0.29", - "json5": "^1.0.2", - "minimist": "^1.2.6", - "strip-bom": "^3.0.0" - } - }, - "node_modules/tslib": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", - "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" - }, - "node_modules/tuf-js": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.1.7.tgz", - "integrity": "sha512-i3P9Kgw3ytjELUfpuKVDNBJvk4u5bXL6gskv572mcevPbSKCV3zt3djhmlEQ65yERjIbOSncy7U4cQJaB1CBCg==", - "dev": true, - "dependencies": { - "@tufjs/models": "1.0.4", - "debug": "^4.3.4", - "make-fetch-happen": "^11.1.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", - "engines": { - "node": ">=0.6.11 <=0.7.0 || >=0.7.3" - } - }, - "node_modules/type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "dev": true, - "dependencies": { - "prelude-ls": "^1.2.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/typed-array-length": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", - "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "is-typed-array": "^1.1.9" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/typedarray-to-buffer": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", - "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", - "dev": true, - "dependencies": { - 
"is-typedarray": "^1.0.0" - } - }, - "node_modules/unbox-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/undici": { - "version": "5.26.4", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.26.4.tgz", - "integrity": "sha512-OG+QOf0fTLtazL9P9X7yqWxQ+Z0395Wk6DSkyTxtaq3wQEjIroVe7Y4asCX/vcCxYpNGMnwz8F0qbRYUoaQVMw==", - "dependencies": { - "@fastify/busboy": "^2.0.0" - }, - "engines": { - "node": ">=14.0" - } - }, - "node_modules/unique-filename": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", - "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==", - "dev": true, - "dependencies": { - "unique-slug": "^4.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/unique-slug": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz", - "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==", - "dev": true, - "dependencies": { - "imurmurhash": "^0.1.4" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/unique-string": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-3.0.0.tgz", - "integrity": "sha512-VGXBUVwxKMBUznyffQweQABPRRW1vHZAbadFZud4pLFAqRGvv/96vafgjWFqzourzr8YonlQiPgH0YCJfawoGQ==", - "dev": true, - "dependencies": { - "crypto-random-string": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/universal-user-agent": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", - "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" - }, - "node_modules/untildify": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", - "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/update-notifier": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-6.0.2.tgz", - "integrity": "sha512-EDxhTEVPZZRLWYcJ4ZXjGFN0oP7qYvbXWzEgRm/Yql4dHX5wDbvh89YHP6PK1lzZJYrMtXUuZZz8XGK+U6U1og==", - "dev": true, - "dependencies": { - "boxen": "^7.0.0", - "chalk": "^5.0.1", - "configstore": "^6.0.0", - "has-yarn": "^3.0.0", - "import-lazy": "^4.0.0", - "is-ci": "^3.0.1", - "is-installed-globally": "^0.4.0", - "is-npm": "^6.0.0", - "is-yarn-global": "^0.4.0", - "latest-version": "^7.0.0", - "pupa": "^3.1.0", - "semver": "^7.3.7", - "semver-diff": "^4.0.0", - "xdg-basedir": "^5.1.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/yeoman/update-notifier?sponsor=1" - } - }, - "node_modules/update-notifier/node_modules/chalk": { - "version": "5.2.0", - "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-5.2.0.tgz", - "integrity": "sha512-ree3Gqw/nazQAPuJJEy+avdl7QfZMcUvmHIKgEZkGL+xOBzRvup5Hxo6LHuMceSxOabuJLJm5Yp/92R9eMmMvA==", - "dev": true, - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" - }, - "node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "dev": true, - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "node_modules/validate-npm-package-name": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.0.tgz", - "integrity": "sha512-YuKoXDAhBYxY7SfOKxHBDoSyENFeW5VvIIQp2TGQuit8gpK6MnWaQelBKxso72DoxTZfZdcP3W90LqpSkgPzLQ==", - "dev": true, - "dependencies": { - "builtins": "^5.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=" - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/which-boxed-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", - "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", - "dev": true, - "dependencies": { - "is-bigint": "^1.0.1", - "is-boolean-object": "^1.1.0", - "is-number-object": "^1.0.4", - "is-string": "^1.0.5", - "is-symbol": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/which-typed-array": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz", - "integrity": 
"sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==", - "dev": true, - "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0", - "is-typed-array": "^1.1.10" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/wide-align": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", - "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", - "dependencies": { - "string-width": "^1.0.2 || 2 || 3 || 4" - } - }, - "node_modules/widest-line": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz", - "integrity": "sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==", - "dev": true, - "dependencies": { - "string-width": "^5.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/widest-line/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/widest-line/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "node_modules/widest-line/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/widest-line/node_modules/strip-ansi": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", - "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", - "dev": true, - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": 
"sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrap-ansi/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "node_modules/wrap-ansi/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/wrap-ansi/node_modules/strip-ansi": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", - "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", - "dev": true, - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" - }, - "node_modules/write-file-atomic": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", - "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", - "dev": true, - "dependencies": { - "imurmurhash": "^0.1.4", - "is-typedarray": "^1.0.0", - "signal-exit": "^3.0.2", - "typedarray-to-buffer": "^3.1.5" - } - }, - "node_modules/xdg-basedir": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-5.1.0.tgz", - "integrity": "sha512-GCPAHLvrIH13+c0SuacwvRYj2SxJXQ4kaVTT5xgL3kPrz56XxkF21IGhjSE1+W0aw7gpBWRGXLCPnPby6lSpmQ==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, - "node_modules/yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - } - }, - "dependencies": { - "@aashutoshrathi/word-wrap": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", - "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", - "dev": true - }, - "@actions/core": { - "version": "1.10.1", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.1.tgz", - "integrity": "sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==", - "requires": { - "@actions/http-client": "^2.0.1", - "uuid": "^8.3.2" - } - }, - "@actions/github": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.0.tgz", - "integrity": "sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g==", - "requires": { - "@actions/http-client": "^2.2.0", - "@octokit/core": "^5.0.1", - "@octokit/plugin-paginate-rest": "^9.0.0", - "@octokit/plugin-rest-endpoint-methods": "^10.0.0" - } - }, - "@actions/http-client": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.0.tgz", - "integrity": "sha512-q+epW0trjVUUHboliPb4UF9g2msf+w61b32tAkFEwL/IwP0DQWgbCMM0Hbe3e3WXSKz5VcUXbzJQgy8Hkra/Lg==", - "requires": { - "tunnel": "^0.0.6", - "undici": "^5.25.4" - } - }, - "@colors/colors": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", - "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", - "dev": true, - "optional": true - }, - "@eslint-community/eslint-utils": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.2.0.tgz", - "integrity": "sha512-gB8T4H4DEfX2IV9zGDJPOBgP1e/DbfCPDTtEqUMckpvzS1OYtva8JdFYBqMwYk7xAQ429WGF/UPqn8uQ//h2vQ==", - "dev": true, - "requires": { - "eslint-visitor-keys": "^3.3.0" - } - }, - "@eslint-community/regexpp": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.8.0.tgz", - "integrity": "sha512-JylOEEzDiOryeUnFbQz+oViCXS0KsvR1mvHkoMiu5+UiBvy+RYX7tzlIIIEstF/gVa2tj9AQXk3dgnxv6KxhFg==", - "dev": true - }, - "@eslint/eslintrc": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.2.tgz", - "integrity": "sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g==", - "dev": true, - "requires": { - "ajv": "^6.12.4", - "debug": "^4.3.2", - "espree": "^9.6.0", - "globals": "^13.19.0", - "ignore": "^5.2.0", - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.1.2", - "strip-json-comments": "^3.1.1" - } - }, - "@eslint/js": { - "version": "8.51.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.51.0.tgz", - "integrity": "sha512-HxjQ8Qn+4SI3/AFv6sOrDB+g6PpUTDwSJiQqOrnneEk8L71161srI9gjzzZvYVbzHiVg/BvcH95+cK/zfIt4pg==", - "dev": true - }, - "@fastify/busboy": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/@fastify/busboy/-/busboy-2.0.0.tgz", - "integrity": "sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ==" - }, - "@humanwhocodes/config-array": { - "version": "0.11.11", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.11.tgz", - "integrity": "sha512-N2brEuAadi0CcdeMXUkhbZB84eskAc8MEX1By6qEchoVywSgXPIjou4rYsl0V3Hj0ZnuGycGCjdNgockbzeWNA==", - "dev": true, - "requires": { - "@humanwhocodes/object-schema": "^1.2.1", - "debug": "^4.1.1", - "minimatch": "^3.0.5" - } - }, - "@humanwhocodes/module-importer": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", - "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", - "dev": true - }, - "@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", - "dev": true - }, - "@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dev": true, - "requires": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true - }, - "emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "requires": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - } - }, - "strip-ansi": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", - "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", - "dev": true, - "requires": { - "ansi-regex": "^6.0.1" - } - } - } - }, - "@mapbox/node-pre-gyp": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.8.tgz", - "integrity": "sha512-CMGKi28CF+qlbXh26hDe6NxCd7amqeAzEqnS6IHeO6LoaKyM/n+Xw3HT1COdq8cuioOdlKdqn/hCmqPUOMOywg==", - "requires": { - "detect-libc": "^1.0.3", - "https-proxy-agent": "^5.0.0", - "make-dir": "^3.1.0", - "node-fetch": "^2.6.5", - "nopt": "^5.0.0", - "npmlog": "^5.0.1", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.11" - } - }, - "@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": 
"sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "requires": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - } - }, - "@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true - }, - "@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "requires": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - } - }, - "@npmcli/fs": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.0.tgz", - "integrity": "sha512-7kZUAaLscfgbwBQRbvdMYaZOWyMEcPTH/tJjnyAWJ/dvvs9Ef+CERx/qJb9GExJpl1qipaDGn7KqHnFGGixd0w==", - "dev": true, - "requires": { - "semver": "^7.3.5" - } - }, - "@npmcli/git": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-4.1.0.tgz", - "integrity": "sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ==", - "dev": true, - "requires": { - "@npmcli/promise-spawn": "^6.0.0", - "lru-cache": "^7.4.4", - "npm-pick-manifest": "^8.0.0", - "proc-log": "^3.0.0", - "promise-inflight": "^1.0.1", - "promise-retry": "^2.0.1", - "semver": "^7.3.5", - "which": "^3.0.0" - }, - "dependencies": { - "lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true - }, - "which": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz", - "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - } - } - }, - "@npmcli/installed-package-contents": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.0.2.tgz", - "integrity": "sha512-xACzLPhnfD51GKvTOOuNX2/V4G4mz9/1I2MfDoye9kBM3RYe5g2YbscsaGoTlaWqkxeiapBWyseULVKpSVHtKQ==", - "dev": true, - "requires": { - "npm-bundled": "^3.0.0", - "npm-normalize-package-bin": "^3.0.0" - } - }, - "@npmcli/node-gyp": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-3.0.0.tgz", - "integrity": "sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA==", - "dev": true - }, - "@npmcli/promise-spawn": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz", - "integrity": "sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==", - "dev": true, - "requires": { - "which": "^3.0.0" - }, - "dependencies": { - "which": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz", - "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - } - } - }, - "@npmcli/run-script": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-6.0.2.tgz", - "integrity": 
"sha512-NCcr1uQo1k5U+SYlnIrbAh3cxy+OQT1VtqiAbxdymSlptbzBb62AjH2xXgjNCoP073hoa1CfCAcwoZ8k96C4nA==", - "dev": true, - "requires": { - "@npmcli/node-gyp": "^3.0.0", - "@npmcli/promise-spawn": "^6.0.0", - "node-gyp": "^9.0.0", - "read-package-json-fast": "^3.0.0", - "which": "^3.0.0" - }, - "dependencies": { - "which": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz", - "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - } - } - }, - "@octokit/auth-token": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", - "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==" - }, - "@octokit/core": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.0.1.tgz", - "integrity": "sha512-lyeeeZyESFo+ffI801SaBKmCfsvarO+dgV8/0gD8u1d87clbEdWsP5yC+dSj3zLhb2eIf5SJrn6vDz9AheETHw==", - "requires": { - "@octokit/auth-token": "^4.0.0", - "@octokit/graphql": "^7.0.0", - "@octokit/request": "^8.0.2", - "@octokit/request-error": "^5.0.0", - "@octokit/types": "^12.0.0", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/endpoint": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.1.tgz", - "integrity": "sha512-hRlOKAovtINHQPYHZlfyFwaM8OyetxeoC81lAkBy34uLb8exrZB50SQdeW3EROqiY9G9yxQTpp5OHTV54QD+vA==", - "requires": { - "@octokit/types": "^12.0.0", - "is-plain-object": "^5.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/graphql": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.0.2.tgz", - "integrity": "sha512-OJ2iGMtj5Tg3s6RaXH22cJcxXRi7Y3EBqbHTBRq+PQAqfaS8f/236fUrWhfSn8P4jovyzqucxme7/vWSSZBX2Q==", - "requires": { - "@octokit/request": "^8.0.1", - "@octokit/types": "^12.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/openapi-types": { - "version": "19.0.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-19.0.0.tgz", - "integrity": "sha512-PclQ6JGMTE9iUStpzMkwLCISFn/wDeRjkZFIKALpvJQNBGwDoYYi2fFvuHwssoQ1rXI5mfh6jgTgWuddeUzfWw==" - }, - "@octokit/plugin-paginate-rest": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.0.0.tgz", - "integrity": "sha512-oIJzCpttmBTlEhBmRvb+b9rlnGpmFgDtZ0bB6nq39qIod6A5DP+7RkVLMOixIgRCYSHDTeayWqmiJ2SZ6xgfdw==", - "requires": { - "@octokit/types": "^12.0.0" - } - }, - "@octokit/plugin-rest-endpoint-methods": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.0.1.tgz", - "integrity": "sha512-fgS6HPkPvJiz8CCliewLyym9qAx0RZ/LKh3sATaPfM41y/O2wQ4Z9MrdYeGPVh04wYmHFmWiGlKPC7jWVtZXQA==", - "requires": { - "@octokit/types": "^12.0.0" - } - }, - "@octokit/request": { - "version": "8.1.4", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.1.4.tgz", - "integrity": "sha512-M0aaFfpGPEKrg7XoA/gwgRvc9MSXHRO2Ioki1qrPDbl1e9YhjIwVoHE7HIKmv/m3idzldj//xBujcFNqGX6ENA==", - "requires": { - "@octokit/endpoint": "^9.0.0", - "@octokit/request-error": "^5.0.0", - "@octokit/types": "^12.0.0", - "is-plain-object": "^5.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/request-error": { - "version": "5.0.1", - "resolved": 
"https://registry.npmjs.org/@octokit/request-error/-/request-error-5.0.1.tgz", - "integrity": "sha512-X7pnyTMV7MgtGmiXBwmO6M5kIPrntOXdyKZLigNfQWSEQzVxR4a4vo49vJjTWX70mPndj8KhfT4Dx+2Ng3vnBQ==", - "requires": { - "@octokit/types": "^12.0.0", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "@octokit/types": { - "version": "12.0.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.0.0.tgz", - "integrity": "sha512-EzD434aHTFifGudYAygnFlS1Tl6KhbTynEWELQXIbTY8Msvb5nEqTZIm7sbPEt4mQYLZwu3zPKVdeIrw0g7ovg==", - "requires": { - "@octokit/openapi-types": "^19.0.0" - } - }, - "@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "dev": true, - "optional": true - }, - "@pnpm/config.env-replace": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@pnpm/config.env-replace/-/config.env-replace-1.0.0.tgz", - "integrity": "sha512-ZVPVDi1E8oeXlYqkGRtX0CkzLTwE2zt62bjWaWKaAvI8NZqHzlMvGeSNDpW+JB3+aKanYb4UETJOF1/CxGPemA==", - "dev": true - }, - "@pnpm/network.ca-file": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@pnpm/network.ca-file/-/network.ca-file-1.0.2.tgz", - "integrity": "sha512-YcPQ8a0jwYU9bTdJDpXjMi7Brhkr1mXsXrUJvjqM2mQDgkRiz8jFaQGOdaLxgjtUfQgZhKy/O3cG/YwmgKaxLA==", - "dev": true, - "requires": { - "graceful-fs": "4.2.10" - }, - "dependencies": { - "graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", - "dev": true - } - } - }, - "@pnpm/npm-conf": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@pnpm/npm-conf/-/npm-conf-2.1.0.tgz", - "integrity": "sha512-Oe6ntvgsMTE3hDIqy6sajqHF+MnzJrOF06qC2QSiUEybLL7cp6tjoKUa32gpd9+KPVl4QyMs3E3nsXrx/Vdnlw==", - "dev": true, - "requires": { - "@pnpm/config.env-replace": "^1.0.0", - "@pnpm/network.ca-file": "^1.0.1", - "config-chain": "^1.1.11" - } - }, - "@sigstore/bundle": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-1.0.0.tgz", - "integrity": "sha512-yLvrWDOh6uMOUlFCTJIZEnwOT9Xte7NPXUqVexEKGSF5XtBAuSg5du0kn3dRR0p47a4ah10Y0mNt8+uyeQXrBQ==", - "dev": true, - "requires": { - "@sigstore/protobuf-specs": "^0.2.0" - } - }, - "@sigstore/protobuf-specs": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.0.tgz", - "integrity": "sha512-8ZhZKAVfXjIspDWwm3D3Kvj0ddbJ0HqDZ/pOs5cx88HpT8mVsotFrg7H1UMnXOuDHz6Zykwxn4mxG3QLuN+RUg==", - "dev": true - }, - "@sigstore/tuf": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-1.0.3.tgz", - "integrity": "sha512-2bRovzs0nJZFlCN3rXirE4gwxCn97JNjMmwpecqlbgV9WcxX7WRuIrgzx/X7Ib7MYRbyUTpBYE0s2x6AmZXnlg==", - "dev": true, - "requires": { - "@sigstore/protobuf-specs": "^0.2.0", - "tuf-js": "^1.1.7" - } - }, - "@sindresorhus/is": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-5.3.0.tgz", - "integrity": "sha512-CX6t4SYQ37lzxicAqsBtxA3OseeoVrh9cSJ5PFYam0GksYlupRfy1A+Q4aYD3zvcfECLc0zO2u+ZnR2UYKvCrw==", - "dev": true - }, - "@szmarczak/http-timer": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-5.0.1.tgz", - "integrity": 
"sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==", - "dev": true, - "requires": { - "defer-to-connect": "^2.0.1" - } - }, - "@tootallnate/once": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", - "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", - "dev": true - }, - "@tufjs/canonical-json": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-1.0.0.tgz", - "integrity": "sha512-QTnf++uxunWvG2z3UFNzAoQPHxnSXOwtaI3iJ+AohhV+5vONuArPjJE7aPXPVXfXJsqrVbZBu9b81AJoSd09IQ==", - "dev": true - }, - "@tufjs/models": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-1.0.4.tgz", - "integrity": "sha512-qaGV9ltJP0EO25YfFUPhxRVK0evXFIAGicsVXuRim4Ed9cjPxYhNnNJ49SFmbeLgtxpslIkX317IgpfcHPVj/A==", - "dev": true, - "requires": { - "@tufjs/canonical-json": "1.0.0", - "minimatch": "^9.0.0" - }, - "dependencies": { - "brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0" - } - }, - "minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "requires": { - "brace-expansion": "^2.0.1" - } - } - } - }, - "@types/http-cache-semantics": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz", - "integrity": "sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==", - "dev": true - }, - "@types/json5": { - "version": "0.0.29", - "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", - "dev": true - }, - "abbrev": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" - }, - "acorn": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", - "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", - "dev": true - }, - "acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true, - "requires": {} - }, - "agent-base": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", - "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", - "requires": { - "debug": "4" - } - }, - "agentkeepalive": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.3.0.tgz", - "integrity": "sha512-7Epl1Blf4Sy37j4v9f9FjICCh4+KAQOyXgHEwlyBiAQLbhKdq/i2QQU3amQalS/wPhdPzDXPL5DMR5bkn+YeWg==", - "dev": true, - "requires": { - "debug": "^4.1.0", - "depd": "^2.0.0", - 
"humanize-ms": "^1.2.1" - } - }, - "aggregate-error": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "dev": true, - "requires": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - } - }, - "ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "requires": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, - "ansi-align": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", - "integrity": "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==", - "dev": true, - "requires": { - "string-width": "^4.1.0" - } - }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "requires": { - "color-convert": "^2.0.1" - } - }, - "aproba": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", - "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" - }, - "are-we-there-yet": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", - "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", - "requires": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - } - }, - "argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true - }, - "array-buffer-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", - "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "is-array-buffer": "^3.0.1" - } - }, - "array-includes": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.6.tgz", - "integrity": "sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", - "is-string": "^1.0.7" - } - }, - "array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "dev": true - }, - "array.prototype.findlastindex": { - "version": "1.2.2", - "resolved": 
"https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.2.tgz", - "integrity": "sha512-tb5thFFlUcp7NdNF6/MpDk/1r/4awWG1FIz3YqDf+/zJSTezBb+/5WViH41obXULHVpDzoiCLpJ/ZO9YbJMsdw==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.1.3" - } - }, - "array.prototype.flat": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz", - "integrity": "sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0" - } - }, - "array.prototype.flatmap": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz", - "integrity": "sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0" - } - }, - "available-typed-arrays": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", - "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", - "dev": true - }, - "balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - }, - "before-after-hook": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", - "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" - }, - "boxen": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/boxen/-/boxen-7.0.2.tgz", - "integrity": "sha512-1Z4UJabXUP1/R9rLpoU3O2lEMnG3pPLAs/ZD2lF3t2q7qD5lM8rqbtnvtvm4N0wEyNlE+9yZVTVAGmd1V5jabg==", - "dev": true, - "requires": { - "ansi-align": "^3.0.1", - "camelcase": "^7.0.0", - "chalk": "^5.0.1", - "cli-boxes": "^3.0.0", - "string-width": "^5.1.2", - "type-fest": "^2.13.0", - "widest-line": "^4.0.1", - "wrap-ansi": "^8.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true - }, - "chalk": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.2.0.tgz", - "integrity": "sha512-ree3Gqw/nazQAPuJJEy+avdl7QfZMcUvmHIKgEZkGL+xOBzRvup5Hxo6LHuMceSxOabuJLJm5Yp/92R9eMmMvA==", - "dev": true - }, - "emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "requires": { - "eastasianwidth": 
"^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - } - }, - "strip-ansi": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", - "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", - "dev": true, - "requires": { - "ansi-regex": "^6.0.1" - } - }, - "type-fest": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", - "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", - "dev": true - } - } - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, - "buffer-from": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", - "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", - "dev": true - }, - "builtins": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz", - "integrity": "sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ==", - "dev": true, - "requires": { - "semver": "^7.0.0" - } - }, - "cacache": { - "version": "17.1.3", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.3.tgz", - "integrity": "sha512-jAdjGxmPxZh0IipMdR7fK/4sDSrHMLUV0+GvVUsjwyGNKHsh79kW/otg+GkbXwl6Uzvy9wsvHOX4nUoWldeZMg==", - "dev": true, - "requires": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^7.7.1", - "minipass": "^5.0.0", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, - "dependencies": { - "brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0" - } - }, - "fs-minipass": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.2.tgz", - "integrity": "sha512-2GAfyfoaCDRrM6jaOS3UsBts8yJ55VioXdWcOL7dK9zdAuKT71+WBA4ifnNYqVjYv+4SsPxjK0JT4yIIn4cA/g==", - "dev": true, - "requires": { - "minipass": "^5.0.0" - } - }, - "glob": { - "version": "10.3.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.3.tgz", - "integrity": "sha512-92vPiMb/iqpmEgsOoIDvTjc50wf9CCCvMzsi6W0JLPeUKE8TWP1a73PgqSrqy7iAZxaSD1YdzU7QZR5LF51MJw==", - "dev": true, - "requires": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.0.3", - "minimatch": "^9.0.1", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", - "path-scurry": "^1.10.1" - } - }, - "lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": 
"sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true - }, - "minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "requires": { - "brace-expansion": "^2.0.1" - } - }, - "minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true - } - } - }, - "cacheable-lookup": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-7.0.0.tgz", - "integrity": "sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w==", - "dev": true - }, - "cacheable-request": { - "version": "10.2.8", - "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-10.2.8.tgz", - "integrity": "sha512-IDVO5MJ4LItE6HKFQTqT2ocAQsisOoCTUDu1ddCmnhyiwFQjXNPp4081Xj23N4tO+AFEFNzGuNEf/c8Gwwt15A==", - "dev": true, - "requires": { - "@types/http-cache-semantics": "^4.0.1", - "get-stream": "^6.0.1", - "http-cache-semantics": "^4.1.1", - "keyv": "^4.5.2", - "mimic-response": "^4.0.0", - "normalize-url": "^8.0.0", - "responselike": "^3.0.0" - }, - "dependencies": { - "mimic-response": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-4.0.0.tgz", - "integrity": "sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==", - "dev": true - } - } - }, - "call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", - "dev": true, - "requires": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - } - }, - "callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "dev": true - }, - "camelcase": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-7.0.1.tgz", - "integrity": "sha512-xlx1yCK2Oc1APsPXDL2LdlNP6+uu8OCDdhOBSVT279M/S+y75O30C2VuD8T2ogdePBBl7PfPF4504tnLgX3zfw==", - "dev": true - }, - "canvas": { - "version": "2.9.1", - "resolved": "https://registry.npmjs.org/canvas/-/canvas-2.9.1.tgz", - "integrity": "sha512-vSQti1uG/2gjv3x6QLOZw7TctfufaerTWbVe+NSduHxxLGB+qf3kFgQ6n66DSnuoINtVUjrLLIK2R+lxrBG07A==", - "requires": { - "@mapbox/node-pre-gyp": "^1.0.0", - "nan": "^2.15.0", - "simple-get": "^3.0.3" - } - }, - "chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - } - }, - "chart.js": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-3.5.1.tgz", - "integrity": "sha512-m5kzt72I1WQ9LILwQC4syla/LD/N413RYv2Dx2nnTkRS9iv/ey1xLTt0DnPc/eWV4zI+BgEgDYBIzbQhZHc/PQ==" - }, - "chartjs-node-canvas": { - "version": "4.1.6", - "resolved": 
"https://registry.npmjs.org/chartjs-node-canvas/-/chartjs-node-canvas-4.1.6.tgz", - "integrity": "sha512-UQJbPWrvqB/FoLclGA9BaLQmZbzSYlujF4w8NZd6Xzb+sqgACBb2owDX6m7ifCXLjUW5Nz0Qx0qqrTtQkkSoYw==", - "requires": { - "canvas": "^2.8.0", - "tslib": "^2.3.1" - } - }, - "chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==" - }, - "ci-info": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.8.0.tgz", - "integrity": "sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw==", - "dev": true - }, - "clean-stack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", - "dev": true - }, - "cli-boxes": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz", - "integrity": "sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==", - "dev": true - }, - "cli-table3": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.3.tgz", - "integrity": "sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg==", - "dev": true, - "requires": { - "@colors/colors": "1.5.0", - "string-width": "^4.2.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "color-support": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==" - }, - "commander": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz", - "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", - "dev": true - }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" - }, - "config-chain": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz", - "integrity": "sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==", - "dev": true, - "requires": { - "ini": "^1.3.4", - "proto-list": "~1.2.1" - }, - "dependencies": { - "ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", - "dev": true - } - } - }, - "configstore": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/configstore/-/configstore-6.0.0.tgz", - "integrity": 
"sha512-cD31W1v3GqUlQvbBCGcXmd2Nj9SvLDOP1oQ0YFuLETufzSPaKp11rYBsSOm7rCsW3OnIRAFM3OxRhceaXNYHkA==", - "dev": true, - "requires": { - "dot-prop": "^6.0.1", - "graceful-fs": "^4.2.6", - "unique-string": "^3.0.0", - "write-file-atomic": "^3.0.3", - "xdg-basedir": "^5.0.1" - } - }, - "console-control-strings": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=" - }, - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, - "crypto-random-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-4.0.0.tgz", - "integrity": "sha512-x8dy3RnvYdlUcPOjkEHqozhiwzKNSq7GcPuXFbnyMOCHxX8V3OgIg/pYuabl2sbUPfIJaeAQB7PMOK8DFIdoRA==", - "dev": true, - "requires": { - "type-fest": "^1.0.1" - }, - "dependencies": { - "type-fest": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz", - "integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==", - "dev": true - } - } - }, - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "requires": { - "ms": "2.1.2" - } - }, - "decompress-response": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-4.2.1.tgz", - "integrity": "sha512-jOSne2qbyE+/r8G1VU+G/82LBs2Fs4LAsTiLSHOCOMZQl2OKZ6i8i4IyHemTe+/yIXOtTcRQMzPcgyhoFlqPkw==", - "requires": { - "mimic-response": "^2.0.0" - } - }, - "deep-extend": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", - "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", - "dev": true - }, - "deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true - }, - "defer-to-connect": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", - "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", - "dev": true - }, - "define-properties": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.0.tgz", - "integrity": "sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==", - "dev": true, - "requires": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - } - }, - "delegates": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" - }, - "depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "dev": true - }, - "deprecation": 
{ - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "detect-libc": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", - "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=" - }, - "dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "requires": { - "path-type": "^4.0.0" - } - }, - "doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", - "dev": true, - "requires": { - "esutils": "^2.0.2" - } - }, - "dot-prop": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-6.0.1.tgz", - "integrity": "sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==", - "dev": true, - "requires": { - "is-obj": "^2.0.0" - } - }, - "eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "dev": true - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "encoding": { - "version": "0.1.13", - "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", - "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", - "optional": true, - "requires": { - "iconv-lite": "^0.6.2" - } - }, - "env-paths": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", - "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", - "dev": true - }, - "err-code": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", - "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", - "dev": true - }, - "es-abstract": { - "version": "1.21.2", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.21.2.tgz", - "integrity": "sha512-y/B5POM2iBnIxCiernH1G7rC9qQoM77lLIMQLuob0zhp8C56Po81+2Nj0WFKnd0pNReDTnkYryc+zhOzpEIROg==", - "dev": true, - "requires": { - "array-buffer-byte-length": "^1.0.0", - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "es-set-tostringtag": "^2.0.1", - "es-to-primitive": "^1.2.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.2.0", - "get-symbol-description": "^1.0.0", - "globalthis": "^1.0.3", - "gopd": "^1.0.1", - "has": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", - "is-array-buffer": "^3.0.2", - "is-callable": "^1.2.7", - "is-negative-zero": "^2.0.2", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", - "is-string": "^1.0.7", - "is-typed-array": "^1.1.10", - "is-weakref": "^1.0.2", - "object-inspect": "^1.12.3", - 
"object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.4.3", - "safe-regex-test": "^1.0.0", - "string.prototype.trim": "^1.2.7", - "string.prototype.trimend": "^1.0.6", - "string.prototype.trimstart": "^1.0.6", - "typed-array-length": "^1.0.4", - "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.9" - } - }, - "es-set-tostringtag": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", - "integrity": "sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==", - "dev": true, - "requires": { - "get-intrinsic": "^1.1.3", - "has": "^1.0.3", - "has-tostringtag": "^1.0.0" - } - }, - "es-shim-unscopables": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", - "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", - "dev": true, - "requires": { - "has": "^1.0.3" - } - }, - "es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", - "dev": true, - "requires": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - } - }, - "escape-goat": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-goat/-/escape-goat-4.0.0.tgz", - "integrity": "sha512-2Sd4ShcWxbx6OY1IHyla/CVNwvg7XwZVoXZHcSu9w9SReNP1EzzD5T8NWKIR38fIqEns9kDWKUQTXXAmlDrdPg==", - "dev": true - }, - "escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true - }, - "eslint": { - "version": "8.51.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.51.0.tgz", - "integrity": "sha512-2WuxRZBrlwnXi+/vFSJyjMqrNjtJqiasMzehF0shoLaW7DzS3/9Yvrmq5JiT66+pNjiX4UBnLDiKHcWAr/OInA==", - "dev": true, - "requires": { - "@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.6.1", - "@eslint/eslintrc": "^2.1.2", - "@eslint/js": "8.51.0", - "@humanwhocodes/config-array": "^0.11.11", - "@humanwhocodes/module-importer": "^1.0.1", - "@nodelib/fs.walk": "^1.2.8", - "ajv": "^6.12.4", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", - "debug": "^4.3.2", - "doctrine": "^3.0.0", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.2.2", - "eslint-visitor-keys": "^3.4.3", - "espree": "^9.6.1", - "esquery": "^1.4.2", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^6.0.1", - "find-up": "^5.0.0", - "glob-parent": "^6.0.2", - "globals": "^13.19.0", - "graphemer": "^1.4.0", - "ignore": "^5.2.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "is-path-inside": "^3.0.3", - "js-yaml": "^4.1.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.4.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.1.2", - "natural-compare": "^1.4.0", - "optionator": "^0.9.3", - "strip-ansi": "^6.0.1", - "text-table": "^0.2.0" - }, - "dependencies": { - "glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dev": true, - "requires": 
{ - "is-glob": "^4.0.3" - } - } - } - }, - "eslint-config-standard": { - "version": "17.1.0", - "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.1.0.tgz", - "integrity": "sha512-IwHwmaBNtDK4zDHQukFDW5u/aTb8+meQWZvNFWkiGmbWjD6bqyuSSBxxXKkCftCUzc1zwCH2m/baCNDLGmuO5Q==", - "dev": true, - "requires": {} - }, - "eslint-import-resolver-node": { - "version": "0.3.7", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz", - "integrity": "sha512-gozW2blMLJCeFpBwugLTGyvVjNoeo1knonXAcatC6bjPBZitotxdWf7Gimr25N4c0AAOo4eOUfaG82IJPDpqCA==", - "dev": true, - "requires": { - "debug": "^3.2.7", - "is-core-module": "^2.11.0", - "resolve": "^1.22.1" - }, - "dependencies": { - "debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - } - } - }, - "eslint-module-utils": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz", - "integrity": "sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw==", - "dev": true, - "requires": { - "debug": "^3.2.7" - }, - "dependencies": { - "debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - } - } - }, - "eslint-plugin-es": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz", - "integrity": "sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ==", - "dev": true, - "requires": { - "eslint-utils": "^2.0.0", - "regexpp": "^3.0.0" - } - }, - "eslint-plugin-import": { - "version": "2.28.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.28.1.tgz", - "integrity": "sha512-9I9hFlITvOV55alzoKBI+K9q74kv0iKMeY6av5+umsNwayt59fz692daGyjR+oStBQgx6nwR9rXldDev3Clw+A==", - "dev": true, - "requires": { - "array-includes": "^3.1.6", - "array.prototype.findlastindex": "^1.2.2", - "array.prototype.flat": "^1.3.1", - "array.prototype.flatmap": "^1.3.1", - "debug": "^3.2.7", - "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.7", - "eslint-module-utils": "^2.8.0", - "has": "^1.0.3", - "is-core-module": "^2.13.0", - "is-glob": "^4.0.3", - "minimatch": "^3.1.2", - "object.fromentries": "^2.0.6", - "object.groupby": "^1.0.0", - "object.values": "^1.1.6", - "semver": "^6.3.1", - "tsconfig-paths": "^3.14.2" - }, - "dependencies": { - "debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - }, - "doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dev": true, - "requires": { - "esutils": "^2.0.2" - } - }, - "semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": 
"sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true - } - } - }, - "eslint-plugin-n": { - "version": "15.6.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-15.6.1.tgz", - "integrity": "sha512-R9xw9OtCRxxaxaszTQmQAlPgM+RdGjaL1akWuY/Fv9fRAi8Wj4CUKc6iYVG8QNRjRuo8/BqVYIpfqberJUEacA==", - "dev": true, - "peer": true, - "requires": { - "builtins": "^5.0.1", - "eslint-plugin-es": "^4.1.0", - "eslint-utils": "^3.0.0", - "ignore": "^5.1.1", - "is-core-module": "^2.11.0", - "minimatch": "^3.1.2", - "resolve": "^1.22.1", - "semver": "^7.3.8" - }, - "dependencies": { - "eslint-plugin-es": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz", - "integrity": "sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ==", - "dev": true, - "peer": true, - "requires": { - "eslint-utils": "^2.0.0", - "regexpp": "^3.0.0" - }, - "dependencies": { - "eslint-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", - "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", - "dev": true, - "peer": true, - "requires": { - "eslint-visitor-keys": "^1.1.0" - } - }, - "eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true, - "peer": true - } - } - }, - "eslint-utils": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", - "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", - "dev": true, - "peer": true, - "requires": { - "eslint-visitor-keys": "^2.0.0" - } - }, - "eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", - "dev": true, - "peer": true - } - } - }, - "eslint-plugin-node": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz", - "integrity": "sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==", - "dev": true, - "requires": { - "eslint-plugin-es": "^3.0.0", - "eslint-utils": "^2.0.0", - "ignore": "^5.1.1", - "minimatch": "^3.0.4", - "resolve": "^1.10.1", - "semver": "^6.1.0" - }, - "dependencies": { - "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true - } - } - }, - "eslint-plugin-promise": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.1.1.tgz", - "integrity": "sha512-tjqWDwVZQo7UIPMeDReOpUgHCmCiH+ePnVT+5zVapL0uuHnegBUs2smM13CzOs2Xb5+MHMRFTs9v24yjba4Oig==", - "dev": true, - "requires": {} - }, - "eslint-scope": { - "version": "7.2.2", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", - "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", - "dev": true, 
- "requires": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - } - }, - "eslint-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", - "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", - "dev": true, - "requires": { - "eslint-visitor-keys": "^1.1.0" - }, - "dependencies": { - "eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true - } - } - }, - "eslint-visitor-keys": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", - "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", - "dev": true - }, - "espree": { - "version": "9.6.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", - "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", - "dev": true, - "requires": { - "acorn": "^8.9.0", - "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^3.4.1" - } - }, - "esquery": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", - "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", - "dev": true, - "requires": { - "estraverse": "^5.1.0" - } - }, - "esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "dev": true, - "requires": { - "estraverse": "^5.2.0" - } - }, - "estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true - }, - "esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true - }, - "exponential-backoff": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.1.tgz", - "integrity": "sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==", - "dev": true - }, - "fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true - }, - "fast-glob": { - "version": "3.2.12", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", - "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", - "dev": true, - "requires": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - } - }, - "fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true - }, - "fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", - "dev": true - }, - "fast-memoize": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/fast-memoize/-/fast-memoize-2.5.2.tgz", - "integrity": "sha512-Ue0LwpDYErFbmNnZSF0UH6eImUwDmogUO1jyE+JbN2gsQz/jICm1Ve7t9QT0rNSsfJt+Hs4/S3GnsDVjL4HVrw==", - "dev": true - }, - "fastq": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", - "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", - "dev": true, - "requires": { - "reusify": "^1.0.4" - } - }, - "file-entry-cache": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", - "dev": true, - "requires": { - "flat-cache": "^3.0.4" - } - }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, - "find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "dev": true, - "requires": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - } - }, - "flat-cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", - "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", - "dev": true, - "requires": { - "flatted": "^3.1.0", - "rimraf": "^3.0.2" - } - }, - "flatted": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.5.tgz", - "integrity": "sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg==", - "dev": true - }, - "for-each": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", - "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", - "dev": true, - "requires": { - "is-callable": "^1.1.3" - } - }, - "foreground-child": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", - "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", - "dev": true, - "requires": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, - "dependencies": { - "signal-exit": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.0.2.tgz", - "integrity": "sha512-MY2/qGx4enyjprQnFaZsHib3Yadh3IXyV2C321GY0pjGfVBu4un0uDJkwgdxqO+Rdx8JMT8IfJIRwbYVz3Ob3Q==", - "dev": true - } - } - }, - "form-data-encoder": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-2.1.4.tgz", - "integrity": 
"sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==", - "dev": true - }, - "fp-and-or": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/fp-and-or/-/fp-and-or-0.1.4.tgz", - "integrity": "sha512-+yRYRhpnFPWXSly/6V4Lw9IfOV26uu30kynGJ03PW+MnjOEQe45RZ141QcS0aJehYBYA50GfCDnsRbFJdhssRw==", - "dev": true - }, - "fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "requires": { - "minipass": "^3.0.0" - } - }, - "fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" - }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true - }, - "function.prototype.name": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", - "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.0", - "functions-have-names": "^1.2.2" - } - }, - "functions-have-names": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", - "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", - "dev": true - }, - "gauge": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", - "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", - "requires": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.2", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.1", - "object-assign": "^4.1.1", - "signal-exit": "^3.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.2" - } - }, - "get-intrinsic": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", - "integrity": "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==", - "dev": true, - "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3" - } - }, - "get-stdin": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-8.0.0.tgz", - "integrity": "sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg==", - "dev": true - }, - "get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "dev": true - }, - "get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" - } - }, - "glob": { 
- "version": "7.2.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", - "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "requires": { - "is-glob": "^4.0.1" - } - }, - "global-dirs": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.1.tgz", - "integrity": "sha512-NBcGGFbBA9s1VzD41QXDG+3++t9Mn5t1FpLdhESY6oKY4gYTFpX4wO3sqGUa0Srjtbfj3szX0RnemmrVRUdULA==", - "dev": true, - "requires": { - "ini": "2.0.0" - }, - "dependencies": { - "ini": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", - "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", - "dev": true - } - } - }, - "globals": { - "version": "13.21.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.21.0.tgz", - "integrity": "sha512-ybyme3s4yy/t/3s35bewwXKOf7cvzfreG2lH0lZl0JB7I4GxRP2ghxOK/Nb9EkRXdbBXZLfq/p/0W2JUONB/Gg==", - "dev": true, - "requires": { - "type-fest": "^0.20.2" - } - }, - "globalthis": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", - "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", - "dev": true, - "requires": { - "define-properties": "^1.1.3" - } - }, - "globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dev": true, - "requires": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - } - }, - "gopd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", - "dev": true, - "requires": { - "get-intrinsic": "^1.1.3" - } - }, - "got": { - "version": "12.6.0", - "resolved": "https://registry.npmjs.org/got/-/got-12.6.0.tgz", - "integrity": "sha512-WTcaQ963xV97MN3x0/CbAriXFZcXCfgxVp91I+Ze6pawQOa7SgzwSx2zIJJsX+kTajMnVs0xcFD1TxZKFqhdnQ==", - "dev": true, - "requires": { - "@sindresorhus/is": "^5.2.0", - "@szmarczak/http-timer": "^5.0.1", - "cacheable-lookup": "^7.0.0", - "cacheable-request": "^10.2.8", - "decompress-response": "^6.0.0", - "form-data-encoder": "^2.1.2", - "get-stream": "^6.0.1", - "http2-wrapper": "^2.1.10", - "lowercase-keys": "^3.0.0", - "p-cancelable": "^3.0.0", - "responselike": "^3.0.0" - }, - "dependencies": { - "decompress-response": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", - "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", - "dev": true, - "requires": { - "mimic-response": "^3.1.0" - } - }, - "mimic-response": { - "version": "3.1.0", - "resolved": 
"https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", - "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", - "dev": true - } - } - }, - "graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true - }, - "graphemer": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", - "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", - "dev": true - }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "requires": { - "function-bind": "^1.1.1" - } - }, - "has-bigints": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", - "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", - "dev": true - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true - }, - "has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", - "dev": true, - "requires": { - "get-intrinsic": "^1.1.1" - } - }, - "has-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", - "dev": true - }, - "has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", - "dev": true - }, - "has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", - "dev": true, - "requires": { - "has-symbols": "^1.0.2" - } - }, - "has-unicode": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" - }, - "has-yarn": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-yarn/-/has-yarn-3.0.0.tgz", - "integrity": "sha512-IrsVwUHhEULx3R8f/aA8AHuEzAorplsab/v8HBzEiIukwq5i/EC+xmOW+HfP1OaDP+2JkgT1yILHN2O3UFIbcA==", - "dev": true - }, - "hosted-git-info": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.2.1.tgz", - "integrity": "sha512-xIcQYMnhcx2Nr4JTjsFmwwnr9vldugPy9uVm0o87bjqqWMv9GaqsTeT+i99wTl0mk1uLxJtHxLb8kymqTENQsw==", - "dev": true, - "requires": { - "lru-cache": "^7.5.1" - }, - "dependencies": { - "lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": 
"sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true - } - } - }, - "http-cache-semantics": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", - "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", - "dev": true - }, - "http-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", - "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", - "dev": true, - "requires": { - "@tootallnate/once": "2", - "agent-base": "6", - "debug": "4" - } - }, - "http2-wrapper": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-2.2.0.tgz", - "integrity": "sha512-kZB0wxMo0sh1PehyjJUWRFEd99KC5TLjZ2cULC4f9iqJBAmKQQXEICjxl5iPJRwP40dpeHFqqhm7tYCvODpqpQ==", - "dev": true, - "requires": { - "quick-lru": "^5.1.1", - "resolve-alpn": "^1.2.0" - } - }, - "https-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", - "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", - "requires": { - "agent-base": "6", - "debug": "4" - } - }, - "humanize-ms": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", - "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", - "dev": true, - "requires": { - "ms": "^2.0.0" - } - }, - "iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "optional": true, - "requires": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - } - }, - "ignore": { - "version": "5.2.4", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", - "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==", - "dev": true - }, - "ignore-walk": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-6.0.3.tgz", - "integrity": "sha512-C7FfFoTA+bI10qfeydT8aZbvr91vAEU+2W5BZUlzPec47oNb07SsOfwYrtxuvOYdUApPP/Qlh4DtAO51Ekk2QA==", - "dev": true, - "requires": { - "minimatch": "^9.0.0" - }, - "dependencies": { - "brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0" - } - }, - "minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "requires": { - "brace-expansion": "^2.0.1" - } - } - } - }, - "import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "dev": true, - "requires": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - } - }, - "import-lazy": { - 
"version": "4.0.0", - "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-4.0.0.tgz", - "integrity": "sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw==", - "dev": true - }, - "imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", - "dev": true - }, - "indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "dev": true - }, - "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "ini": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.1.tgz", - "integrity": "sha512-QQnnxNyfvmHFIsj7gkPcYymR8Jdw/o7mp5ZFihxn6h8Ci6fh3Dx4E1gPjpQEpIuPo9XVNY/ZUwh4BPMjGyL01g==", - "dev": true - }, - "internal-slot": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz", - "integrity": "sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==", - "dev": true, - "requires": { - "get-intrinsic": "^1.2.0", - "has": "^1.0.3", - "side-channel": "^1.0.4" - } - }, - "ip": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", - "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==", - "dev": true - }, - "is-array-buffer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", - "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.0", - "is-typed-array": "^1.1.10" - } - }, - "is-bigint": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", - "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", - "dev": true, - "requires": { - "has-bigints": "^1.0.1" - } - }, - "is-boolean-object": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", - "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - } - }, - "is-callable": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", - "dev": true - }, - "is-ci": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", - "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", - "dev": true, - "requires": { - "ci-info": "^3.2.0" - } - }, - "is-core-module": { - "version": "2.13.0", - 
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.0.tgz", - "integrity": "sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ==", - "dev": true, - "requires": { - "has": "^1.0.3" - } - }, - "is-date-object": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", - "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", - "dev": true, - "requires": { - "has-tostringtag": "^1.0.0" - } - }, - "is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" - }, - "is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "requires": { - "is-extglob": "^2.1.1" - } - }, - "is-installed-globally": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", - "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", - "dev": true, - "requires": { - "global-dirs": "^3.0.0", - "is-path-inside": "^3.0.2" - } - }, - "is-lambda": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", - "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==", - "dev": true - }, - "is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", - "dev": true - }, - "is-npm": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-6.0.0.tgz", - "integrity": "sha512-JEjxbSmtPSt1c8XTkVrlujcXdKV1/tvuQ7GwKcAlyiVLeYFQ2VHat8xfrDJsIkhCdF/tZ7CiIR3sy141c6+gPQ==", - "dev": true - }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, - "is-number-object": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", - "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", - "dev": true, - "requires": { - "has-tostringtag": "^1.0.0" - } - }, - "is-obj": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", - "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", - "dev": true - }, - "is-path-inside": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", - "dev": true - }, - "is-plain-object": { - "version": "5.0.0", 
- "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==" - }, - "is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - } - }, - "is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2" - } - }, - "is-string": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", - "dev": true, - "requires": { - "has-tostringtag": "^1.0.0" - } - }, - "is-symbol": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", - "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", - "dev": true, - "requires": { - "has-symbols": "^1.0.2" - } - }, - "is-typed-array": { - "version": "1.1.10", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz", - "integrity": "sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==", - "dev": true, - "requires": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" - } - }, - "is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==", - "dev": true - }, - "is-weakref": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", - "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", - "dev": true, - "requires": { - "call-bind": "^1.0.2" - } - }, - "is-yarn-global": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.4.1.tgz", - "integrity": "sha512-/kppl+R+LO5VmhYSEWARUFjodS25D68gvj8W7z0I7OWhUla5xWu8KL6CtB2V0R6yqhnRgbcaREMr4EEM6htLPQ==", - "dev": true - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "jackspeak": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", - "integrity": "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==", - "dev": true, - "requires": { - "@isaacs/cliui": "^8.0.2", - "@pkgjs/parseargs": "^0.11.0" - } - }, - "jju": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/jju/-/jju-1.4.0.tgz", - "integrity": "sha512-8wb9Yw966OSxApiCt0K3yNJL8pnNeIv+OEq2YMidz4FKP6nonSRoOXc80iXY4JaN2FC11B9qsNmDsm+ZOfMROA==", - "dev": true - }, - "js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": 
"sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "requires": { - "argparse": "^2.0.1" - } - }, - "json-buffer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", - "dev": true - }, - "json-parse-even-better-errors": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.0.tgz", - "integrity": "sha512-iZbGHafX/59r39gPwVPRBGw0QQKnA7tte5pSMrhWOW7swGsVvVTjmfyAV9pNqk8YGT7tRCdxRu8uzcgZwoDooA==", - "dev": true - }, - "json-parse-helpfulerror": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/json-parse-helpfulerror/-/json-parse-helpfulerror-1.0.3.tgz", - "integrity": "sha512-XgP0FGR77+QhUxjXkwOMkC94k3WtqEBfcnjWqhRd82qTat4SWKRE+9kUnynz/shm3I4ea2+qISvTIeGTNU7kJg==", - "dev": true, - "requires": { - "jju": "^1.1.0" - } - }, - "json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", - "dev": true - }, - "json5": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", - "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", - "dev": true, - "requires": { - "minimist": "^1.2.0" - } - }, - "jsonlines": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsonlines/-/jsonlines-0.1.1.tgz", - "integrity": "sha512-ekDrAGso79Cvf+dtm+mL8OBI2bmAOt3gssYs833De/C9NmIpWDWyUO4zPgB5x2/OhY366dkhgfPMYfwZF7yOZA==", - "dev": true - }, - "jsonparse": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", - "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==", - "dev": true - }, - "keyv": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.2.tgz", - "integrity": "sha512-5MHbFaKn8cNSmVW7BYnijeAVlE4cYA/SVkifVgrh7yotnfhKmjuXpDKjrABLnT0SfHWV21P8ow07OGfRrNDg8g==", - "dev": true, - "requires": { - "json-buffer": "3.0.1" - } - }, - "kleur": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", - "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", - "dev": true - }, - "latest-version": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-7.0.0.tgz", - "integrity": "sha512-KvNT4XqAMzdcL6ka6Tl3i2lYeFDgXNCuIX+xNx6ZMVR1dFq+idXd9FLKNMOIx0t9mJ9/HudyX4oZWXZQ0UJHeg==", - "dev": true, - "requires": { - "package-json": "^8.1.0" - } - }, - "levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "dev": true, - "requires": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - } - }, - "locate-path": { - "version": "6.0.0", - 
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "dev": true, - "requires": { - "p-locate": "^5.0.0" - } - }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" - }, - "lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true - }, - "lowercase-keys": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz", - "integrity": "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==", - "dev": true - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "luxon": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.3.tgz", - "integrity": "sha512-tFWBiv3h7z+T/tDaoxA8rqTxy1CHV6gHS//QdaH4pulbq/JuBSGgQspQQqcgnwdAx6pNI7cmvz5Sv/addzHmUg==" - }, - "make-dir": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", - "requires": { - "semver": "^6.0.0" - }, - "dependencies": { - "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" - } - } - }, - "make-fetch-happen": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz", - "integrity": "sha512-rLWS7GCSTcEujjVBs2YqG7Y4643u8ucvCJeSRqiLYhesrDuzeuFIk37xREzAsfQaqzl8b9rNCE4m6J8tvX4Q8w==", - "dev": true, - "requires": { - "agentkeepalive": "^4.2.1", - "cacache": "^17.0.0", - "http-cache-semantics": "^4.1.1", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", - "is-lambda": "^1.0.1", - "lru-cache": "^7.7.1", - "minipass": "^5.0.0", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "promise-retry": "^2.0.1", - "socks-proxy-agent": "^7.0.0", - "ssri": "^10.0.0" - }, - "dependencies": { - "lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true - }, - "minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true - } - } - }, - "merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true - }, - "micromatch": { - "version": "4.0.5", - 
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", - "dev": true, - "requires": { - "braces": "^3.0.2", - "picomatch": "^2.3.1" - } - }, - "mimic-response": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-2.1.0.tgz", - "integrity": "sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA==" - }, - "minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minimist": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", - "dev": true - }, - "minipass": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", - "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", - "requires": { - "yallist": "^4.0.0" - } - }, - "minipass-collect": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-1.0.2.tgz", - "integrity": "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==", - "dev": true, - "requires": { - "minipass": "^3.0.0" - } - }, - "minipass-fetch": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.3.tgz", - "integrity": "sha512-n5ITsTkDqYkYJZjcRWzZt9qnZKCT7nKCosJhHoj7S7zD+BP4jVbWs+odsniw5TA3E0sLomhTKOKjF86wf11PuQ==", - "dev": true, - "requires": { - "encoding": "^0.1.13", - "minipass": "^5.0.0", - "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" - }, - "dependencies": { - "minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true - } - } - }, - "minipass-flush": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", - "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", - "dev": true, - "requires": { - "minipass": "^3.0.0" - } - }, - "minipass-json-stream": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/minipass-json-stream/-/minipass-json-stream-1.0.1.tgz", - "integrity": "sha512-ODqY18UZt/I8k+b7rl2AENgbWE8IDYam+undIJONvigAz8KR5GWblsFTEfQs0WODsjbSXWlm+JHEv8Gr6Tfdbg==", - "dev": true, - "requires": { - "jsonparse": "^1.3.1", - "minipass": "^3.0.0" - } - }, - "minipass-pipeline": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", - "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", - "dev": true, - "requires": { - "minipass": "^3.0.0" - } - }, - "minipass-sized": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", - "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", - "dev": true, - "requires": { - 
"minipass": "^3.0.0" - } - }, - "minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "requires": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - } - }, - "mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "nan": { - "version": "2.15.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.15.0.tgz", - "integrity": "sha512-8ZtvEnA2c5aYCZYd1cvgdnU6cqwixRoYg70xPLWUws5ORTa/lnw+u4amixRS/Ac5U5mQVgp9pnlSUnbNWFaWZQ==" - }, - "natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", - "dev": true - }, - "negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", - "dev": true - }, - "node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "requires": { - "whatwg-url": "^5.0.0" - } - }, - "node-gyp": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-9.4.0.tgz", - "integrity": "sha512-dMXsYP6gc9rRbejLXmTbVRYjAHw7ppswsKyMxuxJxxOHzluIO1rGp9TOQgjFJ+2MCqcOcQTOPB/8Xwhr+7s4Eg==", - "dev": true, - "requires": { - "env-paths": "^2.2.0", - "exponential-backoff": "^3.1.1", - "glob": "^7.1.4", - "graceful-fs": "^4.2.6", - "make-fetch-happen": "^11.0.3", - "nopt": "^6.0.0", - "npmlog": "^6.0.0", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.2", - "which": "^2.0.2" - }, - "dependencies": { - "are-we-there-yet": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", - "integrity": "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==", - "dev": true, - "requires": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - } - }, - "gauge": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", - "integrity": "sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==", - "dev": true, - "requires": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.3", - "console-control-strings": "^1.1.0", - "has-unicode": "^2.0.1", - "signal-exit": "^3.0.7", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.5" - } - }, - "nopt": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-6.0.0.tgz", - "integrity": "sha512-ZwLpbTgdhuZUnZzjd7nb1ZV+4DoiC6/sfiVKok72ym/4Tlf+DFdlHYmT2JPmcNNWV6Pi3SDf1kT+A4r9RTuT9g==", - "dev": true, - "requires": { - "abbrev": "^1.0.0" - } - }, - "npmlog": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz", - "integrity": 
"sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==", - "dev": true, - "requires": { - "are-we-there-yet": "^3.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^4.0.3", - "set-blocking": "^2.0.0" - } - } - } - }, - "nopt": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", - "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", - "requires": { - "abbrev": "1" - } - }, - "normalize-package-data": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-5.0.0.tgz", - "integrity": "sha512-h9iPVIfrVZ9wVYQnxFgtw1ugSvGEMOlyPWWtm8BMJhnwyEL/FLbYbTY3V3PpjI/BUK67n9PEWDu6eHzu1fB15Q==", - "dev": true, - "requires": { - "hosted-git-info": "^6.0.0", - "is-core-module": "^2.8.1", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - }, - "dependencies": { - "hosted-git-info": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz", - "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==", - "dev": true, - "requires": { - "lru-cache": "^7.5.1" - } - }, - "lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true - } - } - }, - "normalize-url": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-8.0.0.tgz", - "integrity": "sha512-uVFpKhj5MheNBJRTiMZ9pE/7hD1QTeEvugSJW/OmLzAp78PB5O6adfMNTvmfKhXBkvCzC+rqifWcVYpGFwTjnw==", - "dev": true - }, - "npm-bundled": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-3.0.0.tgz", - "integrity": "sha512-Vq0eyEQy+elFpzsKjMss9kxqb9tG3YHg4dsyWuUENuzvSUWe1TCnW/vV9FkhvBk/brEDoDiVd+M1Btosa6ImdQ==", - "dev": true, - "requires": { - "npm-normalize-package-bin": "^3.0.0" - } - }, - "npm-check-updates": { - "version": "16.14.6", - "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.6.tgz", - "integrity": "sha512-sJ6w4AmSDP7YzBXah94Ul2JhiIbjBDfx9XYgib15um2wtiQkOyjE7Lov3MNUSQ84Ry7T81mE4ynMbl/mGbK4HQ==", - "dev": true, - "requires": { - "chalk": "^5.3.0", - "cli-table3": "^0.6.3", - "commander": "^10.0.1", - "fast-memoize": "^2.5.2", - "find-up": "5.0.0", - "fp-and-or": "^0.1.4", - "get-stdin": "^8.0.0", - "globby": "^11.0.4", - "hosted-git-info": "^5.1.0", - "ini": "^4.1.1", - "js-yaml": "^4.1.0", - "json-parse-helpfulerror": "^1.0.3", - "jsonlines": "^0.1.1", - "lodash": "^4.17.21", - "make-fetch-happen": "^11.1.1", - "minimatch": "^9.0.3", - "p-map": "^4.0.0", - "pacote": "15.2.0", - "parse-github-url": "^1.0.2", - "progress": "^2.0.3", - "prompts-ncu": "^3.0.0", - "rc-config-loader": "^4.1.3", - "remote-git-tags": "^3.0.0", - "rimraf": "^5.0.5", - "semver": "^7.5.4", - "semver-utils": "^1.1.4", - "source-map-support": "^0.5.21", - "spawn-please": "^2.0.2", - "strip-ansi": "^7.1.0", - "strip-json-comments": "^5.0.1", - "untildify": "^4.0.0", - "update-notifier": "^6.0.2" - }, - "dependencies": { - "ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true - }, - 
"brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0" - } - }, - "chalk": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", - "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", - "dev": true - }, - "glob": { - "version": "10.3.10", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.10.tgz", - "integrity": "sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g==", - "dev": true, - "requires": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.3.5", - "minimatch": "^9.0.1", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", - "path-scurry": "^1.10.1" - } - }, - "minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "requires": { - "brace-expansion": "^2.0.1" - } - }, - "minipass": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.4.tgz", - "integrity": "sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==", - "dev": true - }, - "rimraf": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.5.tgz", - "integrity": "sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A==", - "dev": true, - "requires": { - "glob": "^10.3.7" - } - }, - "strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dev": true, - "requires": { - "ansi-regex": "^6.0.1" - } - }, - "strip-json-comments": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-5.0.1.tgz", - "integrity": "sha512-0fk9zBqO67Nq5M/m45qHCJxylV/DhBlIOVExqgOMiCCrzrhU6tCibRXNqE3jwJLftzE9SNuZtYbpzcO+i9FiKw==", - "dev": true - } - } - }, - "npm-install-checks": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-6.1.1.tgz", - "integrity": "sha512-dH3GmQL4vsPtld59cOn8uY0iOqRmqKvV+DLGwNXV/Q7MDgD2QfOADWd/mFXcIE5LVhYYGjA3baz6W9JneqnuCw==", - "dev": true, - "requires": { - "semver": "^7.1.1" - } - }, - "npm-normalize-package-bin": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz", - "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==", - "dev": true - }, - "npm-package-arg": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz", - "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==", - "dev": true, - "requires": { - "hosted-git-info": "^6.0.0", - "proc-log": "^3.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^5.0.0" - }, - "dependencies": { - "hosted-git-info": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz", - "integrity": 
"sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==", - "dev": true, - "requires": { - "lru-cache": "^7.5.1" - } - }, - "lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true - } - } - }, - "npm-packlist": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-7.0.4.tgz", - "integrity": "sha512-d6RGEuRrNS5/N84iglPivjaJPxhDbZmlbTwTDX2IbcRHG5bZCdtysYMhwiPvcF4GisXHGn7xsxv+GQ7T/02M5Q==", - "dev": true, - "requires": { - "ignore-walk": "^6.0.0" - } - }, - "npm-pick-manifest": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz", - "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==", - "dev": true, - "requires": { - "npm-install-checks": "^6.0.0", - "npm-normalize-package-bin": "^3.0.0", - "npm-package-arg": "^10.0.0", - "semver": "^7.3.5" - } - }, - "npm-registry-fetch": { - "version": "14.0.5", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-14.0.5.tgz", - "integrity": "sha512-kIDMIo4aBm6xg7jOttupWZamsZRkAqMqwqqbVXnUqstY5+tapvv6bkH/qMR76jdgV+YljEUCyWx3hRYMrJiAgA==", - "dev": true, - "requires": { - "make-fetch-happen": "^11.0.0", - "minipass": "^5.0.0", - "minipass-fetch": "^3.0.0", - "minipass-json-stream": "^1.0.1", - "minizlib": "^2.1.2", - "npm-package-arg": "^10.0.0", - "proc-log": "^3.0.0" - }, - "dependencies": { - "minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true - } - } - }, - "npmlog": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", - "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", - "requires": { - "are-we-there-yet": "^2.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^3.0.0", - "set-blocking": "^2.0.0" - } - }, - "object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" - }, - "object-inspect": { - "version": "1.12.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", - "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", - "dev": true - }, - "object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true - }, - "object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "has-symbols": "^1.0.3", - "object-keys": "^1.1.1" - } - }, - "object.fromentries": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.6.tgz", - "integrity": 
"sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - } - }, - "object.groupby": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.0.tgz", - "integrity": "sha512-70MWG6NfRH9GnbZOikuhPPYzpUpof9iW2J9E4dW7FXTqPNb6rllE6u39SKwwiNh8lCwX3DDb5OgcKGiEBrTTyw==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.21.2", - "get-intrinsic": "^1.2.1" - } - }, - "object.values": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz", - "integrity": "sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - } - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "requires": { - "wrappy": "1" - } - }, - "optionator": { - "version": "0.9.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", - "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", - "dev": true, - "requires": { - "@aashutoshrathi/word-wrap": "^1.2.3", - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0" - } - }, - "p-cancelable": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-3.0.0.tgz", - "integrity": "sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==", - "dev": true - }, - "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": true, - "requires": { - "yocto-queue": "^0.1.0" - } - }, - "p-locate": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "dev": true, - "requires": { - "p-limit": "^3.0.2" - } - }, - "p-map": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", - "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", - "dev": true, - "requires": { - "aggregate-error": "^3.0.0" - } - }, - "package-json": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/package-json/-/package-json-8.1.0.tgz", - "integrity": "sha512-hySwcV8RAWeAfPsXb9/HGSPn8lwDnv6fabH+obUZKX169QknRkRhPxd1yMubpKDskLFATkl3jHpNtVtDPFA0Wg==", - "dev": true, - "requires": { - "got": "^12.1.0", - "registry-auth-token": "^5.0.1", - "registry-url": "^6.0.0", - "semver": "^7.3.7" - } - }, - "pacote": { - "version": "15.2.0", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-15.2.0.tgz", - "integrity": "sha512-rJVZeIwHTUta23sIZgEIM62WYwbmGbThdbnkt81ravBplQv+HjyroqnLRNH2+sLJHcGZmLRmhPwACqhfTcOmnA==", - "dev": true, - "requires": { - "@npmcli/git": "^4.0.0", - "@npmcli/installed-package-contents": "^2.0.1", - "@npmcli/promise-spawn": "^6.0.1", - "@npmcli/run-script": "^6.0.0", - "cacache": "^17.0.0", - 
"fs-minipass": "^3.0.0", - "minipass": "^5.0.0", - "npm-package-arg": "^10.0.0", - "npm-packlist": "^7.0.0", - "npm-pick-manifest": "^8.0.0", - "npm-registry-fetch": "^14.0.0", - "proc-log": "^3.0.0", - "promise-retry": "^2.0.1", - "read-package-json": "^6.0.0", - "read-package-json-fast": "^3.0.0", - "sigstore": "^1.3.0", - "ssri": "^10.0.0", - "tar": "^6.1.11" - }, - "dependencies": { - "fs-minipass": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.2.tgz", - "integrity": "sha512-2GAfyfoaCDRrM6jaOS3UsBts8yJ55VioXdWcOL7dK9zdAuKT71+WBA4ifnNYqVjYv+4SsPxjK0JT4yIIn4cA/g==", - "dev": true, - "requires": { - "minipass": "^5.0.0" - } - }, - "minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true - } - } - }, - "parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "dev": true, - "requires": { - "callsites": "^3.0.0" - } - }, - "parse-github-url": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/parse-github-url/-/parse-github-url-1.0.2.tgz", - "integrity": "sha512-kgBf6avCbO3Cn6+RnzRGLkUsv4ZVqv/VfAYkRsyBcgkshNvVBkRn1FEZcW0Jb+npXQWm2vHPnnOqFteZxRRGNw==", - "dev": true - }, - "path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "dev": true - }, - "path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" - }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true - }, - "path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true - }, - "path-scurry": { - "version": "1.10.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.1.tgz", - "integrity": "sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==", - "dev": true, - "requires": { - "lru-cache": "^9.1.1 || ^10.0.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "dependencies": { - "lru-cache": { - "version": "9.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-9.1.1.tgz", - "integrity": "sha512-65/Jky17UwSb0BuB9V+MyDpsOtXKmYwzhyl+cOa9XUiI4uV2Ouy/2voFP3+al0BjZbJgMBD8FojMpAf+Z+qn4A==", - "dev": true - }, - "minipass": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-6.0.2.tgz", - "integrity": "sha512-MzWSV5nYVT7mVyWCwn2o7JH13w2TBRmmSqSRCKzTw+lmft9X4z+3wjvs06Tzijo5z4W/kahUCDpRXTF+ZrmF/w==", - "dev": true - } - } - }, - "path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": 
"sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "dev": true - }, - "picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true - }, - "prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", - "dev": true - }, - "proc-log": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-3.0.0.tgz", - "integrity": "sha512-++Vn7NS4Xf9NacaU9Xq3URUuqZETPsf8L4j5/ckhaRYsfPeRyzGw+iDjFhV/Jr3uNmTvvddEJFWh5R1gRgUH8A==", - "dev": true - }, - "progress": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", - "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", - "dev": true - }, - "promise-inflight": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", - "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==", - "dev": true - }, - "promise-retry": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", - "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", - "dev": true, - "requires": { - "err-code": "^2.0.2", - "retry": "^0.12.0" - } - }, - "prompts-ncu": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/prompts-ncu/-/prompts-ncu-3.0.0.tgz", - "integrity": "sha512-qyz9UxZ5MlPKWVhWrCmSZ1ahm2GVYdjLb8og2sg0IPth1KRuhcggHGuijz0e41dkx35p1t1q3GRISGH7QGALFA==", - "dev": true, - "requires": { - "kleur": "^4.0.1", - "sisteransi": "^1.0.5" - } - }, - "proto-list": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", - "integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==", - "dev": true - }, - "punycode": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", - "dev": true - }, - "pupa": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/pupa/-/pupa-3.1.0.tgz", - "integrity": "sha512-FLpr4flz5xZTSJxSeaheeMKN/EDzMdK7b8PTOC6a5PYFKTucWbdqjgqaEyH0shFiSJrVB1+Qqi4Tk19ccU6Aug==", - "dev": true, - "requires": { - "escape-goat": "^4.0.0" - } - }, - "queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true - }, - "quick-lru": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", - "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", - "dev": true - }, - "rc": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", - "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", - "dev": true, - "requires": { - 
"deep-extend": "^0.6.0", - "ini": "~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - }, - "dependencies": { - "ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", - "dev": true - }, - "strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", - "dev": true - } - } - }, - "rc-config-loader": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/rc-config-loader/-/rc-config-loader-4.1.3.tgz", - "integrity": "sha512-kD7FqML7l800i6pS6pvLyIE2ncbk9Du8Q0gp/4hMPhJU6ZxApkoLcGD8ZeqgiAlfwZ6BlETq6qqe+12DUL207w==", - "dev": true, - "requires": { - "debug": "^4.3.4", - "js-yaml": "^4.1.0", - "json5": "^2.2.2", - "require-from-string": "^2.0.2" - }, - "dependencies": { - "json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "dev": true - } - } - }, - "read-package-json": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-6.0.4.tgz", - "integrity": "sha512-AEtWXYfopBj2z5N5PbkAOeNHRPUg5q+Nen7QLxV8M2zJq1ym6/lCz3fYNTCXe19puu2d06jfHhrP7v/S2PtMMw==", - "dev": true, - "requires": { - "glob": "^10.2.2", - "json-parse-even-better-errors": "^3.0.0", - "normalize-package-data": "^5.0.0", - "npm-normalize-package-bin": "^3.0.0" - }, - "dependencies": { - "brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0" - } - }, - "glob": { - "version": "10.3.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.3.tgz", - "integrity": "sha512-92vPiMb/iqpmEgsOoIDvTjc50wf9CCCvMzsi6W0JLPeUKE8TWP1a73PgqSrqy7iAZxaSD1YdzU7QZR5LF51MJw==", - "dev": true, - "requires": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.0.3", - "minimatch": "^9.0.1", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", - "path-scurry": "^1.10.1" - } - }, - "minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "requires": { - "brace-expansion": "^2.0.1" - } - }, - "minipass": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.2.tgz", - "integrity": "sha512-eL79dXrE1q9dBbDCLg7xfn/vl7MS4F1gvJAgjJrQli/jbQWdUttuVawphqpffoIYfRdq78LHx6GP4bU/EQ2ATA==", - "dev": true - } - } - }, - "read-package-json-fast": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-3.0.2.tgz", - "integrity": "sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==", - "dev": true, - "requires": { - "json-parse-even-better-errors": "^3.0.0", - "npm-normalize-package-bin": "^3.0.0" - } - }, - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - 
"integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, - "regexp.prototype.flags": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", - "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "functions-have-names": "^1.2.2" - } - }, - "regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", - "dev": true - }, - "registry-auth-token": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-5.0.2.tgz", - "integrity": "sha512-o/3ikDxtXaA59BmZuZrJZDJv8NMDGSj+6j6XaeBmHw8eY1i1qd9+6H+LjVvQXx3HN6aRCGa1cUdJ9RaJZUugnQ==", - "dev": true, - "requires": { - "@pnpm/npm-conf": "^2.1.0" - } - }, - "registry-url": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-6.0.1.tgz", - "integrity": "sha512-+crtS5QjFRqFCoQmvGduwYWEBng99ZvmFvF+cUJkGYF1L1BfU8C6Zp9T7f5vPAwyLkUExpvK+ANVZmGU49qi4Q==", - "dev": true, - "requires": { - "rc": "1.2.8" - } - }, - "remote-git-tags": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/remote-git-tags/-/remote-git-tags-3.0.0.tgz", - "integrity": "sha512-C9hAO4eoEsX+OXA4rla66pXZQ+TLQ8T9dttgQj18yuKlPMTVkIkdYXvlMC55IuUsIkV6DpmQYi10JKFLaU+l7w==", - "dev": true - }, - "require-from-string": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", - "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", - "dev": true - }, - "resolve": { - "version": "1.22.4", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.4.tgz", - "integrity": "sha512-PXNdCiPqDqeUou+w1C2eTQbNfxKSuMxqTCuvlmmMsk1NWHL5fRrhY6Pl0qEYYc6+QqGClco1Qj8XnjPego4wfg==", - "dev": true, - "requires": { - "is-core-module": "^2.13.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - } - }, - "resolve-alpn": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", - "integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", - "dev": true - }, - "resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "dev": true - }, - "responselike": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/responselike/-/responselike-3.0.0.tgz", - "integrity": "sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==", - "dev": true, - "requires": { - "lowercase-keys": "^3.0.0" - } - }, - "retry": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", - "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", - "dev": true - }, - "reusify": { - "version": "1.0.4", - "resolved": 
"https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true - }, - "rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "requires": { - "glob": "^7.1.3" - } - }, - "run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "requires": { - "queue-microtask": "^1.2.2" - } - }, - "safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" - }, - "safe-regex-test": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", - "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", - "is-regex": "^1.1.4" - } - }, - "safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "optional": true - }, - "semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "semver-diff": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-4.0.0.tgz", - "integrity": "sha512-0Ju4+6A8iOnpL/Thra7dZsSlOHYAHIeMxfhWQRI1/VLcT3WDBZKKtQt/QkBOsiIN9ZpuvHE6cGZ0x4glCMmfiA==", - "dev": true, - "requires": { - "semver": "^7.3.5" - } - }, - "semver-utils": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/semver-utils/-/semver-utils-1.1.4.tgz", - "integrity": "sha512-EjnoLE5OGmDAVV/8YDoN5KiajNadjzIp9BAHOhYeQHt7j0UWxjmgsx4YD48wp4Ue1Qogq38F1GNUJNqF1kKKxA==", - "dev": true - }, - "set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" - }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true - }, - "side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "dev": true, - "requires": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": 
"^1.9.0" - } - }, - "signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" - }, - "sigstore": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-1.8.0.tgz", - "integrity": "sha512-ogU8qtQ3VFBawRJ8wjsBEX/vIFeHuGs1fm4jZtjWQwjo8pfAt7T/rh+udlAN4+QUe0IzA8qRSc/YZ7dHP6kh+w==", - "dev": true, - "requires": { - "@sigstore/bundle": "^1.0.0", - "@sigstore/protobuf-specs": "^0.2.0", - "@sigstore/tuf": "^1.0.3", - "make-fetch-happen": "^11.0.1" - } - }, - "simple-concat": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", - "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==" - }, - "simple-get": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-3.1.1.tgz", - "integrity": "sha512-CQ5LTKGfCpvE1K0n2us+kuMPbk/q0EKl82s4aheV9oXjFEz6W/Y7oQFVJuU6QG77hRT4Ghb5RURteF5vnWjupA==", - "requires": { - "decompress-response": "^4.2.0", - "once": "^1.3.1", - "simple-concat": "^1.0.0" - } - }, - "sisteransi": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", - "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", - "dev": true - }, - "slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true - }, - "smart-buffer": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", - "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", - "dev": true - }, - "socks": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.7.1.tgz", - "integrity": "sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==", - "dev": true, - "requires": { - "ip": "^2.0.0", - "smart-buffer": "^4.2.0" - } - }, - "socks-proxy-agent": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-7.0.0.tgz", - "integrity": "sha512-Fgl0YPZ902wEsAyiQ+idGd1A7rSFx/ayC1CQVMw5P+EQx2V0SgpGtf6OKFhVjPflPUl9YMmEOnmfjCdMUsygww==", - "dev": true, - "requires": { - "agent-base": "^6.0.2", - "debug": "^4.3.3", - "socks": "^2.6.2" - } - }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true - }, - "source-map-support": { - "version": "0.5.21", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", - "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", - "dev": true, - "requires": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "spawn-please": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/spawn-please/-/spawn-please-2.0.2.tgz", - "integrity": "sha512-KM8coezO6ISQ89c1BzyWNtcn2V2kAVtwIXd3cN/V5a0xPYc1F/vydrRc01wsKFEQ/p+V1a4sw4z2yMITIXrgGw==", - "dev": true, - "requires": { - "cross-spawn": "^7.0.3" - } - }, - 
"spdx-correct": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", - "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", - "dev": true, - "requires": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "spdx-exceptions": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", - "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", - "dev": true - }, - "spdx-expression-parse": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", - "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", - "dev": true, + "@octokit/graphql": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz", + "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==", "requires": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" + "@octokit/request": "^8.4.1", + "@octokit/types": "^13.0.0", + "universal-user-agent": "^6.0.0" } }, - "spdx-license-ids": { - "version": "3.0.13", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.13.tgz", - "integrity": "sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w==", - "dev": true + "@octokit/openapi-types": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==" }, - "ssri": { - "version": "10.0.4", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.4.tgz", - "integrity": "sha512-12+IR2CB2C28MMAw0Ncqwj5QbTcs0nGIhgJzYWzDkb21vWmfNI83KS4f3Ci6GI98WreIfG7o9UXp3C0qbpA8nQ==", - "dev": true, + "@octokit/plugin-paginate-rest": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.2.tgz", + "integrity": "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ==", "requires": { - "minipass": "^5.0.0" + "@octokit/types": "^12.6.0" }, "dependencies": { - "minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true + "@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" + }, + "@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "requires": { + "@octokit/openapi-types": "^20.0.0" + } } } }, - "string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "requires": { - "safe-buffer": "~5.2.0" - } - }, - 
"string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - } - }, - "string-width-cjs": { - "version": "npm:string-width@4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - } - }, - "string.prototype.trim": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz", - "integrity": "sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - } - }, - "string.prototype.trimend": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz", - "integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - } - }, - "string.prototype.trimstart": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz", - "integrity": "sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "@octokit/plugin-rest-endpoint-methods": { + "version": "10.4.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz", + "integrity": "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==", "requires": { - "ansi-regex": "^5.0.1" + "@octokit/types": "^12.6.0" + }, + "dependencies": { + "@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" + }, + "@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "requires": { + "@octokit/openapi-types": "^20.0.0" + } + } } }, - "strip-ansi-cjs": { - "version": "npm:strip-ansi@6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, + "@octokit/request": { + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.1.tgz", + "integrity": 
"sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==", "requires": { - "ansi-regex": "^5.0.1" + "@octokit/endpoint": "^9.0.6", + "@octokit/request-error": "^5.1.1", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" } }, - "strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", - "dev": true - }, - "strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true - }, - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, + "@octokit/request-error": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", "requires": { - "has-flag": "^4.0.0" + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" } }, - "supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", - "dev": true - }, - "tar": { - "version": "6.1.11", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.11.tgz", - "integrity": "sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==", + "@octokit/types": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", "requires": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^3.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" + "@octokit/openapi-types": "^24.2.0" } }, - "text-table": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", - "dev": true - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - "is-number": "^7.0.0" - } + "before-after-hook": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" }, - "tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=" + "deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" }, - 
"tsconfig-paths": { - "version": "3.14.2", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz", - "integrity": "sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==", - "dev": true, - "requires": { - "@types/json5": "^0.0.29", - "json5": "^1.0.2", - "minimist": "^1.2.6", - "strip-bom": "^3.0.0" - } + "lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, - "tslib": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", - "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" + "luxon": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.7.1.tgz", + "integrity": "sha512-RkRWjA926cTvz5rAb1BqyWkKbbjzCGchDUIKMCUvNi17j6f6j8uHGDV82Aqcqtzd+icoYpELmG3ksgGiFNNcNg==" }, - "tuf-js": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.1.7.tgz", - "integrity": "sha512-i3P9Kgw3ytjELUfpuKVDNBJvk4u5bXL6gskv572mcevPbSKCV3zt3djhmlEQ65yERjIbOSncy7U4cQJaB1CBCg==", - "dev": true, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "requires": { - "@tufjs/models": "1.0.4", - "debug": "^4.3.4", - "make-fetch-happen": "^11.1.1" + "wrappy": "1" } }, "tunnel": { @@ -10211,363 +496,23 @@ "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" }, - "type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "dev": true, - "requires": { - "prelude-ls": "^1.2.1" - } - }, - "type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "dev": true - }, - "typed-array-length": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", - "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "is-typed-array": "^1.1.9" - } - }, - "typedarray-to-buffer": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", - "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", - "dev": true, - "requires": { - "is-typedarray": "^1.0.0" - } - }, - "unbox-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" - } - }, "undici": { - "version": "5.26.4", - "resolved": 
"https://registry.npmjs.org/undici/-/undici-5.26.4.tgz", - "integrity": "sha512-OG+QOf0fTLtazL9P9X7yqWxQ+Z0395Wk6DSkyTxtaq3wQEjIroVe7Y4asCX/vcCxYpNGMnwz8F0qbRYUoaQVMw==", + "version": "5.29.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", + "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", "requires": { "@fastify/busboy": "^2.0.0" } }, - "unique-filename": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", - "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==", - "dev": true, - "requires": { - "unique-slug": "^4.0.0" - } - }, - "unique-slug": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz", - "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==", - "dev": true, - "requires": { - "imurmurhash": "^0.1.4" - } - }, - "unique-string": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-3.0.0.tgz", - "integrity": "sha512-VGXBUVwxKMBUznyffQweQABPRRW1vHZAbadFZud4pLFAqRGvv/96vafgjWFqzourzr8YonlQiPgH0YCJfawoGQ==", - "dev": true, - "requires": { - "crypto-random-string": "^4.0.0" - } - }, "universal-user-agent": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", - "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" - }, - "untildify": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", - "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", - "dev": true - }, - "update-notifier": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-6.0.2.tgz", - "integrity": "sha512-EDxhTEVPZZRLWYcJ4ZXjGFN0oP7qYvbXWzEgRm/Yql4dHX5wDbvh89YHP6PK1lzZJYrMtXUuZZz8XGK+U6U1og==", - "dev": true, - "requires": { - "boxen": "^7.0.0", - "chalk": "^5.0.1", - "configstore": "^6.0.0", - "has-yarn": "^3.0.0", - "import-lazy": "^4.0.0", - "is-ci": "^3.0.1", - "is-installed-globally": "^0.4.0", - "is-npm": "^6.0.0", - "is-yarn-global": "^0.4.0", - "latest-version": "^7.0.0", - "pupa": "^3.1.0", - "semver": "^7.3.7", - "semver-diff": "^4.0.0", - "xdg-basedir": "^5.1.0" - }, - "dependencies": { - "chalk": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.2.0.tgz", - "integrity": "sha512-ree3Gqw/nazQAPuJJEy+avdl7QfZMcUvmHIKgEZkGL+xOBzRvup5Hxo6LHuMceSxOabuJLJm5Yp/92R9eMmMvA==", - "dev": true - } - } - }, - "uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "requires": { - "punycode": "^2.1.0" - } - }, - "util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" - }, - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" - }, - "validate-npm-package-license": { - "version": "3.0.4", - "resolved": 
"https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "dev": true, - "requires": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "validate-npm-package-name": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.0.tgz", - "integrity": "sha512-YuKoXDAhBYxY7SfOKxHBDoSyENFeW5VvIIQp2TGQuit8gpK6MnWaQelBKxso72DoxTZfZdcP3W90LqpSkgPzLQ==", - "dev": true, - "requires": { - "builtins": "^5.0.0" - } - }, - "webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=" - }, - "whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", - "requires": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - }, - "which-boxed-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", - "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", - "dev": true, - "requires": { - "is-bigint": "^1.0.1", - "is-boolean-object": "^1.1.0", - "is-number-object": "^1.0.4", - "is-string": "^1.0.5", - "is-symbol": "^1.0.3" - } - }, - "which-typed-array": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz", - "integrity": "sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==", - "dev": true, - "requires": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0", - "is-typed-array": "^1.1.10" - } - }, - "wide-align": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", - "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", - "requires": { - "string-width": "^1.0.2 || 2 || 3 || 4" - } - }, - "widest-line": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz", - "integrity": "sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==", - "dev": true, - "requires": { - "string-width": "^5.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true - }, - "emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": 
"sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "requires": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - } - }, - "strip-ansi": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", - "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", - "dev": true, - "requires": { - "ansi-regex": "^6.0.1" - } - } - } - }, - "wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "dev": true, - "requires": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true - }, - "ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "dev": true - }, - "emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "requires": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - } - }, - "strip-ansi": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", - "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", - "dev": true, - "requires": { - "ansi-regex": "^6.0.1" - } - } - } - }, - "wrap-ansi-cjs": { - "version": "npm:wrap-ansi@7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - } + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", + "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==" }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" - }, - "write-file-atomic": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", - "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", - "dev": true, - "requires": { - "imurmurhash": "^0.1.4", - "is-typedarray": "^1.0.0", - "signal-exit": "^3.0.2", - "typedarray-to-buffer": "^3.1.5" - } - }, - "xdg-basedir": { - "version": "5.1.0", - "resolved": 
"https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-5.1.0.tgz", - "integrity": "sha512-GCPAHLvrIH13+c0SuacwvRYj2SxJXQ4kaVTT5xgL3kPrz56XxkF21IGhjSE1+W0aw7gpBWRGXLCPnPby6lSpmQ==", - "dev": true - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, - "yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" } } } diff --git a/dev/coverage-action/package.json b/dev/coverage-action/package.json index 8089553881..3f72b78028 100644 --- a/dev/coverage-action/package.json +++ b/dev/coverage-action/package.json @@ -6,19 +6,9 @@ "author": "IETF Trust", "license": "BSD-3-Clause", "dependencies": { - "@actions/core": "1.10.1", - "@actions/github": "6.0.0", - "chart.js": "3.5.1", - "chartjs-node-canvas": "4.1.6", + "@actions/core": "1.11.1", + "@actions/github": "6.0.1", "lodash": "4.17.21", - "luxon": "3.4.3" - }, - "devDependencies": { - "eslint": "8.51.0", - "eslint-config-standard": "17.1.0", - "eslint-plugin-import": "2.28.1", - "eslint-plugin-node": "11.1.0", - "eslint-plugin-promise": "6.1.1", - "npm-check-updates": "16.14.6" + "luxon": "3.7.1" } } diff --git a/dev/del-old-packages/package-lock.json b/dev/del-old-packages/package-lock.json index 969a713cb6..9899b290fb 100644 --- a/dev/del-old-packages/package-lock.json +++ b/dev/del-old-packages/package-lock.json @@ -10,7 +10,7 @@ "license": "ISC", "dependencies": { "@octokit/core": "^4.2.4", - "luxon": "^3.4.3" + "luxon": "^3.4.4" } }, "node_modules/@octokit/auth-token": { @@ -141,9 +141,9 @@ } }, "node_modules/luxon": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.3.tgz", - "integrity": "sha512-tFWBiv3h7z+T/tDaoxA8rqTxy1CHV6gHS//QdaH4pulbq/JuBSGgQspQQqcgnwdAx6pNI7cmvz5Sv/addzHmUg==", + "version": "3.4.4", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.4.tgz", + "integrity": "sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==", "engines": { "node": ">=12" } @@ -315,9 +315,9 @@ "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==" }, "luxon": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.3.tgz", - "integrity": "sha512-tFWBiv3h7z+T/tDaoxA8rqTxy1CHV6gHS//QdaH4pulbq/JuBSGgQspQQqcgnwdAx6pNI7cmvz5Sv/addzHmUg==" + "version": "3.4.4", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.4.tgz", + "integrity": "sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==" }, "node-fetch": { "version": "2.6.7", diff --git a/dev/del-old-packages/package.json b/dev/del-old-packages/package.json index 6f1dc9328d..c0b57b7f7b 100644 --- a/dev/del-old-packages/package.json +++ b/dev/del-old-packages/package.json @@ -11,6 +11,6 @@ "license": "ISC", "dependencies": { "@octokit/core": "^4.2.4", - "luxon": "^3.4.3" + "luxon": "^3.4.4" } } diff --git a/dev/deploy-to-container/cli.js b/dev/deploy-to-container/cli.js index d0d4cef548..1a2d993ac4 100644 --- a/dev/deploy-to-container/cli.js +++ b/dev/deploy-to-container/cli.js @@ -3,7 +3,7 @@ import Docker from 
'dockerode' import path from 'path' import fs from 'fs-extra' -import tar from 'tar' +import * as tar from 'tar' import yargs from 'yargs/yargs' import { hideBin } from 'yargs/helpers' import slugify from 'slugify' @@ -23,7 +23,7 @@ async function main () { throw new Error('Missing --branch argument!') } if (branch.indexOf('/') >= 0) { - branch = branch.split('/')[1] + branch = branch.split('/').slice(1).join('-') } branch = slugify(branch, { lower: true, strict: true }) if (branch.length < 1) { @@ -67,8 +67,10 @@ async function main () { .replace('__DBHOST__', `dt-db-${branch}`) .replace('__SECRETKEY__', nanoid(36)) .replace('__MQCONNSTR__', `amqp://datatracker:${mqKey}@dt-mq-${branch}/dt`) + .replace('__HOSTNAME__', hostname) ) await fs.copy(path.join(basePath, 'docker/scripts/app-create-dirs.sh'), path.join(releasePath, 'app-create-dirs.sh')) + await fs.copy(path.join(basePath, 'docker/scripts/app-init-celery.sh'), path.join(releasePath, 'app-init-celery.sh')) await fs.copy(path.join(basePath, 'dev/deploy-to-container/start.sh'), path.join(releasePath, 'start.sh')) await fs.copy(path.join(basePath, 'test/data'), path.join(releasePath, 'test/data')) console.info('Updated configuration files.') @@ -97,14 +99,6 @@ async function main () { }) console.info('Pulled latest MQ docker image.') - // Pull latest Celery image - console.info('Pulling latest Celery docker image...') - const celeryImagePullStream = await dock.pull('ghcr.io/ietf-tools/datatracker-celery:latest') - await new Promise((resolve, reject) => { - dock.modem.followProgress(celeryImagePullStream, (err, res) => err ? reject(err) : resolve(res)) - }) - console.info('Pulled latest Celery docker image.') - // Terminate existing containers console.info('Ensuring existing containers with same name are terminated...') const containers = await dock.listContainers({ all: true }) @@ -175,6 +169,9 @@ async function main () { Image: 'ghcr.io/ietf-tools/datatracker-db:latest', name: `dt-db-${branch}`, Hostname: `dt-db-${branch}`, + Labels: { + ...argv.nodbrefresh === 'true' && { nodbrefresh: '1' } + }, HostConfig: { NetworkMode: 'shared', RestartPolicy: { @@ -194,6 +191,9 @@ async function main () { Env: [ `CELERY_PASSWORD=${mqKey}` ], + Labels: { + ...argv.nodbrefresh === 'true' && { nodbrefresh: '1' } + }, HostConfig: { Memory: 4 * (1024 ** 3), // in bytes NetworkMode: 'shared', @@ -214,7 +214,7 @@ async function main () { const celeryContainers = {} for (const conConf of conConfs) { celeryContainers[conConf.name] = await dock.createContainer({ - Image: 'ghcr.io/ietf-tools/datatracker-celery:latest', + Image: 'ghcr.io/ietf-tools/datatracker-app-base:latest', name: `dt-${conConf.name}-${branch}`, Hostname: `dt-${conConf.name}-${branch}`, Env: [ @@ -222,6 +222,9 @@ async function main () { `CELERY_ROLE=${conConf.role}`, 'UPDATE_REQUIREMENTS_FROM=requirements.txt' ], + Labels: { + ...argv.nodbrefresh === 'true' && { nodbrefresh: '1' } + }, HostConfig: { Binds: [ 'dt-assets:/assets', @@ -233,7 +236,7 @@ async function main () { Name: 'unless-stopped' } }, - Cmd: ['--loglevel=INFO'] + Entrypoint: ['bash', '-c', 'chmod +x ./app-init-celery.sh && ./app-init-celery.sh'] }) } console.info('Created Celery docker containers successfully.') @@ -245,7 +248,7 @@ async function main () { name: `dt-app-${branch}`, Hostname: `dt-app-${branch}`, Env: [ - `LETSENCRYPT_HOST=${hostname}`, + // `LETSENCRYPT_HOST=${hostname}`, `VIRTUAL_HOST=${hostname}`, `VIRTUAL_PORT=8000`, `PGHOST=dt-db-${branch}` @@ -254,7 +257,8 @@ async function main () { appversion: 
`${argv.appversion}` ?? '0.0.0', commit: `${argv.commit}` ?? 'unknown', ghrunid: `${argv.ghrunid}` ?? '0', - hostname + hostname, + ...argv.nodbrefresh === 'true' && { nodbrefresh: '1' } }, HostConfig: { Binds: [ diff --git a/dev/deploy-to-container/package-lock.json b/dev/deploy-to-container/package-lock.json index a41ea00e20..5d5bef5604 100644 --- a/dev/deploy-to-container/package-lock.json +++ b/dev/deploy-to-container/package-lock.json @@ -6,12 +6,12 @@ "": { "name": "deploy-to-container", "dependencies": { - "dockerode": "^4.0.0", - "fs-extra": "^11.1.1", - "nanoid": "5.0.2", - "nanoid-dictionary": "5.0.0-beta.1", - "slugify": "1.6.6", - "tar": "^6.2.0", + "dockerode": "^4.0.10", + "fs-extra": "^11.3.4", + "nanoid": "5.1.7", + "nanoid-dictionary": "5.0.0", + "slugify": "1.6.9", + "tar": "^7.5.13", "yargs": "^17.7.2" }, "engines": { @@ -23,6 +23,117 @@ "resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz", "integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==" }, + "node_modules/@grpc/grpc-js": { + "version": "1.12.5", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.5.tgz", + "integrity": "sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==", + "dependencies": { + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "node_modules/@grpc/proto-loader": { + "version": "0.7.13", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz", + "integrity": "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==", + "dependencies": { + "lodash.camelcase": "^4.3.0", + "long": "^5.0.0", + "protobufjs": "^7.2.5", + "yargs": "^17.7.2" + }, + "bin": { + "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@isaacs/fs-minipass": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", + "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", + "dependencies": { + "minipass": "^7.0.4" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@js-sdsl/ordered-map": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", + "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } + }, + "node_modules/@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" + }, + "node_modules/@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" + }, + "node_modules/@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" + }, + "node_modules/@protobufjs/eventemitter": { + "version": 
"1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" + }, + "node_modules/@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "dependencies": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "node_modules/@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" + }, + "node_modules/@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" + }, + "node_modules/@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" + }, + "node_modules/@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" + }, + "node_modules/@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" + }, + "node_modules/@types/node": { + "version": "22.10.5", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.5.tgz", + "integrity": "sha512-F8Q+SeGimwOo86fiovQh8qiXfFEh2/ocYv7tU5pJ3EXMSSxk1Joj5wefpFK2fHTf/N6HKGSxIDBT9f3gCxXPkQ==", + "dependencies": { + "undici-types": "~6.20.0" + } + }, "node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", @@ -80,10 +191,43 @@ "tweetnacl": "^0.14.3" } }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, "node_modules/buildcheck": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz", - "integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==", + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.7.tgz", 
+ "integrity": "sha512-lHblz4ahamxpTmnsk+MNTRWsjYKv965MwOrSJyeD588rR3Jcu7swE+0wN5F+PbL5cjgu/9ObkhfzEPuofEMwLA==", "optional": true, "engines": { "node": ">=10.0.0" @@ -124,25 +268,25 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "node_modules/cpu-features": { - "version": "0.0.9", - "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz", - "integrity": "sha512-AKjgn2rP2yJyfbepsmLfiYcmtNn/2eUvocUyM/09yB0YDiz39HteK/5/T4Onf0pmdYDMgkBoGvRLvEguzyL7wQ==", + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", "hasInstallScript": true, "optional": true, "dependencies": { "buildcheck": "~0.0.6", - "nan": "^2.17.0" + "nan": "^2.19.0" }, "engines": { "node": ">=10.0.0" } }, "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "dependencies": { - "ms": "2.1.2" + "ms": "^2.1.3" }, "engines": { "node": ">=6.0" @@ -154,27 +298,31 @@ } }, "node_modules/docker-modem": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.1.tgz", - "integrity": "sha512-vqrE/nrweCyzmCpVpdFRC41qS+tfTF+IoUKlTZr52O82urbUqdfyJBGWMvT01pYUprWepLr8IkyVTEWJKRTQSg==", + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.7.tgz", + "integrity": "sha512-XJgGhoR/CLpqshm4d3L7rzH6t8NgDFUIIpztYlLHIApeJjMZKYJMz2zxPsYxnejq5h3ELYSw/RBsi3t5h7gNTA==", "dependencies": { "debug": "^4.1.1", "readable-stream": "^3.5.0", "split-ca": "^1.0.1", - "ssh2": "^1.11.0" + "ssh2": "^1.15.0" }, "engines": { "node": ">= 8.0" } }, "node_modules/dockerode": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.0.tgz", - "integrity": "sha512-3LF7/3MPz5+9RsUo91rD0MCcx0yxjC9bnbtgtVjOLKyKxlZSJ7/Kk3OPAgARlwlWHqXwAGYhmkAHYx7IwD0tJQ==", + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.10.tgz", + "integrity": "sha512-8L/P9JynLBiG7/coiA4FlQXegHltRqS0a+KqI44P1zgQh8QLHTg7FKOwhkBgSJwZTeHsq30WRoVFLuwkfK0YFg==", "dependencies": { "@balena/dockerignore": "^1.0.2", - "docker-modem": "^5.0.0", - "tar-fs": "~2.0.1" + "@grpc/grpc-js": "^1.11.1", + "@grpc/proto-loader": "^0.7.13", + "docker-modem": "^5.0.7", + "protobufjs": "^7.3.2", + "tar-fs": "^2.1.4", + "uuid": "^10.0.0" }, "engines": { "node": ">= 8.0" @@ -186,9 +334,9 @@ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "node_modules/end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", "dependencies": { "once": "^1.4.0" } @@ -207,9 +355,9 @@ "integrity": 
"sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" }, "node_modules/fs-extra": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.1.1.tgz", - "integrity": "sha512-MGIE4HOvQCeUCzmlHs0vXpih4ysz4wg9qiSAu6cd42lVwPbTM1TjV7RusoyQqMmk/95gdQZX72u+YW+c3eEpFQ==", + "version": "11.3.4", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.4.tgz", + "integrity": "sha512-CTXd6rk/M3/ULNQj8FBqBWHYBVYybQ3VPBw0xGKFe3tuH7ytT6ACnvzpIQ3UZtB8yvUKC2cXn1a+x+5EVQLovA==", "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", @@ -219,17 +367,6 @@ "node": ">=14.14" } }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -286,38 +423,33 @@ "graceful-fs": "^4.1.6" } }, + "node_modules/lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" + }, + "node_modules/long": { + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz", + "integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg==" + }, "node_modules/minipass": { - "version": "3.3.4", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.4.tgz", - "integrity": "sha512-I9WPbWHCGu8W+6k1ZiGpPu0GkoKBeorkfKNuAFBNS1HNFJvke82sxvI5bzcCNpWPorkOO5QQ+zomzzwRxejXiw==", - "dependencies": { - "yallist": "^4.0.0" - }, + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", "engines": { - "node": ">=8" + "node": ">=16 || 14 >=14.17" } }, "node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", + "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" + "minipass": "^7.1.2" }, "engines": { - "node": ">= 8" - } - }, - "node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" + "node": ">= 18" } }, "node_modules/mkdirp-classic": { @@ -326,20 +458,20 @@ "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" }, "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "version": 
"2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "node_modules/nan": { - "version": "2.18.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz", - "integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w==", + "version": "2.26.2", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.26.2.tgz", + "integrity": "sha512-0tTvBTYkt3tdGw22nrAy50x7gpbGCCFH3AFcyS5WiUu7Eu4vWlri1woE6qHBSfy11vksDqkiwjOnlR7WV8G1Hw==", "optional": true }, "node_modules/nanoid": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.2.tgz", - "integrity": "sha512-2ustYUX1R2rL/Br5B/FMhi8d5/QzvkJ912rBYxskcpu0myTHzSZfTr1LAS2Sm7jxRUObRrSBFoyzwAhL49aVSg==", + "version": "5.1.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.7.tgz", + "integrity": "sha512-ua3NDgISf6jdwezAheMOk4mbE1LXjm1DfMUDMuJf4AqxLFK3ccGpgWizwa5YV7Yz9EpXwEaWoRXSb/BnV0t5dQ==", "funding": [ { "type": "github", @@ -354,9 +486,10 @@ } }, "node_modules/nanoid-dictionary": { - "version": "5.0.0-beta.1", - "resolved": "https://registry.npmjs.org/nanoid-dictionary/-/nanoid-dictionary-5.0.0-beta.1.tgz", - "integrity": "sha512-xBkL9zzkNjzJ/UnmWyiOUDVX/COoi05eS0oU28RYKFFQhdnzO5dTOPbVZ/fCFgIOGr1zNinDHJ68mm/KQfcgcw==" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/nanoid-dictionary/-/nanoid-dictionary-5.0.0.tgz", + "integrity": "sha512-/iCyQHwt36XkaIvSE9fcC8p6DiMPCZMTSMj9UT56Cv6T7f5CuxvOMhpNncaNieQ4z4d32p7ruEtAfRsb7Ya8Gw==", + "license": "MIT" }, "node_modules/once": { "version": "1.4.0", @@ -366,10 +499,33 @@ "wrappy": "1" } }, + "node_modules/protobufjs": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", + "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", + "hasInstallScript": true, + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.4.tgz", + "integrity": "sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==", "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" @@ -421,9 +577,9 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "node_modules/slugify": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.6.tgz", - "integrity": "sha512-h+z7HKHYXj6wJU+AnS/+IH8Uh9fdcX1Lrhg1/VMdf9PwoBQXFcXiAdsy2tSK0P6gKwJLXp02r90ahUCqHk9rrw==", + "version": "1.6.9", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.9.tgz", + "integrity": "sha512-vZ7rfeehZui7wQs438JXBckYLkIIdfHOXsaVEUMyS5fHo1483l1bMdo0EDSWYclY0yZKFOipDy4KHuKs6ssvdg==", "engines": { 
"node": ">=8.0.0" } @@ -434,9 +590,9 @@ "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==" }, "node_modules/ssh2": { - "version": "1.14.0", - "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.14.0.tgz", - "integrity": "sha512-AqzD1UCqit8tbOKoj6ztDDi1ffJZ2rV2SwlgrVVrHPkV5vWqGJOVp5pmtj18PunkPJAuKQsnInyKV+/Nb2bUnA==", + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.17.0.tgz", + "integrity": "sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==", "hasInstallScript": true, "dependencies": { "asn1": "^0.2.6", @@ -446,8 +602,8 @@ "node": ">=10.16.0" }, "optionalDependencies": { - "cpu-features": "~0.0.8", - "nan": "^2.17.0" + "cpu-features": "~0.0.10", + "nan": "^2.23.0" } }, "node_modules/string_decoder": { @@ -483,30 +639,29 @@ } }, "node_modules/tar": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.0.tgz", - "integrity": "sha512-/Wo7DcT0u5HUV486xg675HtjNd3BXZ6xDbzsCUZPt5iw8bTQ63bP0Raut3mvro9u+CUyq7YQd8Cx55fsZXxqLQ==", + "version": "7.5.13", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.13.tgz", + "integrity": "sha512-tOG/7GyXpFevhXVh8jOPJrmtRpOTsYqUIkVdVooZYJS/z8WhfQUX8RJILmeuJNinGAMSu1veBr4asSHFt5/hng==", "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.1.0", + "yallist": "^5.0.0" }, "engines": { - "node": ">=10" + "node": ">=18" } }, "node_modules/tar-fs": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.0.1.tgz", - "integrity": "sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz", + "integrity": "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==", "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", - "tar-stream": "^2.0.0" + "tar-stream": "^2.1.4" } }, "node_modules/tar-stream": { @@ -524,53 +679,12 @@ "node": ">=6" } }, - "node_modules/tar-stream/node_modules/bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "dependencies": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "node_modules/tar-stream/node_modules/buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, "node_modules/tar/node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "engines": { - "node": ">=10" - } - }, - "node_modules/tar/node_modules/minipass": { - 
"version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", + "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", "engines": { - "node": ">=8" + "node": ">=18" } }, "node_modules/tweetnacl": { @@ -578,6 +692,11 @@ "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, + "node_modules/undici-types": { + "version": "6.20.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", + "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==" + }, "node_modules/universalify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", @@ -591,6 +710,18 @@ "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, + "node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -621,9 +752,12 @@ } }, "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", + "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", + "engines": { + "node": ">=18" + } }, "node_modules/yargs": { "version": "17.7.2", @@ -657,6 +791,101 @@ "resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz", "integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==" }, + "@grpc/grpc-js": { + "version": "1.12.5", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.5.tgz", + "integrity": "sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==", + "requires": { + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" + } + }, + "@grpc/proto-loader": { + "version": "0.7.13", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz", + "integrity": "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==", + "requires": { + "lodash.camelcase": "^4.3.0", + "long": "^5.0.0", + "protobufjs": "^7.2.5", + "yargs": "^17.7.2" + } + }, + "@isaacs/fs-minipass": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", + "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", + "requires": 
{ + "minipass": "^7.0.4" + } + }, + "@js-sdsl/ordered-map": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", + "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==" + }, + "@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" + }, + "@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" + }, + "@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" + }, + "@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" + }, + "@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "requires": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" + }, + "@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" + }, + "@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" + }, + "@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" + }, + "@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" + }, + "@types/node": { + "version": "22.10.5", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.5.tgz", + "integrity": "sha512-F8Q+SeGimwOo86fiovQh8qiXfFEh2/ocYv7tU5pJ3EXMSSxk1Joj5wefpFK2fHTf/N6HKGSxIDBT9f3gCxXPkQ==", + "requires": { + "undici-types": "~6.20.0" + } + }, "ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", @@ -691,10 +920,29 @@ "tweetnacl": "^0.14.3" } }, + "bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "requires": { + "buffer": 
"^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, "buildcheck": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz", - "integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==", + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.7.tgz", + "integrity": "sha512-lHblz4ahamxpTmnsk+MNTRWsjYKv965MwOrSJyeD588rR3Jcu7swE+0wN5F+PbL5cjgu/9ObkhfzEPuofEMwLA==", "optional": true }, "chownr": { @@ -726,42 +974,46 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "cpu-features": { - "version": "0.0.9", - "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz", - "integrity": "sha512-AKjgn2rP2yJyfbepsmLfiYcmtNn/2eUvocUyM/09yB0YDiz39HteK/5/T4Onf0pmdYDMgkBoGvRLvEguzyL7wQ==", + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", "optional": true, "requires": { "buildcheck": "~0.0.6", - "nan": "^2.17.0" + "nan": "^2.19.0" } }, "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "requires": { - "ms": "2.1.2" + "ms": "^2.1.3" } }, "docker-modem": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.1.tgz", - "integrity": "sha512-vqrE/nrweCyzmCpVpdFRC41qS+tfTF+IoUKlTZr52O82urbUqdfyJBGWMvT01pYUprWepLr8IkyVTEWJKRTQSg==", + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.7.tgz", + "integrity": "sha512-XJgGhoR/CLpqshm4d3L7rzH6t8NgDFUIIpztYlLHIApeJjMZKYJMz2zxPsYxnejq5h3ELYSw/RBsi3t5h7gNTA==", "requires": { "debug": "^4.1.1", "readable-stream": "^3.5.0", "split-ca": "^1.0.1", - "ssh2": "^1.11.0" + "ssh2": "^1.15.0" } }, "dockerode": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.0.tgz", - "integrity": "sha512-3LF7/3MPz5+9RsUo91rD0MCcx0yxjC9bnbtgtVjOLKyKxlZSJ7/Kk3OPAgARlwlWHqXwAGYhmkAHYx7IwD0tJQ==", + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.10.tgz", + "integrity": "sha512-8L/P9JynLBiG7/coiA4FlQXegHltRqS0a+KqI44P1zgQh8QLHTg7FKOwhkBgSJwZTeHsq30WRoVFLuwkfK0YFg==", "requires": { "@balena/dockerignore": "^1.0.2", - "docker-modem": "^5.0.0", - "tar-fs": "~2.0.1" + "@grpc/grpc-js": "^1.11.1", + "@grpc/proto-loader": "^0.7.13", + "docker-modem": "^5.0.7", + "protobufjs": "^7.3.2", + "tar-fs": "^2.1.4", + "uuid": "^10.0.0" } }, "emoji-regex": { @@ -770,9 +1022,9 @@ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "end-of-stream": { - "version": "1.4.4", - "resolved": 
"https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", "requires": { "once": "^1.4.0" } @@ -788,23 +1040,15 @@ "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" }, "fs-extra": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.1.1.tgz", - "integrity": "sha512-MGIE4HOvQCeUCzmlHs0vXpih4ysz4wg9qiSAu6cd42lVwPbTM1TjV7RusoyQqMmk/95gdQZX72u+YW+c3eEpFQ==", + "version": "11.3.4", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.4.tgz", + "integrity": "sha512-CTXd6rk/M3/ULNQj8FBqBWHYBVYybQ3VPBw0xGKFe3tuH7ytT6ACnvzpIQ3UZtB8yvUKC2cXn1a+x+5EVQLovA==", "requires": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, - "fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "requires": { - "minipass": "^3.0.0" - } - }, "get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -839,53 +1083,54 @@ "universalify": "^2.0.0" } }, + "lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" + }, + "long": { + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz", + "integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg==" + }, "minipass": { - "version": "3.3.4", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.4.tgz", - "integrity": "sha512-I9WPbWHCGu8W+6k1ZiGpPu0GkoKBeorkfKNuAFBNS1HNFJvke82sxvI5bzcCNpWPorkOO5QQ+zomzzwRxejXiw==", - "requires": { - "yallist": "^4.0.0" - } + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==" }, "minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", + "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", "requires": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" + "minipass": "^7.1.2" } }, - "mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==" - }, "mkdirp-classic": { "version": "0.5.3", "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" }, "ms": { - "version": "2.1.2", - "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "nan": { - "version": "2.18.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz", - "integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w==", + "version": "2.26.2", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.26.2.tgz", + "integrity": "sha512-0tTvBTYkt3tdGw22nrAy50x7gpbGCCFH3AFcyS5WiUu7Eu4vWlri1woE6qHBSfy11vksDqkiwjOnlR7WV8G1Hw==", "optional": true }, "nanoid": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.2.tgz", - "integrity": "sha512-2ustYUX1R2rL/Br5B/FMhi8d5/QzvkJ912rBYxskcpu0myTHzSZfTr1LAS2Sm7jxRUObRrSBFoyzwAhL49aVSg==" + "version": "5.1.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.7.tgz", + "integrity": "sha512-ua3NDgISf6jdwezAheMOk4mbE1LXjm1DfMUDMuJf4AqxLFK3ccGpgWizwa5YV7Yz9EpXwEaWoRXSb/BnV0t5dQ==" }, "nanoid-dictionary": { - "version": "5.0.0-beta.1", - "resolved": "https://registry.npmjs.org/nanoid-dictionary/-/nanoid-dictionary-5.0.0-beta.1.tgz", - "integrity": "sha512-xBkL9zzkNjzJ/UnmWyiOUDVX/COoi05eS0oU28RYKFFQhdnzO5dTOPbVZ/fCFgIOGr1zNinDHJ68mm/KQfcgcw==" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/nanoid-dictionary/-/nanoid-dictionary-5.0.0.tgz", + "integrity": "sha512-/iCyQHwt36XkaIvSE9fcC8p6DiMPCZMTSMj9UT56Cv6T7f5CuxvOMhpNncaNieQ4z4d32p7ruEtAfRsb7Ya8Gw==" }, "once": { "version": "1.4.0", @@ -895,10 +1140,29 @@ "wrappy": "1" } }, + "protobufjs": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", + "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + } + }, "pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.4.tgz", + "integrity": "sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==", "requires": { "end-of-stream": "^1.1.0", "once": "^1.3.1" @@ -930,9 +1194,9 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "slugify": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.6.tgz", - "integrity": "sha512-h+z7HKHYXj6wJU+AnS/+IH8Uh9fdcX1Lrhg1/VMdf9PwoBQXFcXiAdsy2tSK0P6gKwJLXp02r90ahUCqHk9rrw==" + "version": "1.6.9", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.9.tgz", + "integrity": "sha512-vZ7rfeehZui7wQs438JXBckYLkIIdfHOXsaVEUMyS5fHo1483l1bMdo0EDSWYclY0yZKFOipDy4KHuKs6ssvdg==" }, "split-ca": { "version": "1.0.1", @@ -940,14 +1204,14 @@ "integrity": 
"sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==" }, "ssh2": { - "version": "1.14.0", - "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.14.0.tgz", - "integrity": "sha512-AqzD1UCqit8tbOKoj6ztDDi1ffJZ2rV2SwlgrVVrHPkV5vWqGJOVp5pmtj18PunkPJAuKQsnInyKV+/Nb2bUnA==", + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.17.0.tgz", + "integrity": "sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==", "requires": { "asn1": "^0.2.6", "bcrypt-pbkdf": "^1.0.2", - "cpu-features": "~0.0.8", - "nan": "^2.17.0" + "cpu-features": "~0.0.10", + "nan": "^2.23.0" } }, "string_decoder": { @@ -977,39 +1241,33 @@ } }, "tar": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.0.tgz", - "integrity": "sha512-/Wo7DcT0u5HUV486xg675HtjNd3BXZ6xDbzsCUZPt5iw8bTQ63bP0Raut3mvro9u+CUyq7YQd8Cx55fsZXxqLQ==", + "version": "7.5.13", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.13.tgz", + "integrity": "sha512-tOG/7GyXpFevhXVh8jOPJrmtRpOTsYqUIkVdVooZYJS/z8WhfQUX8RJILmeuJNinGAMSu1veBr4asSHFt5/hng==", "requires": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.1.0", + "yallist": "^5.0.0" }, "dependencies": { "chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==" - }, - "minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", + "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==" } } }, "tar-fs": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.0.1.tgz", - "integrity": "sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz", + "integrity": "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==", "requires": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", - "tar-stream": "^2.0.0" + "tar-stream": "^2.1.4" } }, "tar-stream": { @@ -1022,27 +1280,6 @@ "fs-constants": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.1.1" - }, - "dependencies": { - "bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "requires": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - } } }, "tweetnacl": { @@ -1050,6 +1287,11 @@ "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", "integrity": 
"sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, + "undici-types": { + "version": "6.20.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", + "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==" + }, "universalify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", @@ -1060,6 +1302,11 @@ "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, + "uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==" + }, "wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -1081,9 +1328,9 @@ "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==" }, "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", + "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==" }, "yargs": { "version": "17.7.2", diff --git a/dev/deploy-to-container/package.json b/dev/deploy-to-container/package.json index e9b1c3546a..ccc78fc63b 100644 --- a/dev/deploy-to-container/package.json +++ b/dev/deploy-to-container/package.json @@ -2,12 +2,12 @@ "name": "deploy-to-container", "type": "module", "dependencies": { - "dockerode": "^4.0.0", - "fs-extra": "^11.1.1", - "nanoid": "5.0.2", - "nanoid-dictionary": "5.0.0-beta.1", - "slugify": "1.6.6", - "tar": "^6.2.0", + "dockerode": "^4.0.10", + "fs-extra": "^11.3.4", + "nanoid": "5.1.7", + "nanoid-dictionary": "5.0.0", + "slugify": "1.6.9", + "tar": "^7.5.13", "yargs": "^17.7.2" }, "engines": { diff --git a/dev/deploy-to-container/refresh.js b/dev/deploy-to-container/refresh.js index 827d85de6c..7ea13c885a 100644 --- a/dev/deploy-to-container/refresh.js +++ b/dev/deploy-to-container/refresh.js @@ -24,7 +24,8 @@ async function main () { const containersToRestart = [] for (const container of containers) { if ( - container.Names.some(n => n.startsWith('/dt-db-')) + container.Names.some(n => n.startsWith('/dt-db-')) && + container.Labels?.nodbrefresh !== '1' ) { console.info(`Terminating DB container ${container.Id}...`) dbContainersToCreate.push(container.Names.find(n => n.startsWith('/dt-db-')).substring(1)) @@ -37,9 +38,11 @@ async function main () { v: true }) } else if ( - container.Names.some(n => n.startsWith('/dt-app-')) || - container.Names.some(n => n.startsWith('/dt-celery-')) || - container.Names.some(n => n.startsWith('/dt-beat-')) + ( + container.Names.some(n => n.startsWith('/dt-app-')) || + container.Names.some(n => n.startsWith('/dt-celery-')) || + container.Names.some(n => n.startsWith('/dt-beat-')) + ) && container.Labels?.nodbrefresh !== '1' ) { if (container.State === 'running') { const appContainer = dock.getContainer(container.Id) diff --git a/dev/deploy-to-container/settings_local.py b/dev/deploy-to-container/settings_local.py index 60981ba567..055b48d0f5 100644 
--- a/dev/deploy-to-container/settings_local.py +++ b/dev/deploy-to-container/settings_local.py @@ -1,7 +1,7 @@ # Copyright The IETF Trust 2007-2019, All Rights Reserved # -*- coding: utf-8 -*- -from ietf.settings import * # pyflakes:ignore +from ietf.settings import * # pyflakes:ignore ALLOWED_HOSTS = ['*'] @@ -40,7 +40,6 @@ SUBMIT_YANG_CATALOG_MODEL_DIR = '/assets/ietf-ftp/yang/catalogmod/' SUBMIT_YANG_DRAFT_MODEL_DIR = '/assets/ietf-ftp/yang/draftmod/' -SUBMIT_YANG_INVAL_MODEL_DIR = '/assets/ietf-ftp/yang/invalmod/' SUBMIT_YANG_IANA_MODEL_DIR = '/assets/ietf-ftp/yang/ianamod/' SUBMIT_YANG_RFC_MODEL_DIR = '/assets/ietf-ftp/yang/rfcmod/' @@ -53,18 +52,30 @@ # 'ietf.context_processors.sql_debug', # ] -DOCUMENT_PATH_PATTERN = '/assets/ietf-ftp/{doc.type_id}/' +DOCUMENT_PATH_PATTERN = '/assets/ietfdata/doc/{doc.type_id}/' INTERNET_DRAFT_PATH = '/assets/ietf-ftp/internet-drafts/' RFC_PATH = '/assets/ietf-ftp/rfc/' CHARTER_PATH = '/assets/ietf-ftp/charter/' BOFREQ_PATH = '/assets/ietf-ftp/bofreq/' CONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/' STATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/' -INTERNET_DRAFT_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/' -INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/' +INTERNET_DRAFT_ARCHIVE_DIR = '/assets/collection/draft-archive' +INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/archive/id' BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml' +IDSUBMIT_REPOSITORY_PATH = INTERNET_DRAFT_PATH +FTP_DIR = '/assets/ftp' +NFS_METRICS_TMP_DIR = '/assets/tmp' NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/' SLIDE_STAGING_PATH = '/test/staging/' DE_GFM_BINARY = '/usr/local/bin/de-gfm' + +APP_API_TOKENS = { + "ietf.api.red_api" : ["devtoken", "redtoken"], # Not a real secret + "ietf.api.views.ingest_email_test": ["ingestion-test-token"], # Not a real secret + "ietf.api.views_rpc" : ["devtoken"], # Not a real secret +} + +# OIDC configuration +SITE_URL = 'https://__HOSTNAME__' diff --git a/dev/deploy-to-container/start.sh b/dev/deploy-to-container/start.sh index 5621c68fa5..5d976f80ea 100644 --- a/dev/deploy-to-container/start.sh +++ b/dev/deploy-to-container/start.sh @@ -35,8 +35,20 @@ echo "Running Datatracker checks..." # Migrate, adjusting to what the current state of the underlying database might be: +# On production, the blobdb tables are in a separate database. Manipulate migration +# history to ensure that they're created for the sandbox environment that runs it +# all from a single database. +echo "Ensuring blobdb relations exist..." +/usr/local/bin/python ./ietf/manage.py migrate --settings=settings_local --fake blobdb zero +if ! /usr/local/bin/python ./ietf/manage.py migrate --settings=settings_local blobdb; then + # If we are restarting a sandbox, the migration may already have run and re-running + # it will fail. Assume that happened and fake it. + /usr/local/bin/python ./ietf/manage.py migrate --settings=settings_local --fake blobdb +fi + +# Now run the migrations for real echo "Running Datatracker migrations..." -/usr/local/bin/python ./ietf/manage.py migrate --fake-initial --settings=settings_local +/usr/local/bin/python ./ietf/manage.py migrate --settings=settings_local echo "Starting Datatracker..." ./ietf/manage.py runserver 0.0.0.0:8000 --settings=settings_local diff --git a/dev/deploy/build.sh b/dev/deploy/build.sh deleted file mode 100644 index a802acb46b..0000000000 --- a/dev/deploy/build.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -echo "Compiling native node packages..." 
-yarn rebuild -echo "Packaging static assets..." -if [ "${SHOULD_DEPLOY}" = "true" ]; then - yarn build --base=https://www.ietf.org/lib/dt/$PKG_VERSION/ -else - yarn build -fi -yarn legacy:build diff --git a/dev/diff/package-lock.json b/dev/diff/package-lock.json index a816cd012c..d1c2fbd763 100644 --- a/dev/diff/package-lock.json +++ b/dev/diff/package-lock.json @@ -6,18 +6,18 @@ "": { "name": "diff", "dependencies": { - "chalk": "^5.3.0", - "dockerode": "^4.0.0", + "chalk": "^5.4.1", + "dockerode": "^4.0.6", "enquirer": "^2.4.1", "extract-zip": "^2.0.1", - "fs-extra": "^11.1.1", + "fs-extra": "^11.3.0", "got": "^13.0.0", "keypress": "^0.2.1", "listr2": "^6.6.1", "lodash-es": "^4.17.21", - "luxon": "^3.4.3", + "luxon": "^3.6.1", "pretty-bytes": "^6.1.1", - "tar": "^6.2.0", + "tar": "^7.4.3", "yargs": "^17.7.2" }, "engines": { @@ -29,6 +29,220 @@ "resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz", "integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==" }, + "node_modules/@grpc/grpc-js": { + "version": "1.12.5", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.5.tgz", + "integrity": "sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "node_modules/@grpc/proto-loader": { + "version": "0.7.13", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz", + "integrity": "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==", + "license": "Apache-2.0", + "dependencies": { + "lodash.camelcase": "^4.3.0", + "long": "^5.0.0", + "protobufjs": "^7.2.5", + "yargs": "^17.7.2" + }, + "bin": { + "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": 
"sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + }, + "node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/fs-minipass": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", + "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", + "dependencies": { + "minipass": "^7.0.4" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@js-sdsl/ordered-map": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", + "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "node_modules/@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==", + "license": "BSD-3-Clause" + }, "node_modules/@sindresorhus/is": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-5.3.0.tgz", @@ -59,8 +273,7 @@ "node_modules/@types/node": { "version": "18.6.5", "resolved": "https://registry.npmjs.org/@types/node/-/node-18.6.5.tgz", - "integrity": "sha512-Xjt5ZGUa5WusGZJ4WJPbOT8QOqp6nDynVFRKcUt32bOgvXEoc6o085WNkYTMO7ifAj2isEfQQ2cseE+wT6jsRw==", - "optional": true + "integrity": "sha512-Xjt5ZGUa5WusGZJ4WJPbOT8QOqp6nDynVFRKcUt32bOgvXEoc6o085WNkYTMO7ifAj2isEfQQ2cseE+wT6jsRw==" }, "node_modules/@types/yauzl": { "version": "2.10.0", @@ -119,10 +332,16 @@ "version": "0.2.6", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "license": "MIT", "dependencies": { "safer-buffer": "~2.1.0" } }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -140,16 +359,61 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "license": "MIT" }, "node_modules/bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", "integrity": 
"sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", + "license": "BSD-3-Clause", "dependencies": { "tweetnacl": "^0.14.3" } }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "license": "MIT", + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, "node_modules/buffer-crc32": { "version": "0.2.13", "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", @@ -193,9 +457,10 @@ } }, "node_modules/chalk": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", - "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.4.1.tgz", + "integrity": "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==", + "license": "MIT", "engines": { "node": "^12.17.0 || ^14.13 || >=16.0.0" }, @@ -206,7 +471,8 @@ "node_modules/chownr": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", - "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", + "license": "ISC" }, "node_modules/cli-cursor": { "version": "4.0.0", @@ -318,19 +584,32 @@ "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==" }, "node_modules/cpu-features": { - "version": "0.0.9", - "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz", - "integrity": "sha512-AKjgn2rP2yJyfbepsmLfiYcmtNn/2eUvocUyM/09yB0YDiz39HteK/5/T4Onf0pmdYDMgkBoGvRLvEguzyL7wQ==", + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", "hasInstallScript": true, "optional": true, "dependencies": { "buildcheck": "~0.0.6", - "nan": "^2.17.0" + "nan": "^2.19.0" }, "engines": { "node": ">=10.0.0" } }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dependencies": { + "path-key": 
"^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -381,27 +660,33 @@ } }, "node_modules/docker-modem": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.1.tgz", - "integrity": "sha512-vqrE/nrweCyzmCpVpdFRC41qS+tfTF+IoUKlTZr52O82urbUqdfyJBGWMvT01pYUprWepLr8IkyVTEWJKRTQSg==", + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", + "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", + "license": "Apache-2.0", "dependencies": { "debug": "^4.1.1", "readable-stream": "^3.5.0", "split-ca": "^1.0.1", - "ssh2": "^1.11.0" + "ssh2": "^1.15.0" }, "engines": { "node": ">= 8.0" } }, "node_modules/dockerode": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.0.tgz", - "integrity": "sha512-3LF7/3MPz5+9RsUo91rD0MCcx0yxjC9bnbtgtVjOLKyKxlZSJ7/Kk3OPAgARlwlWHqXwAGYhmkAHYx7IwD0tJQ==", + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.6.tgz", + "integrity": "sha512-FbVf3Z8fY/kALB9s+P9epCpWhfi/r0N2DgYYcYpsAUlaTxPjdsitsFobnltb+lyCgAIvf9C+4PSWlTnHlJMf1w==", + "license": "Apache-2.0", "dependencies": { "@balena/dockerignore": "^1.0.2", - "docker-modem": "^5.0.0", - "tar-fs": "~2.0.1" + "@grpc/grpc-js": "^1.11.1", + "@grpc/proto-loader": "^0.7.13", + "docker-modem": "^5.0.6", + "protobufjs": "^7.3.2", + "tar-fs": "~2.1.2", + "uuid": "^10.0.0" }, "engines": { "node": ">= 8.0" @@ -491,6 +776,32 @@ "pend": "~1.2.0" } }, + "node_modules/foreground-child": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", + "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/foreground-child/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/form-data-encoder": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-2.1.4.tgz", @@ -502,12 +813,14 @@ "node_modules/fs-constants": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", - "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "license": "MIT" }, "node_modules/fs-extra": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.1.1.tgz", - "integrity": "sha512-MGIE4HOvQCeUCzmlHs0vXpih4ysz4wg9qiSAu6cd42lVwPbTM1TjV7RusoyQqMmk/95gdQZX72u+YW+c3eEpFQ==", + "version": "11.3.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.0.tgz", + "integrity": "sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==", + 
"license": "MIT", "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", @@ -517,17 +830,6 @@ "node": ">=14.14" } }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -547,6 +849,27 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/glob": { + "version": "10.3.12", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.12.tgz", + "integrity": "sha512-TCNv8vJ+xz4QiqTpfOJA7HvYv+tNIRHKfUWw/q+v2jdgN4ebz+KY9tGx5J4rHP0o84mNP+ApH66HRX8us3Khqg==", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^2.3.6", + "minimatch": "^9.0.1", + "minipass": "^7.0.4", + "path-scurry": "^1.10.2" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/got": { "version": "13.0.0", "resolved": "https://registry.npmjs.org/got/-/got-13.0.0.tgz", @@ -610,7 +933,8 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "license": "BSD-3-Clause" }, "node_modules/inherits": { "version": "2.0.4", @@ -625,6 +949,28 @@ "node": ">=8" } }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "node_modules/jackspeak": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", + "integrity": "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, "node_modules/json-buffer": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", @@ -756,6 +1102,12 @@ "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==" }, + "node_modules/lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", + "license": "MIT" + }, "node_modules/log-update": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/log-update/-/log-update-5.0.1.tgz", @@ -847,6 +1199,12 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, + "node_modules/long": { + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz", + "integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg==", + "license": "Apache-2.0" + }, "node_modules/lowercase-keys": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz", @@ -858,10 +1216,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, 
+ "node_modules/lru-cache": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", + "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", + "engines": { + "node": "14 || >=16.14" + } + }, "node_modules/luxon": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.3.tgz", - "integrity": "sha512-tFWBiv3h7z+T/tDaoxA8rqTxy1CHV6gHS//QdaH4pulbq/JuBSGgQspQQqcgnwdAx6pNI7cmvz5Sv/addzHmUg==", + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.6.1.tgz", + "integrity": "sha512-tJLxrKJhO2ukZ5z0gyjY1zPh3Rh88Ej9P7jNrZiHMUXHae1yvI2imgOZtL1TO8TW6biMMKfTtAOoEJANgtWBMQ==", + "license": "MIT", "engines": { "node": ">=12" } @@ -885,44 +1252,59 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/minipass": { - "version": "3.3.4", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.4.tgz", - "integrity": "sha512-I9WPbWHCGu8W+6k1ZiGpPu0GkoKBeorkfKNuAFBNS1HNFJvke82sxvI5bzcCNpWPorkOO5QQ+zomzzwRxejXiw==", + "node_modules/minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", "dependencies": { - "yallist": "^4.0.0" + "brace-expansion": "^2.0.1" }, "engines": { - "node": ">=8" + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "engines": { + "node": ">=16 || 14 >=14.17" } }, "node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.1.tgz", + "integrity": "sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg==", "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" + "minipass": "^7.0.4", + "rimraf": "^5.0.5" }, "engines": { - "node": ">= 8" + "node": ">= 18" } }, "node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", + "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", "bin": { - "mkdirp": "bin/cmd.js" + "mkdirp": "dist/cjs/src/bin.js" }, "engines": { "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/mkdirp-classic": { "version": "0.5.3", "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", - "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", + "license": "MIT" }, "node_modules/ms": { "version": "2.1.2", @@ -930,9 +1312,10 @@ "integrity": 
"sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "node_modules/nan": { - "version": "2.18.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz", - "integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w==", + "version": "2.22.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz", + "integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==", + "license": "MIT", "optional": true }, "node_modules/normalize-url": { @@ -976,6 +1359,29 @@ "node": ">=12.20" } }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-scurry": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.2.tgz", + "integrity": "sha512-7xTavNy5RQXnsjANvVvMkEjvloOinkAjv/Z6Ildz9v2RinZ4SBKTWFOVRbaF8p0vpHnyjV/UwNDdKuUv6M5qcA==", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/pend": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", @@ -992,6 +1398,30 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/protobufjs": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", + "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", + "hasInstallScript": true, + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/pump": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", @@ -1072,12 +1502,29 @@ "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz", "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==" }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { + "node_modules/rimraf": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.5.tgz", + "integrity": "sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A==", + "dependencies": { + "glob": "^10.3.7" + }, + "bin": { + "rimraf": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": 
"sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { "type": "github", "url": "https://github.com/sponsors/feross" }, @@ -1094,7 +1541,27 @@ "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "engines": { + "node": ">=8" + } }, "node_modules/signal-exit": { "version": "3.0.7", @@ -1141,12 +1608,13 @@ "node_modules/split-ca": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split-ca/-/split-ca-1.0.1.tgz", - "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==" + "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==", + "license": "ISC" }, "node_modules/ssh2": { - "version": "1.14.0", - "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.14.0.tgz", - "integrity": "sha512-AqzD1UCqit8tbOKoj6ztDDi1ffJZ2rV2SwlgrVVrHPkV5vWqGJOVp5pmtj18PunkPJAuKQsnInyKV+/Nb2bUnA==", + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", + "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", "hasInstallScript": true, "dependencies": { "asn1": "^0.2.6", @@ -1156,8 +1624,8 @@ "node": ">=10.16.0" }, "optionalDependencies": { - "cpu-features": "~0.0.8", - "nan": "^2.17.0" + "cpu-features": "~0.0.10", + "nan": "^2.20.0" } }, "node_modules/string_decoder": { @@ -1181,6 +1649,20 @@ "node": ">=8" } }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -1192,37 +1674,51 @@ "node": ">=8" } }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/tar": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.0.tgz", - "integrity": 
"sha512-/Wo7DcT0u5HUV486xg675HtjNd3BXZ6xDbzsCUZPt5iw8bTQ63bP0Raut3mvro9u+CUyq7YQd8Cx55fsZXxqLQ==", + "version": "7.4.3", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", + "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.0.1", + "mkdirp": "^3.0.1", + "yallist": "^5.0.0" }, "engines": { - "node": ">=10" + "node": ">=18" } }, "node_modules/tar-fs": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.0.1.tgz", - "integrity": "sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==", + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz", + "integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==", + "license": "MIT", "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", - "tar-stream": "^2.0.0" + "tar-stream": "^2.1.4" } }, "node_modules/tar-stream": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "license": "MIT", "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", @@ -1234,59 +1730,19 @@ "node": ">=6" } }, - "node_modules/tar-stream/node_modules/bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "dependencies": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "node_modules/tar-stream/node_modules/buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, "node_modules/tar/node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "engines": { - "node": ">=10" - } - }, - "node_modules/tar/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", + "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", "engines": { - "node": ">=8" + "node": ">=18" } }, "node_modules/tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": 
"sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", + "license": "Unlicense" }, "node_modules/type-fest": { "version": "1.4.0", @@ -1312,6 +1768,33 @@ "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, + "node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -1328,6 +1811,23 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", @@ -1342,9 +1842,12 @@ } }, "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", + "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", + "engines": { + "node": ">=18" + } }, "node_modules/yargs": { "version": "17.7.2", @@ -1387,6 +1890,157 @@ "resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz", "integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==" }, + "@grpc/grpc-js": { + "version": "1.12.5", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.5.tgz", + "integrity": "sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==", + "requires": { + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" + } + }, + "@grpc/proto-loader": { + "version": "0.7.13", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz", + "integrity": "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==", + "requires": { + "lodash.camelcase": "^4.3.0", + "long": "^5.0.0", + "protobufjs": "^7.2.5", + "yargs": "^17.7.2" 
+ } + }, + "@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "requires": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==" + }, + "ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==" + }, + "emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + }, + "string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "requires": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + } + }, + "strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "requires": { + "ansi-regex": "^6.0.1" + } + }, + "wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "requires": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + } + } + } + }, + "@isaacs/fs-minipass": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", + "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", + "requires": { + "minipass": "^7.0.4" + } + }, + "@js-sdsl/ordered-map": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", + "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==" + }, + "@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "optional": true + }, + "@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" + }, + "@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" + }, + "@protobufjs/codegen": { + "version": 
"2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" + }, + "@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" + }, + "@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "requires": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" + }, + "@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" + }, + "@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" + }, + "@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" + }, + "@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" + }, "@sindresorhus/is": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-5.3.0.tgz", @@ -1408,8 +2062,7 @@ "@types/node": { "version": "18.6.5", "resolved": "https://registry.npmjs.org/@types/node/-/node-18.6.5.tgz", - "integrity": "sha512-Xjt5ZGUa5WusGZJ4WJPbOT8QOqp6nDynVFRKcUt32bOgvXEoc6o085WNkYTMO7ifAj2isEfQQ2cseE+wT6jsRw==", - "optional": true + "integrity": "sha512-Xjt5ZGUa5WusGZJ4WJPbOT8QOqp6nDynVFRKcUt32bOgvXEoc6o085WNkYTMO7ifAj2isEfQQ2cseE+wT6jsRw==" }, "@types/yauzl": { "version": "2.10.0", @@ -1454,6 +2107,11 @@ "safer-buffer": "~2.1.0" } }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, "base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -1467,6 +2125,33 @@ "tweetnacl": "^0.14.3" } }, + "bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "requires": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": 
"sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "requires": { + "balanced-match": "^1.0.0" + } + }, + "buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, "buffer-crc32": { "version": "0.2.13", "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", @@ -1498,9 +2183,9 @@ } }, "chalk": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", - "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==" + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.4.1.tgz", + "integrity": "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==" }, "chownr": { "version": "1.1.4", @@ -1583,13 +2268,23 @@ "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==" }, "cpu-features": { - "version": "0.0.9", - "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz", - "integrity": "sha512-AKjgn2rP2yJyfbepsmLfiYcmtNn/2eUvocUyM/09yB0YDiz39HteK/5/T4Onf0pmdYDMgkBoGvRLvEguzyL7wQ==", + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", "optional": true, "requires": { "buildcheck": "~0.0.6", - "nan": "^2.17.0" + "nan": "^2.19.0" + } + }, + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" } }, "debug": { @@ -1621,24 +2316,28 @@ "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==" }, "docker-modem": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.1.tgz", - "integrity": "sha512-vqrE/nrweCyzmCpVpdFRC41qS+tfTF+IoUKlTZr52O82urbUqdfyJBGWMvT01pYUprWepLr8IkyVTEWJKRTQSg==", + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", + "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", "requires": { "debug": "^4.1.1", "readable-stream": "^3.5.0", "split-ca": "^1.0.1", - "ssh2": "^1.11.0" + "ssh2": "^1.15.0" } }, "dockerode": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.0.tgz", - "integrity": "sha512-3LF7/3MPz5+9RsUo91rD0MCcx0yxjC9bnbtgtVjOLKyKxlZSJ7/Kk3OPAgARlwlWHqXwAGYhmkAHYx7IwD0tJQ==", + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.6.tgz", + "integrity": "sha512-FbVf3Z8fY/kALB9s+P9epCpWhfi/r0N2DgYYcYpsAUlaTxPjdsitsFobnltb+lyCgAIvf9C+4PSWlTnHlJMf1w==", "requires": { "@balena/dockerignore": "^1.0.2", - "docker-modem": "^5.0.0", - "tar-fs": "~2.0.1" + "@grpc/grpc-js": "^1.11.1", + "@grpc/proto-loader": "^0.7.13", + "docker-modem": "^5.0.6", + "protobufjs": "^7.3.2", + "tar-fs": "~2.1.2", + "uuid": "^10.0.0" } }, "eastasianwidth": { @@ -1707,6 +2406,22 @@ 
"pend": "~1.2.0" } }, + "foreground-child": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", + "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", + "requires": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "dependencies": { + "signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==" + } + } + }, "form-data-encoder": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-2.1.4.tgz", @@ -1718,23 +2433,15 @@ "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" }, "fs-extra": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.1.1.tgz", - "integrity": "sha512-MGIE4HOvQCeUCzmlHs0vXpih4ysz4wg9qiSAu6cd42lVwPbTM1TjV7RusoyQqMmk/95gdQZX72u+YW+c3eEpFQ==", + "version": "11.3.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.0.tgz", + "integrity": "sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==", "requires": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, - "fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "requires": { - "minipass": "^3.0.0" - } - }, "get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -1745,6 +2452,18 @@ "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==" }, + "glob": { + "version": "10.3.12", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.12.tgz", + "integrity": "sha512-TCNv8vJ+xz4QiqTpfOJA7HvYv+tNIRHKfUWw/q+v2jdgN4ebz+KY9tGx5J4rHP0o84mNP+ApH66HRX8us3Khqg==", + "requires": { + "foreground-child": "^3.1.0", + "jackspeak": "^2.3.6", + "minimatch": "^9.0.1", + "minipass": "^7.0.4", + "path-scurry": "^1.10.2" + } + }, "got": { "version": "13.0.0", "resolved": "https://registry.npmjs.org/got/-/got-13.0.0.tgz", @@ -1797,6 +2516,20 @@ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "jackspeak": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", + "integrity": "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==", + "requires": { + "@isaacs/cliui": "^8.0.2", + "@pkgjs/parseargs": "^0.11.0" + } + }, "json-buffer": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", @@ -1887,6 +2620,11 @@ "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", "integrity": 
"sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==" }, + "lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" + }, "log-update": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/log-update/-/log-update-5.0.1.tgz", @@ -1944,15 +2682,25 @@ } } }, + "long": { + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz", + "integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg==" + }, "lowercase-keys": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz", "integrity": "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==" }, + "lru-cache": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", + "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==" + }, "luxon": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.3.tgz", - "integrity": "sha512-tFWBiv3h7z+T/tDaoxA8rqTxy1CHV6gHS//QdaH4pulbq/JuBSGgQspQQqcgnwdAx6pNI7cmvz5Sv/addzHmUg==" + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.6.1.tgz", + "integrity": "sha512-tJLxrKJhO2ukZ5z0gyjY1zPh3Rh88Ej9P7jNrZiHMUXHae1yvI2imgOZtL1TO8TW6biMMKfTtAOoEJANgtWBMQ==" }, "mimic-fn": { "version": "2.1.0", @@ -1964,27 +2712,32 @@ "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-4.0.0.tgz", "integrity": "sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==" }, - "minipass": { - "version": "3.3.4", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.4.tgz", - "integrity": "sha512-I9WPbWHCGu8W+6k1ZiGpPu0GkoKBeorkfKNuAFBNS1HNFJvke82sxvI5bzcCNpWPorkOO5QQ+zomzzwRxejXiw==", + "minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", "requires": { - "yallist": "^4.0.0" + "brace-expansion": "^2.0.1" } }, + "minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==" + }, "minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.1.tgz", + "integrity": "sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg==", "requires": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" + "minipass": "^7.0.4", + "rimraf": "^5.0.5" } }, "mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==" + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", + "integrity": 
"sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==" }, "mkdirp-classic": { "version": "0.5.3", @@ -1997,9 +2750,9 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "nan": { - "version": "2.18.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz", - "integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w==", + "version": "2.22.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz", + "integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==", "optional": true }, "normalize-url": { @@ -2028,6 +2781,20 @@ "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-3.0.0.tgz", "integrity": "sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==" }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" + }, + "path-scurry": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.2.tgz", + "integrity": "sha512-7xTavNy5RQXnsjANvVvMkEjvloOinkAjv/Z6Ildz9v2RinZ4SBKTWFOVRbaF8p0vpHnyjV/UwNDdKuUv6M5qcA==", + "requires": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + } + }, "pend": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", @@ -2038,6 +2805,25 @@ "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-6.1.1.tgz", "integrity": "sha512-mQUvGU6aUFQ+rNvTIAcZuWGRT9a6f6Yrg9bHs4ImKF+HZCEK+plBvnAZYSIQztknZF2qnzNtr6F8s0+IuptdlQ==" }, + "protobufjs": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", + "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + } + }, "pump": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", @@ -2094,6 +2880,14 @@ "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz", "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==" }, + "rimraf": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.5.tgz", + "integrity": "sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A==", + "requires": { + "glob": "^10.3.7" + } + }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -2104,6 +2898,19 @@ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": 
"sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" + }, "signal-exit": { "version": "3.0.7", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", @@ -2136,14 +2943,14 @@ "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==" }, "ssh2": { - "version": "1.14.0", - "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.14.0.tgz", - "integrity": "sha512-AqzD1UCqit8tbOKoj6ztDDi1ffJZ2rV2SwlgrVVrHPkV5vWqGJOVp5pmtj18PunkPJAuKQsnInyKV+/Nb2bUnA==", + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", + "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", "requires": { "asn1": "^0.2.6", "bcrypt-pbkdf": "^1.0.2", - "cpu-features": "~0.0.8", - "nan": "^2.17.0" + "cpu-features": "~0.0.10", + "nan": "^2.20.0" } }, "string_decoder": { @@ -2164,6 +2971,16 @@ "strip-ansi": "^6.0.1" } }, + "string-width-cjs": { + "version": "npm:string-width@4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, "strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -2172,40 +2989,43 @@ "ansi-regex": "^5.0.1" } }, + "strip-ansi-cjs": { + "version": "npm:strip-ansi@6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "requires": { + "ansi-regex": "^5.0.1" + } + }, "tar": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.0.tgz", - "integrity": "sha512-/Wo7DcT0u5HUV486xg675HtjNd3BXZ6xDbzsCUZPt5iw8bTQ63bP0Raut3mvro9u+CUyq7YQd8Cx55fsZXxqLQ==", + "version": "7.4.3", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", + "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", "requires": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.0.1", + "mkdirp": "^3.0.1", + "yallist": "^5.0.0" }, "dependencies": { "chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==" - }, - "minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", + "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==" } } }, "tar-fs": { - "version": 
"2.0.1", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.0.1.tgz", - "integrity": "sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==", + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz", + "integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==", "requires": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", - "tar-stream": "^2.0.0" + "tar-stream": "^2.1.4" } }, "tar-stream": { @@ -2218,27 +3038,6 @@ "fs-constants": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.1.1" - }, - "dependencies": { - "bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "requires": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - } } }, "tweetnacl": { @@ -2261,6 +3060,19 @@ "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, + "uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==" + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "requires": { + "isexe": "^2.0.0" + } + }, "wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -2271,6 +3083,16 @@ "strip-ansi": "^6.0.0" } }, + "wrap-ansi-cjs": { + "version": "npm:wrap-ansi@7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", @@ -2282,9 +3104,9 @@ "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==" }, "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", + "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==" }, "yargs": { "version": "17.7.2", diff --git a/dev/diff/package.json b/dev/diff/package.json index 98e1c844a7..a5a7beb0df 100644 --- a/dev/diff/package.json +++ b/dev/diff/package.json @@ -2,18 +2,18 @@ "name": "diff", "type": "module", "dependencies": { - "chalk": "^5.3.0", - "dockerode": "^4.0.0", + "chalk": "^5.4.1", + "dockerode": "^4.0.6", "enquirer": "^2.4.1", 
"extract-zip": "^2.0.1", - "fs-extra": "^11.1.1", + "fs-extra": "^11.3.0", "got": "^13.0.0", "keypress": "^0.2.1", "listr2": "^6.6.1", "lodash-es": "^4.17.21", - "luxon": "^3.4.3", + "luxon": "^3.6.1", "pretty-bytes": "^6.1.1", - "tar": "^6.2.0", + "tar": "^7.4.3", "yargs": "^17.7.2" }, "engines": { diff --git a/dev/diff/settings_local.py b/dev/diff/settings_local.py index 593ccadd7f..c255cac23d 100644 --- a/dev/diff/settings_local.py +++ b/dev/diff/settings_local.py @@ -1,7 +1,7 @@ # Copyright The IETF Trust 2007-2019, All Rights Reserved # -*- coding: utf-8 -*- -from ietf.settings import * # pyflakes:ignore +from ietf.settings import * # pyflakes:ignore ALLOWED_HOSTS = ['*'] @@ -37,7 +37,6 @@ SUBMIT_YANG_CATALOG_MODEL_DIR = '/assets/ietf-ftp/yang/catalogmod/' SUBMIT_YANG_DRAFT_MODEL_DIR = '/assets/ietf-ftp/yang/draftmod/' -SUBMIT_YANG_INVAL_MODEL_DIR = '/assets/ietf-ftp/yang/invalmod/' SUBMIT_YANG_IANA_MODEL_DIR = '/assets/ietf-ftp/yang/ianamod/' SUBMIT_YANG_RFC_MODEL_DIR = '/assets/ietf-ftp/yang/rfcmod/' @@ -57,9 +56,11 @@ BOFREQ_PATH = '/assets/ietf-ftp/bofreq/' CONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/' STATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/' -INTERNET_DRAFT_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/' +INTERNET_DRAFT_ARCHIVE_DIR = '/assets/collection/draft-archive' INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/' BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml' +FTP_DIR = '/assets/ftp' +NFS_METRICS_TMP_DIR = '/assets/tmp' NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/' SLIDE_STAGING_PATH = 'test/staging/' diff --git a/dev/k8s-get-deploy-name/.editorconfig b/dev/k8s-get-deploy-name/.editorconfig new file mode 100644 index 0000000000..fec5c66519 --- /dev/null +++ b/dev/k8s-get-deploy-name/.editorconfig @@ -0,0 +1,7 @@ +[*] +indent_size = 2 +indent_style = space +charset = utf-8 +trim_trailing_whitespace = false +end_of_line = lf +insert_final_newline = true diff --git a/dev/k8s-get-deploy-name/.gitignore b/dev/k8s-get-deploy-name/.gitignore new file mode 100644 index 0000000000..07e6e472cc --- /dev/null +++ b/dev/k8s-get-deploy-name/.gitignore @@ -0,0 +1 @@ +/node_modules diff --git a/dev/k8s-get-deploy-name/.npmrc b/dev/k8s-get-deploy-name/.npmrc new file mode 100644 index 0000000000..580a68c499 --- /dev/null +++ b/dev/k8s-get-deploy-name/.npmrc @@ -0,0 +1,3 @@ +audit = false +fund = false +save-exact = true diff --git a/dev/k8s-get-deploy-name/README.md b/dev/k8s-get-deploy-name/README.md new file mode 100644 index 0000000000..a6605e4dd2 --- /dev/null +++ b/dev/k8s-get-deploy-name/README.md @@ -0,0 +1,16 @@ +# Datatracker Get Deploy Name + +This tool process and slugify a git branch into an appropriate subdomain name. + +## Usage + +1. From the `dev/k8s-get-deploy-name` directory, install the dependencies: +```sh +npm install +``` +2. Run the command: (replacing the `branch` argument) +```sh +node /cli.js --branch feat/fooBar-123 +``` + +The subdomain name will be output. It can then be used in a workflow as a namespace name and subdomain value. 
diff --git a/dev/k8s-get-deploy-name/cli.js b/dev/k8s-get-deploy-name/cli.js new file mode 100644 index 0000000000..b6c3b5119e --- /dev/null +++ b/dev/k8s-get-deploy-name/cli.js @@ -0,0 +1,22 @@ +#!/usr/bin/env node + +import yargs from 'yargs/yargs' +import { hideBin } from 'yargs/helpers' +import slugify from 'slugify' + +const argv = yargs(hideBin(process.argv)).argv + +let branch = argv.branch +if (!branch) { + throw new Error('Missing --branch argument!') +} +if (branch.indexOf('/') >= 0) { + branch = branch.split('/').slice(1).join('-') +} +branch = slugify(branch, { lower: true, strict: true }) +if (branch.length < 1) { + throw new Error('Branch name is empty!') +} +process.stdout.write(`dt-${branch}`) + +process.exit(0) diff --git a/dev/k8s-get-deploy-name/package-lock.json b/dev/k8s-get-deploy-name/package-lock.json new file mode 100644 index 0000000000..e492a4cd38 --- /dev/null +++ b/dev/k8s-get-deploy-name/package-lock.json @@ -0,0 +1,303 @@ +{ + "name": "k8s-get-deploy-name", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "k8s-get-deploy-name", + "dependencies": { + "slugify": "1.6.6", + "yargs": "17.7.2" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": 
"sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/slugify": { + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.6.tgz", + "integrity": "sha512-h+z7HKHYXj6wJU+AnS/+IH8Uh9fdcX1Lrhg1/VMdf9PwoBQXFcXiAdsy2tSK0P6gKwJLXp02r90ahUCqHk9rrw==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "engines": { + "node": ">=12" + } + } + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": 
"sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "requires": { + "color-convert": "^2.0.1" + } + }, + "cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==" + }, + "slugify": { + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.6.tgz", + "integrity": "sha512-h+z7HKHYXj6wJU+AnS/+IH8Uh9fdcX1Lrhg1/VMdf9PwoBQXFcXiAdsy2tSK0P6gKwJLXp02r90ahUCqHk9rrw==" + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": 
"sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==" + }, + "yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "requires": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + } + }, + "yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==" + } + } +} diff --git a/dev/k8s-get-deploy-name/package.json b/dev/k8s-get-deploy-name/package.json new file mode 100644 index 0000000000..849f5d9b8d --- /dev/null +++ b/dev/k8s-get-deploy-name/package.json @@ -0,0 +1,8 @@ +{ + "name": "k8s-get-deploy-name", + "type": "module", + "dependencies": { + "slugify": "1.6.6", + "yargs": "17.7.2" + } +} diff --git a/bin/add-old-drafts-from-archive.py b/dev/legacy/add-old-drafts-from-archive.py similarity index 100% rename from bin/add-old-drafts-from-archive.py rename to dev/legacy/add-old-drafts-from-archive.py diff --git a/dev/legacy/notes/notes.html b/dev/legacy/notes/notes.html index 85980a5b1b..cb10a18689 100644 --- a/dev/legacy/notes/notes.html +++ b/dev/legacy/notes/notes.html @@ -355,7 +355,7 @@

Introduction

in one place.

With my recent investigations of code analysis tools, I thought it might be a good idea to start collecting these in one place for the project.

-
+
Henrik <henrik@levkowetz.com>, 23 Mar 2014
@@ -398,8 +398,9 @@

PyChecker

do the right thing, but once it was made to run on the datatracker code, and ignore the django code, it didn't report anything that PyFlakes hadn't already caught.

-
-Henrik <henrik@levkowetz.com>, 23 Mar 2014
+
+ Henrik <henrik@levkowetz.com>, 23 Mar 2014
+
diff --git a/ietf/bin/recalculate-rfc-authors-snapshot b/dev/legacy/recalculate-rfc-authors-snapshot similarity index 100% rename from ietf/bin/recalculate-rfc-authors-snapshot rename to dev/legacy/recalculate-rfc-authors-snapshot diff --git a/dev/tests/debug.sh b/dev/tests/debug.sh index 405daae377..d87c504bb9 100644 --- a/dev/tests/debug.sh +++ b/dev/tests/debug.sh @@ -3,7 +3,7 @@ # This script recreate the same environment used during tests on GitHub Actions # and drops you into a terminal at the point where the actual tests would be run. # -# Refer to https://github.com/ietf-tools/datatracker/blob/main/.github/workflows/build.yml#L141-L155 +# Refer to https://github.com/ietf-tools/datatracker/blob/main/.github/workflows/tests.yml#L47-L66 # for the commands to run next. # # Simply type "exit" + ENTER to exit and shutdown this test environment. @@ -12,7 +12,7 @@ echo "Fetching latest images..." docker pull ghcr.io/ietf-tools/datatracker-app-base:latest docker pull ghcr.io/ietf-tools/datatracker-db:latest echo "Starting containers..." -docker compose -f docker-compose.debug.yml -p dtdebug up -d +docker compose -f docker-compose.debug.yml -p dtdebug --compatibility up -d echo "Copying working directory into container..." docker compose -p dtdebug cp ../../. app:/__w/datatracker/datatracker/ echo "Run prepare script..." diff --git a/dev/tests/docker-compose.debug.yml b/dev/tests/docker-compose.debug.yml index 74491a5b2e..8117b92375 100644 --- a/dev/tests/docker-compose.debug.yml +++ b/dev/tests/docker-compose.debug.yml @@ -1,26 +1,35 @@ -# This docker-compose replicates the test workflow happening on GitHub during a PR / build check. -# To be used from the debug.sh script. - -version: '3.8' - -services: - app: - image: ghcr.io/ietf-tools/datatracker-app-base:latest - command: -f /dev/null - working_dir: /__w/datatracker/datatracker - entrypoint: tail - hostname: app - volumes: - - /var/run/docker.sock:/var/run/docker.sock - environment: - CI: 'true' - GITHUB_ACTIONS: 'true' - HOME: /github/home - db: - image: ghcr.io/ietf-tools/datatracker-db:latest - restart: unless-stopped - volumes: - - postgresdb-data:/var/lib/postgresql/data - -volumes: - postgresdb-data: +# This docker-compose replicates the test workflow happening on GitHub during a PR / build check. +# To be used from the debug.sh script. 
+ +version: '3.8' + +services: + app: + image: ghcr.io/ietf-tools/datatracker-app-base:latest + command: -f /dev/null + working_dir: /__w/datatracker/datatracker + entrypoint: tail + hostname: app + volumes: + - /var/run/docker.sock:/var/run/docker.sock + environment: + CI: 'true' + GITHUB_ACTIONS: 'true' + HOME: /github/home + deploy: + resources: + limits: + cpus: '2' + memory: '7GB' + + db: + image: ghcr.io/ietf-tools/datatracker-db:latest + restart: unless-stopped + volumes: + - postgresdb-data:/var/lib/postgresql/data + + blobstore: + image: ghcr.io/ietf-tools/datatracker-devblobstore:latest + +volumes: + postgresdb-data: diff --git a/dev/tests/settings_local.py b/dev/tests/settings_local.py index 0cd761c0a9..e1ffd60edb 100644 --- a/dev/tests/settings_local.py +++ b/dev/tests/settings_local.py @@ -1,7 +1,7 @@ # Copyright The IETF Trust 2007-2019, All Rights Reserved # -*- coding: utf-8 -*- -from ietf.settings import * # pyflakes:ignore +from ietf.settings import * # pyflakes:ignore ALLOWED_HOSTS = ['*'] @@ -17,8 +17,8 @@ } IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits" -IDSUBMIT_REPOSITORY_PATH = "test/id/" -IDSUBMIT_STAGING_PATH = "test/staging/" +IDSUBMIT_REPOSITORY_PATH = "/assets/ietfdata/doc/draft/repository" +IDSUBMIT_STAGING_PATH = "/assets/www6s/staging/" AGENDA_PATH = '/assets/www6s/proceedings/' MEETINGHOST_LOGO_PATH = AGENDA_PATH @@ -36,7 +36,6 @@ SUBMIT_YANG_CATALOG_MODEL_DIR = '/assets/ietf-ftp/yang/catalogmod/' SUBMIT_YANG_DRAFT_MODEL_DIR = '/assets/ietf-ftp/yang/draftmod/' -SUBMIT_YANG_INVAL_MODEL_DIR = '/assets/ietf-ftp/yang/invalmod/' SUBMIT_YANG_IANA_MODEL_DIR = '/assets/ietf-ftp/yang/ianamod/' SUBMIT_YANG_RFC_MODEL_DIR = '/assets/ietf-ftp/yang/rfcmod/' @@ -56,9 +55,11 @@ BOFREQ_PATH = '/assets/ietf-ftp/bofreq/' CONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/' STATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/' -INTERNET_DRAFT_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/' +INTERNET_DRAFT_ARCHIVE_DIR = '/assets/collection/draft-archive' INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/' BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml' +FTP_DIR = '/assets/ftp' +NFS_METRICS_TMP_DIR = '/assets/tmp' NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/' SLIDE_STAGING_PATH = 'test/staging/' diff --git a/docker-compose.yml b/docker-compose.yml index 413c04ff63..073d04b896 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,3 @@ -version: '3.8' - services: app: build: @@ -15,6 +13,8 @@ services: # network_mode: service:db depends_on: + - blobdb + - blobstore - db - mq @@ -37,7 +37,7 @@ services: db: image: ghcr.io/ietf-tools/datatracker-db:latest # build: - # context: .. + # context: . # dockerfile: docker/db.Dockerfile restart: unless-stopped volumes: @@ -68,7 +68,9 @@ services: restart: unless-stopped celery: - image: ghcr.io/ietf-tools/datatracker-celery:latest + build: + context: . + dockerfile: docker/celery.Dockerfile init: true environment: CELERY_APP: ietf @@ -78,18 +80,75 @@ services: command: - '--loglevel=INFO' depends_on: + - blobdb + - blobstore - db + - mq restart: unless-stopped stop_grace_period: 1m volumes: - .:/workspace - app-assets:/assets + replicator: + build: + context: . 
+ dockerfile: docker/celery.Dockerfile + init: true + environment: + CELERY_APP: ietf + CELERY_ROLE: worker + UPDATE_REQUIREMENTS_FROM: requirements.txt + DEV_MODE: "yes" + command: + - '--loglevel=INFO' + - '--queues=blobdb' + - '--concurrency=1' + + depends_on: + - blobdb + - blobstore + - db + - mq + restart: unless-stopped + stop_grace_period: 1m + volumes: + - .:/workspace + - app-assets:/assets + + blobstore: + image: ghcr.io/ietf-tools/datatracker-devblobstore:latest + restart: unless-stopped + volumes: + - "minio-data:/data" + + blobdb: + image: postgres:17 + restart: unless-stopped + environment: + POSTGRES_DB: blob + POSTGRES_USER: dt + POSTGRES_PASSWORD: abcd1234 + volumes: + - blobdb-data:/var/lib/postgresql/data + +# typesense: +# image: typesense/typesense:30.1 +# restart: on-failure +# ports: +# - "8108:8108" +# volumes: +# - ./typesense-data:/data +# command: +# - '--data-dir=/data' +# - '--api-key=typesense-api-key' +# - '--enable-cors' + # Celery Beat is a periodic task runner. It is not normally needed for development, # but can be enabled by uncommenting the following. # # beat: -# image: ghcr.io/ietf-tools/datatracker-celery:latest +# image: "${COMPOSE_PROJECT_NAME}-celery" # init: true # environment: # CELERY_APP: ietf @@ -103,7 +162,10 @@ services: # stop_grace_period: 1m # volumes: # - .:/workspace +# - app-assets:/assets volumes: postgresdb-data: app-assets: + minio-data: + blobdb-data: diff --git a/docker/README.md b/docker/README.md index bc9af7c212..0ca79a6e89 100644 --- a/docker/README.md +++ b/docker/README.md @@ -1,14 +1,28 @@ # Datatracker Development in Docker +- [Getting started](#getting-started) +- [Using Visual Studio Code](#using-visual-studio-code) + - [Initial Setup](#initial-setup) + - [Subsequent Launch](#subsequent-launch) + - [Usage](#usage) +- [Using Other Editors / Generic](#using-other-editors--generic) + - [Exit Environment](#exit-environment) + - [Accessing PostgreSQL Port](#accessing-postgresql-port) +- [Clean and Rebuild DB from latest image](#clean-and-rebuild-db-from-latest-image) +- [Clean all](#clean-all) +- [Updating an older environment](#updating-an-older-environment) +- [Notes / Troubleshooting](#notes--troubleshooting) + ## Getting started 1. [Set up Docker](https://docs.docker.com/get-started/) on your preferred platform. On Windows, it is highly recommended to use the [WSL 2 *(Windows Subsystem for Linux)*](https://docs.docker.com/desktop/windows/wsl/) backend. +> [!IMPORTANT] > See the [IETF Tools Windows Dev guide](https://github.com/ietf-tools/.github/blob/main/docs/windows-dev.md) on how to get started when using Windows. -2. On Linux, you must also install [Docker Compose](https://docs.docker.com/compose/install/). Docker Desktop for Mac and Windows already include Docker Compose. +2. On Linux, you must [install Docker Compose manually](https://docs.docker.com/compose/install/linux/#install-the-plugin-manually) and not install Docker Desktop. On Mac and Windows install Docker Desktop which already includes Docker Compose. -2. If you have a copy of the datatracker code checked out already, simply `cd` to the top-level directory. +3. If you have a copy of the datatracker code checked out already, simply `cd` to the top-level directory. If not, check out a datatracker branch as usual. We'll check out `main` below, but you can use any branch: @@ -18,7 +32,7 @@ git checkout main ``` -3. Follow the instructions for your preferred editor: +4. 
Follow the instructions for your preferred editor: - [Visual Studio Code](#using-visual-studio-code) - [Other Editors / Generic](#using-other-editors--generic) @@ -122,7 +136,14 @@ docker compose down to terminate the containers. -### Clean and Rebuild DB from latest image +### Accessing PostgreSQL Port + +The port is exposed but not automatically mapped to `5432` to avoid potential conflicts with the host. To get the mapped port, run the command *(from the project `/docker` directory)*: +```sh +docker compose port db 5432 +``` + +## Clean and Rebuild DB from latest image To delete the active DB container, its volume and get the latest image / DB dump, simply run the following command: @@ -140,7 +161,7 @@ docker compose pull db docker compose build --no-cache db ``` -### Clean all +## Clean all To delete all containers for this project, its associated images and purge any remaining dangling images, simply run the following command: @@ -157,11 +178,19 @@ docker compose down -v --rmi all docker image prune ``` -### Accessing PostgreSQL Port +## Updating an older environment + +If you already have a clone, such as from a previous codesprint, and are updating that clone, before starting the datatracker from the updated image: +1. `rm ietf/settings_local.py` *(The startup script will put a new one, appropriate to the current release, in place)* +1. Execute the [Clean all](#clean-all) sequence above. + +If the dev environment fails to start, even after running the [Clean all](#clean-all) sequence above, you can fully purge all docker cache, containers, images and volumes by running the command below. + +> [!CAUTION] +> Note that this will delete everything docker-related, including non-datatracker docker resources you might have. -The port is exposed but not automatically mapped to `5432` to avoid potential conflicts with the host. To get the mapped port, run the command *(from the project `/docker` directory)*: ```sh -docker compose port db 5432 +docker system prune -a --volumes ``` ## Notes / Troubleshooting @@ -183,3 +212,17 @@ The content of the source files will be copied into the target `.ics` files. Mak ### Missing assets in the data folder Because including all assets in the image would significantly increase the file size, they are not included by default. You can however fetch them by running the **Fetch assets via rsync** task in VS Code or run manually the script `docker/scripts/app-rsync-extras.sh` + +### Linux file permissions leaking to the host system + +If on the host filesystem you have permissions that look like this, + +```bash +$ ls -la +total 4624 +drwxrwxr-x 2 100999 100999 4096 May 25 07:56 bin +drwxrwxr-x 5 100999 100999 4096 May 25 07:56 client +(etc...) +``` + +Try uninstalling Docker Desktop and installing Docker Compose manually. The Docker Compose bundled with Docker Desktop is incompatible with our software. See also [Rootless Docker: file ownership changes #3343](https://github.com/lando/lando/issues/3343), [Docker context desktop-linux has container permission issues #75](https://github.com/docker/desktop-linux/issues/75). 
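The README section above documents how to discover the database container's mapped port but stops there. Below is a minimal host-side sketch of putting that port to use, assuming `psql` is installed on the host; the `django` user and `datatracker` database names and the trust-based authentication come from the dev DB image configuration elsewhere in this change, and the port number is only an illustrative example of what `docker compose port` might print:

```sh
# Run from the project's docker/ directory
docker compose port db 5432      # prints something like 0.0.0.0:49154

# Connect from the host using the reported port; the dev image is built with
# POSTGRES_HOST_AUTH_METHOD=trust, so no password prompt is expected
psql -h localhost -p 49154 -U django datatracker
```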
diff --git a/docker/app.Dockerfile b/docker/app.Dockerfile index c8e0fe7ad3..dd4cf72ffd 100644 --- a/docker/app.Dockerfile +++ b/docker/app.Dockerfile @@ -10,12 +10,7 @@ ARG USER_GID=$USER_UID COPY docker/scripts/app-setup-debian.sh /tmp/library-scripts/docker-setup-debian.sh RUN sed -i 's/\r$//' /tmp/library-scripts/docker-setup-debian.sh && chmod +x /tmp/library-scripts/docker-setup-debian.sh -# Add Postgresql Apt Repository to get 14 -RUN echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list -RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - - RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ - && apt-get install -y --no-install-recommends postgresql-client-14 pgloader \ # Remove imagemagick due to https://security-tracker.debian.org/tracker/CVE-2019-10131 && apt-get purge -y imagemagick imagemagick-6-common \ # Install common packages, non-root user @@ -43,8 +38,8 @@ RUN rm -rf /tmp/library-scripts # Copy the startup file COPY docker/scripts/app-init.sh /docker-init.sh COPY docker/scripts/app-start.sh /docker-start.sh -RUN sed -i 's/\r$//' /docker-init.sh && chmod +x /docker-init.sh -RUN sed -i 's/\r$//' /docker-start.sh && chmod +x /docker-start.sh +RUN sed -i 's/\r$//' /docker-init.sh && chmod +rx /docker-init.sh +RUN sed -i 's/\r$//' /docker-start.sh && chmod +rx /docker-start.sh # Fix user UID / GID to match host RUN groupmod --gid $USER_GID $USERNAME \ diff --git a/docker/base.Dockerfile b/docker/base.Dockerfile index 5401007fd8..2501636049 100644 --- a/docker/base.Dockerfile +++ b/docker/base.Dockerfile @@ -1,155 +1,159 @@ -FROM python:3.9-bullseye -LABEL maintainer="IETF Tools Team " - -ENV DEBIAN_FRONTEND=noninteractive -ENV NODE_MAJOR=16 - -# Update system packages -RUN apt-get update \ - && apt-get -qy upgrade \ - && apt-get -y install --no-install-recommends apt-utils dialog 2>&1 - -# Add Node.js Source -RUN apt-get install -y --no-install-recommends ca-certificates curl gnupg \ - && mkdir -p /etc/apt/keyrings\ - && curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg -RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list - -# Add Docker Source -RUN curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg -RUN echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian \ - $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null - -# Add PostgreSQL Source -RUN echo "deb http://apt.postgresql.org/pub/repos/apt $(. 
/etc/os-release && echo "$VERSION_CODENAME")-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list -RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - - -# Install the packages we need -RUN apt-get update --fix-missing && apt-get install -qy --no-install-recommends \ - apache2-utils \ - apt-file \ - bash \ - build-essential \ - curl \ - default-jdk \ - docker-ce-cli \ - enscript \ - firefox-esr \ - gawk \ - g++ \ - gcc \ - ghostscript \ - git \ - gnupg \ - jq \ - less \ - libcairo2-dev \ - libgtk2.0-0 \ - libgtk-3-0 \ - libnotify-dev \ - libgconf-2-4 \ - libgbm-dev \ - libnss3 \ - libxss1 \ - libasound2 \ - libxtst6 \ - libmagic-dev \ - libmariadb-dev \ - libmemcached-tools \ - locales \ - make \ - mariadb-client \ - memcached \ - nano \ - netcat \ - nodejs \ - pgloader \ - pigz \ - postgresql-client-14 \ - pv \ - python3-ipython \ - ripgrep \ - rsync \ - rsyslog \ - ruby \ - ruby-rubygems \ - unzip \ - wget \ - xauth \ - xvfb \ - yang-tools \ - zsh - -# Install kramdown-rfc2629 (ruby) -RUN gem install kramdown-rfc2629 - -# Install chromedriver -COPY docker/scripts/app-install-chromedriver.sh /tmp/app-install-chromedriver.sh -RUN sed -i 's/\r$//' /tmp/app-install-chromedriver.sh && \ - chmod +x /tmp/app-install-chromedriver.sh -RUN /tmp/app-install-chromedriver.sh - -# Fix /dev/shm permissions for chromedriver -RUN chmod 1777 /dev/shm - -# GeckoDriver -ARG GECKODRIVER_VERSION=latest -RUN GK_VERSION=$(if [ ${GECKODRIVER_VERSION:-latest} = "latest" ]; then echo "0.33.0"; else echo $GECKODRIVER_VERSION; fi) \ - && echo "Using GeckoDriver version: "$GK_VERSION \ - && wget --no-verbose -O /tmp/geckodriver.tar.gz https://github.com/mozilla/geckodriver/releases/download/v$GK_VERSION/geckodriver-v$GK_VERSION-linux64.tar.gz \ - && rm -rf /opt/geckodriver \ - && tar -C /opt -zxf /tmp/geckodriver.tar.gz \ - && rm /tmp/geckodriver.tar.gz \ - && mv /opt/geckodriver /opt/geckodriver-$GK_VERSION \ - && chmod 755 /opt/geckodriver-$GK_VERSION \ - && ln -fs /opt/geckodriver-$GK_VERSION /usr/bin/geckodriver - -# Activate Yarn -RUN corepack enable - -# Get rid of installation files we don't need in the image, to reduce size -RUN apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /var/cache/apt/* - -# "fake" dbus address to prevent errors -# https://github.com/SeleniumHQ/docker-selenium/issues/87 -ENV DBUS_SESSION_BUS_ADDRESS=/dev/null - -# avoid million NPM install messages -ENV npm_config_loglevel warn -# allow installing when the main user is root -ENV npm_config_unsafe_perm true -# disable NPM funding messages -ENV npm_config_fund false - -# Set locale to en_US.UTF-8 -RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \ - echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen && \ - echo "LANG=en_US.UTF-8" > /etc/locale.conf && \ - dpkg-reconfigure locales && \ - locale-gen en_US.UTF-8 && \ - update-locale LC_ALL en_US.UTF-8 -ENV LC_ALL en_US.UTF-8 - -# Install idnits -ADD https://raw.githubusercontent.com/ietf-tools/idnits-mirror/main/idnits /usr/local/bin/ -RUN chmod +rx /usr/local/bin/idnits - -# Turn off rsyslog kernel logging (doesn't work in Docker) -RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf - -# Colorize the bash shell -RUN sed -i 's/#force_color_prompt=/force_color_prompt=/' /root/.bashrc - -# Turn off rsyslog kernel logging (doesn't work in Docker) -RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf - -# Fetch wait-for utility -ADD https://raw.githubusercontent.com/eficode/wait-for/v2.1.3/wait-for /usr/local/bin/ -RUN chmod +rx /usr/local/bin/wait-for 
- -# Create assets directory -RUN mkdir -p /assets - -# Create workspace -RUN mkdir -p /workspace -WORKDIR /workspace +FROM python:3.12-bookworm +LABEL maintainer="IETF Tools Team " + +ENV DEBIAN_FRONTEND=noninteractive +ENV NODE_MAJOR=16 + +# Update system packages +RUN apt-get update \ + && apt-get -qy upgrade \ + && apt-get -y install --no-install-recommends apt-utils dialog 2>&1 + +# Add Node.js Source +RUN apt-get install -y --no-install-recommends ca-certificates curl gnupg \ + && mkdir -p /etc/apt/keyrings \ + && curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg \ + && echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list +RUN echo "Package: nodejs" >> /etc/apt/preferences.d/preferences \ + && echo "Pin: origin deb.nodesource.com" >> /etc/apt/preferences.d/preferences \ + && echo "Pin-Priority: 1001" >> /etc/apt/preferences.d/preferences + +# Add Docker Source +RUN mkdir -p /etc/apt/keyrings \ + && curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /etc/apt/keyrings/docker-archive-keyring.gpg \ + && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | tee /etc/apt/sources.list.d/docker.list + +# Add PostgreSQL Source +RUN mkdir -p /etc/apt/keyrings \ + && curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /etc/apt/keyrings/apt.postgresql.org.gpg \ + && echo "deb [signed-by=/etc/apt/keyrings/apt.postgresql.org.gpg] https://apt.postgresql.org/pub/repos/apt $(. /etc/os-release && echo "$VERSION_CODENAME")-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list + +# Install the packages we need +RUN apt-get update --fix-missing && apt-get install -qy --no-install-recommends \ + apache2-utils \ + apt-file \ + bash \ + build-essential \ + curl \ + default-jdk \ + docker-ce-cli \ + enscript \ + firefox-esr \ + gawk \ + g++ \ + gcc \ + ghostscript \ + git \ + gnupg \ + jq \ + less \ + libcairo2-dev \ + libgtk2.0-0 \ + libgtk-3-0 \ + libnotify-dev \ + libgconf-2-4 \ + libgbm-dev \ + libnss3 \ + libxss1 \ + libasound2 \ + libxtst6 \ + libmagic-dev \ + libmariadb-dev \ + libmemcached-tools \ + libyang2-tools \ + locales \ + make \ + mariadb-client \ + memcached \ + nano \ + netcat-traditional \ + nodejs \ + pgloader \ + pigz \ + postgresql-client-17 \ + pv \ + python3-ipython \ + ripgrep \ + rsync \ + rsyslog \ + ruby \ + ruby-rubygems \ + unzip \ + wget \ + xauth \ + xvfb \ + zsh + +# Install kramdown-rfc2629 (ruby) +RUN gem install kramdown-rfc2629 + +# GeckoDriver +ARG GECKODRIVER_VERSION=latest +RUN GK_VERSION=$(if [ ${GECKODRIVER_VERSION:-latest} = "latest" ]; then echo "0.34.0"; else echo $GECKODRIVER_VERSION; fi) \ + && echo "Using GeckoDriver version: "$GK_VERSION \ + && wget --no-verbose -O /tmp/geckodriver.tar.gz https://github.com/mozilla/geckodriver/releases/download/v$GK_VERSION/geckodriver-v$GK_VERSION-linux64.tar.gz \ + && rm -rf /opt/geckodriver \ + && tar -C /opt -zxf /tmp/geckodriver.tar.gz \ + && rm /tmp/geckodriver.tar.gz \ + && mv /opt/geckodriver /opt/geckodriver-$GK_VERSION \ + && chmod 755 /opt/geckodriver-$GK_VERSION \ + && ln -fs /opt/geckodriver-$GK_VERSION /usr/bin/geckodriver + +# Activate Yarn +RUN corepack enable + +# Get rid of installation files we don't need in the image, to reduce size +RUN 
apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /var/cache/apt/* + +# "fake" dbus address to prevent errors +# https://github.com/SeleniumHQ/docker-selenium/issues/87 +ENV DBUS_SESSION_BUS_ADDRESS=/dev/null + +# avoid million NPM install messages +ENV npm_config_loglevel=warn +# allow installing when the main user is root +ENV npm_config_unsafe_perm=true +# disable NPM funding messages +ENV npm_config_fund=false + +# Set locale to en_US.UTF-8 +RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \ + echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen && \ + echo "LANG=en_US.UTF-8" > /etc/locale.conf && \ + dpkg-reconfigure locales && \ + locale-gen en_US.UTF-8 && \ + update-locale LC_ALL en_US.UTF-8 +ENV LC_ALL=en_US.UTF-8 + +# Install idnits +ADD https://raw.githubusercontent.com/ietf-tools/idnits-mirror/main/idnits /usr/local/bin/ +RUN chmod +rx /usr/local/bin/idnits + +# Install required fonts +RUN mkdir -p /tmp/fonts && \ + wget -q -O /tmp/fonts.tar.gz https://github.com/ietf-tools/xml2rfc-fonts/archive/refs/tags/3.22.0.tar.gz && \ + tar zxf /tmp/fonts.tar.gz -C /tmp/fonts && \ + mv /tmp/fonts/*/noto/* /usr/local/share/fonts/ && \ + mv /tmp/fonts/*/roboto_mono/* /usr/local/share/fonts/ && \ + rm -rf /tmp/fonts.tar.gz /tmp/fonts/ && \ + fc-cache -f + +# Turn off rsyslog kernel logging (doesn't work in Docker) +RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf + +# Colorize the bash shell +RUN sed -i 's/#force_color_prompt=/force_color_prompt=/' /root/.bashrc + +# Turn off rsyslog kernel logging (doesn't work in Docker) +RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf + +# Fetch wait-for utility +ADD https://raw.githubusercontent.com/eficode/wait-for/v2.1.3/wait-for /usr/local/bin/ +RUN chmod +rx /usr/local/bin/wait-for + +# Create assets directory +RUN mkdir -p /assets + +# Create workspace +RUN mkdir -p /workspace +WORKDIR /workspace diff --git a/docker/celery.Dockerfile b/docker/celery.Dockerfile new file mode 100644 index 0000000000..e93ca3cf77 --- /dev/null +++ b/docker/celery.Dockerfile @@ -0,0 +1,55 @@ +FROM ghcr.io/ietf-tools/datatracker-app-base:latest +LABEL maintainer="IETF Tools Team " + +ENV DEBIAN_FRONTEND=noninteractive + +# Install needed packages and setup non-root user. +ARG USERNAME=dev +ARG USER_UID=1000 +ARG USER_GID=$USER_UID +COPY docker/scripts/app-setup-debian.sh /tmp/library-scripts/docker-setup-debian.sh +RUN sed -i 's/\r$//' /tmp/library-scripts/docker-setup-debian.sh && chmod +x /tmp/library-scripts/docker-setup-debian.sh + +RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ + # Remove imagemagick due to https://security-tracker.debian.org/tracker/CVE-2019-10131 + && apt-get purge -y imagemagick imagemagick-6-common \ + # Install common packages, non-root user + # Syntax: ./docker-setup-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! 
flag] [Add non-free packages] + && bash /tmp/library-scripts/docker-setup-debian.sh "true" "${USERNAME}" "${USER_UID}" "${USER_GID}" "false" "true" "true" + +# Setup default python tools in a venv via pipx to avoid conflicts +ENV PIPX_HOME=/usr/local/py-utils \ + PIPX_BIN_DIR=/usr/local/py-utils/bin +ENV PATH=${PATH}:${PIPX_BIN_DIR} +COPY docker/scripts/app-setup-python.sh /tmp/library-scripts/docker-setup-python.sh +RUN sed -i 's/\r$//' /tmp/library-scripts/docker-setup-python.sh && chmod +x /tmp/library-scripts/docker-setup-python.sh +RUN bash /tmp/library-scripts/docker-setup-python.sh "none" "/usr/local" "${PIPX_HOME}" "${USERNAME}" + +# Remove library scripts for final image +RUN rm -rf /tmp/library-scripts + +# Copy the startup file +COPY docker/scripts/app-init-celery.sh /docker-init.sh +RUN sed -i 's/\r$//' /docker-init.sh && \ + chmod +x /docker-init.sh + +ENTRYPOINT [ "/docker-init.sh" ] + +# Fix user UID / GID to match host +RUN groupmod --gid $USER_GID $USERNAME \ + && usermod --uid $USER_UID --gid $USER_GID $USERNAME \ + && chown -R $USER_UID:$USER_GID /home/$USERNAME \ + || exit 0 + +# Switch to local dev user +USER dev:dev + +# Install current datatracker python dependencies +COPY requirements.txt /tmp/pip-tmp/ +RUN pip3 --disable-pip-version-check --no-cache-dir install --user --no-warn-script-location -r /tmp/pip-tmp/requirements.txt +RUN pip3 --disable-pip-version-check --no-cache-dir install --user --no-warn-script-location watchdog[watchmedo] + +RUN sudo rm -rf /tmp/pip-tmp + +VOLUME [ "/assets" ] + diff --git a/docker/cleanall b/docker/cleanall index 91eac1764b..c6104aaef9 100755 --- a/docker/cleanall +++ b/docker/cleanall @@ -1,5 +1,11 @@ #!/bin/bash +if test $(basename $PWD ) != "docker" +then + echo "Run this from the docker directory" 1>&2 + exit 1 +fi + read -p "Stop and remove all containers, volumes and images for this project? [y/N] " -n 1 -r echo if [[ $REPLY =~ ^[Yy]$ ]] @@ -7,6 +13,5 @@ then cd .. echo "Shutting down any instance still running and purge images..." docker compose down -v --rmi all - cd docker echo "Done!" fi diff --git a/docker/cleandb b/docker/cleandb index 322e4639a2..c881503eae 100755 --- a/docker/cleandb +++ b/docker/cleandb @@ -1,5 +1,11 @@ #!/bin/bash +if test $(basename $PWD ) != "docker" +then + echo "Run this from the docker directory" 1>&2 + exit 1 +fi + cd .. echo "Shutting down any instance still running..." docker compose down @@ -9,5 +15,5 @@ docker volume rm -f "${PROJNAME}_postgresdb-data" echo "Rebuilding the DB image..." docker compose pull db docker compose build --no-cache db -cd docker + echo "Done!" 
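The new docker/celery.Dockerfile is what the compose file's `celery` and `replicator` services now build from (see the docker-compose.yml changes above). The following is a short, assumed workflow sketch for rebuilding and exercising those services during development; the commands are plain Docker Compose usage and the service names come from this change, but the sequence itself is not a documented procedure:

```sh
# From the repository root: rebuild the images based on docker/celery.Dockerfile
docker compose build celery replicator

# Recreate just those services and follow the replicator worker, which consumes
# the blobdb queue configured in docker-compose.yml
docker compose up -d celery replicator
docker compose logs -f replicator
```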
diff --git a/docker/configs/nginx-proxy.conf b/docker/configs/nginx-proxy.conf index 3068cc71d7..5a9ae31ad0 100644 --- a/docker/configs/nginx-proxy.conf +++ b/docker/configs/nginx-proxy.conf @@ -4,6 +4,7 @@ server { proxy_read_timeout 1d; proxy_send_timeout 1d; + client_max_body_size 0; # disable checking root /var/www/html; index index.html index.htm index.nginx-debian.html; diff --git a/docker/configs/pgadmin-servers.json b/docker/configs/pgadmin-servers.json index 8b1c181d13..b4458af923 100644 --- a/docker/configs/pgadmin-servers.json +++ b/docker/configs/pgadmin-servers.json @@ -1,22 +1,22 @@ -{ - "Servers": { - "1": { - "Name": "Local Dev", - "Group": "Servers", - "Host": "db", - "Port": 5432, - "MaintenanceDB": "postgres", - "Username": "django", - "UseSSHTunnel": 0, - "TunnelPort": "22", - "TunnelAuthentication": 0, - "KerberosAuthentication": false, - "ConnectionParameters": { - "sslmode": "prefer", - "connect_timeout": 10, - "sslcert": "/.postgresql/postgresql.crt", - "sslkey": "/.postgresql/postgresql.key" - } - } - } -} +{ + "Servers": { + "1": { + "Name": "Local Dev", + "Group": "Servers", + "Host": "db", + "Port": 5432, + "MaintenanceDB": "postgres", + "Username": "django", + "UseSSHTunnel": 0, + "TunnelPort": "22", + "TunnelAuthentication": 0, + "KerberosAuthentication": false, + "ConnectionParameters": { + "sslmode": "prefer", + "connect_timeout": 10, + "sslcert": "/.postgresql/postgresql.crt", + "sslkey": "/.postgresql/postgresql.key" + } + } + } +} diff --git a/docker/configs/settings_local.py b/docker/configs/settings_local.py index 07c16c2e9a..94adc516a4 100644 --- a/docker/configs/settings_local.py +++ b/docker/configs/settings_local.py @@ -1,14 +1,30 @@ -# Copyright The IETF Trust 2007-2019, All Rights Reserved +# Copyright The IETF Trust 2007-2025, All Rights Reserved # -*- coding: utf-8 -*- -from ietf.settings import * # pyflakes:ignore +from ietf.settings import * # pyflakes:ignore +from ietf.settings import ( + ARTIFACT_STORAGE_NAMES, + STORAGES, + BLOBSTORAGE_MAX_ATTEMPTS, + BLOBSTORAGE_READ_TIMEOUT, + BLOBSTORAGE_CONNECT_TIMEOUT, +) ALLOWED_HOSTS = ['*'] -from ietf.settings_postgresqldb import DATABASES # pyflakes:ignore +from ietf.settings_postgresqldb import DATABASES # pyflakes:ignore +DATABASE_ROUTERS = ["ietf.blobdb.routers.BlobdbStorageRouter"] +BLOBDB_DATABASE = "blobdb" +BLOBDB_REPLICATION = { + "ENABLED": True, + "DEST_STORAGE_PATTERN": "r2-{bucket}", + "INCLUDE_BUCKETS": ARTIFACT_STORAGE_NAMES, + "EXCLUDE_BUCKETS": ["staging"], + "VERBOSE_LOGGING": True, +} IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits" -IDSUBMIT_STAGING_PATH = "test/staging/" +IDSUBMIT_STAGING_PATH = "/assets/www6s/staging/" AGENDA_PATH = '/assets/www6s/proceedings/' MEETINGHOST_LOGO_PATH = AGENDA_PATH @@ -26,7 +42,6 @@ SUBMIT_YANG_CATALOG_MODEL_DIR = '/assets/ietf-ftp/yang/catalogmod/' SUBMIT_YANG_DRAFT_MODEL_DIR = '/assets/ietf-ftp/yang/draftmod/' -SUBMIT_YANG_INVAL_MODEL_DIR = '/assets/ietf-ftp/yang/invalmod/' SUBMIT_YANG_IANA_MODEL_DIR = '/assets/ietf-ftp/yang/ianamod/' SUBMIT_YANG_RFC_MODEL_DIR = '/assets/ietf-ftp/yang/rfcmod/' @@ -46,15 +61,62 @@ BOFREQ_PATH = '/assets/ietf-ftp/bofreq/' CONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/' STATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/' -INTERNET_DRAFT_ARCHIVE_DIR = '/assets/archive/id' +INTERNET_DRAFT_ARCHIVE_DIR = '/assets/collection/draft-archive' INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/archive/id' BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml' IDSUBMIT_REPOSITORY_PATH = INTERNET_DRAFT_PATH +FTP_DIR = 
'/assets/ftp' +NFS_METRICS_TMP_DIR = '/assets/tmp' NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/' -SLIDE_STAGING_PATH = 'test/staging/' +SLIDE_STAGING_PATH = '/assets/www6s/staging/' DE_GFM_BINARY = '/usr/local/bin/de-gfm' STATIC_IETF_ORG = "/_static" STATIC_IETF_ORG_INTERNAL = "http://static" + + +# Blob replication storage for dev +import botocore.config +for storagename in ARTIFACT_STORAGE_NAMES: + replica_storagename = f"r2-{storagename}" + STORAGES[replica_storagename] = { + "BACKEND": "ietf.doc.storage.MetadataS3Storage", + "OPTIONS": dict( + endpoint_url="http://blobstore:9000", + access_key="minio_root", + secret_key="minio_pass", + security_token=None, + client_config=botocore.config.Config( + request_checksum_calculation="when_required", + response_checksum_validation="when_required", + signature_version="s3v4", + connect_timeout=BLOBSTORAGE_CONNECT_TIMEOUT, + read_timeout=BLOBSTORAGE_READ_TIMEOUT, + retries={"total_max_attempts": BLOBSTORAGE_MAX_ATTEMPTS}, + ), + verify=False, + bucket_name=f"{storagename}", + ), + } + +# For dev on rfc-index generation, create a red_bucket/ directory in the project root +# and uncomment these settings. Generated files will appear in this directory. To +# generate an accurate index, put up-to-date copies of unusable-rfc-numbers.json, +# april-first-rfc-numbers.json, and publication-std-levels.json in this directory +# before generating the index. +# +# STORAGES["red_bucket"] = { +# "BACKEND": "django.core.files.storage.FileSystemStorage", +# "OPTIONS": {"location": "red_bucket"}, +# } + +APP_API_TOKENS = { + "ietf.api.red_api" : ["devtoken", "redtoken"], # Not a real secret + "ietf.api.views_rpc" : ["devtoken"], # Not a real secret +} + +# Errata system api configuration +ERRATA_METADATA_NOTIFICATION_URL = "http://host.docker.internal:8808/api/rfc_metadata_update/" +ERRATA_METADATA_NOTIFICATION_API_KEY = "not a real secret" diff --git a/docker/configs/settings_local_vite.py b/docker/configs/settings_local_vite.py index 7fb12a003d..9116905b12 100644 --- a/docker/configs/settings_local_vite.py +++ b/docker/configs/settings_local_vite.py @@ -2,5 +2,9 @@ # -*- coding: utf-8 -*- from ietf.settings_local import * # pyflakes:ignore +from ietf.settings_local import DJANGO_VITE -DJANGO_VITE_DEV_MODE = True +DJANGO_VITE["default"] |= { + "dev_mode": True, + "dev_server_port": 3000, +} diff --git a/docker/configs/settings_postgresqldb.py b/docker/configs/settings_postgresqldb.py index 05d19b9a86..9b98586658 100644 --- a/docker/configs/settings_postgresqldb.py +++ b/docker/configs/settings_postgresqldb.py @@ -7,4 +7,12 @@ 'USER': 'django', 'PASSWORD': 'RkTkDPFnKpko', }, + 'blobdb': { + 'HOST': 'blobdb', + 'PORT': 5432, + 'NAME': 'blob', + 'ENGINE': 'django.db.backends.postgresql', + 'USER': 'dt', + 'PASSWORD': 'abcd1234', + }, } diff --git a/docker/db.Dockerfile b/docker/db.Dockerfile index 58d7f2728c..48ab298780 100644 --- a/docker/db.Dockerfile +++ b/docker/db.Dockerfile @@ -1,7 +1,7 @@ # ===================== # --- Builder Stage --- # ===================== -FROM postgres:14.6 AS builder +FROM postgres:17 AS builder ENV POSTGRES_PASSWORD=hk2j22sfiv ENV POSTGRES_USER=django @@ -19,7 +19,7 @@ RUN ["/usr/local/bin/docker-entrypoint.sh", "postgres"] # =================== # --- Final Image --- # =================== -FROM postgres:14.6 +FROM postgres:17 LABEL maintainer="IETF Tools Team " COPY --from=builder /data $PGDATA @@ -28,3 +28,10 @@ ENV POSTGRES_PASSWORD=hk2j22sfiv ENV POSTGRES_USER=django ENV POSTGRES_DB=datatracker ENV 
POSTGRES_HOST_AUTH_METHOD=trust + +# build-args for db dump tagging - exposed in the environment and +# in image metadata +ARG datatracker_dumpinfo_date="" +ENV DATATRACKER_DUMPINFO_DATE=$datatracker_dumpinfo_date +ARG datatracker_snapshot="" +ENV DATATRACKER_SNAPSHOT=$datatracker_snapshot diff --git a/docker/devblobstore.Dockerfile b/docker/devblobstore.Dockerfile new file mode 100644 index 0000000000..40bfbd0e96 --- /dev/null +++ b/docker/devblobstore.Dockerfile @@ -0,0 +1,9 @@ +ARG MINIO_VERSION=latest +FROM quay.io/minio/minio:${MINIO_VERSION} +LABEL maintainer="IETF Tools Team " + +ENV MINIO_ROOT_USER=minio_root +ENV MINIO_ROOT_PASSWORD=minio_pass +ENV MINIO_DEFAULT_BUCKETS=defaultbucket + +CMD ["server", "--console-address", ":9001", "/data"] diff --git a/docker/docker-compose.celery.yml b/docker/docker-compose.celery.yml deleted file mode 100644 index dedae2d004..0000000000 --- a/docker/docker-compose.celery.yml +++ /dev/null @@ -1,51 +0,0 @@ -version: '2.4' -# Use version 2.4 for mem_limit setting. Version 3+ uses deploy.resources.limits.memory -# instead, but that only works for swarm with docker-compose 1.25.1. - -services: - mq: - image: rabbitmq:3-alpine - user: '${RABBITMQ_UID:-499:499}' - hostname: datatracker-mq -# deploy: -# resources: -# limits: -# memory: 1gb # coordinate with settings in rabbitmq.conf -# reservations: -# memory: 512mb - mem_limit: 1gb # coordinate with settings in rabbitmq.conf - ports: - - '${MQ_PORT:-5672}:5672' - volumes: - - ./lib.rabbitmq:/var/lib/rabbitmq - - ./rabbitmq.conf:/etc/rabbitmq/conf.d/90-ietf.conf - - ./definitions.json:/ietf-conf/definitions.json - restart: unless-stopped - logging: - driver: "syslog" - options: - syslog-address: 'unixgram:///dev/log' - tag: 'docker/{{.Name}}' -# syslog-address: "tcp://ietfa.amsl.com:514" - - celery: - image: ghcr.io/ietf-tools/datatracker-celery:latest - environment: - CELERY_APP: ietf - # UPDATE_REQUIREMENTS: 1 # uncomment to update Python requirements on startup - command: - - '--loglevel=INFO' - user: '${CELERY_UID:-499:499}' - volumes: - - '${DATATRACKER_PATH:-..}:/workspace' - - '${MYSQL_SOCKET_PATH:-/run/mysql}:/run/mysql' - depends_on: - - mq - network_mode: 'service:mq' - restart: unless-stopped - logging: - driver: "syslog" - options: - syslog-address: 'unixgram:///dev/log' - tag: 'docker/{{.Name}}' -# syslog-address: "tcp://ietfa.amsl.com:514" diff --git a/docker/docker-compose.extend.yml b/docker/docker-compose.extend.yml index d055c976f4..12ebe447d5 100644 --- a/docker/docker-compose.extend.yml +++ b/docker/docker-compose.extend.yml @@ -1,5 +1,3 @@ -version: '3.8' - services: app: ports: @@ -18,6 +16,10 @@ services: pgadmin: ports: - '5433' + blobstore: + ports: + - '9000:9000' + - '9001:9001' celery: volumes: - .:/workspace diff --git a/docker/scripts/app-configure-blobstore.py b/docker/scripts/app-configure-blobstore.py new file mode 100755 index 0000000000..9ae64e0041 --- /dev/null +++ b/docker/scripts/app-configure-blobstore.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# Copyright The IETF Trust 2024, All Rights Reserved + +import boto3 +import botocore.config +import botocore.exceptions +import os +import sys + +from ietf.settings import ARTIFACT_STORAGE_NAMES + + +def init_blobstore(): + blobstore = boto3.resource( + "s3", + endpoint_url=os.environ.get("BLOB_STORE_ENDPOINT_URL", "http://blobstore:9000"), + aws_access_key_id=os.environ.get("BLOB_STORE_ACCESS_KEY", "minio_root"), + aws_secret_access_key=os.environ.get("BLOB_STORE_SECRET_KEY", "minio_pass"), + aws_session_token=None, + 
config=botocore.config.Config( + request_checksum_calculation="when_required", + response_checksum_validation="when_required", + signature_version="s3v4", + ), + ) + for bucketname in ARTIFACT_STORAGE_NAMES: + adjusted_bucket_name = ( + os.environ.get("BLOB_STORE_BUCKET_PREFIX", "") + + bucketname + + os.environ.get("BLOB_STORE_BUCKET_SUFFIX", "") + ).strip() + try: + blobstore.create_bucket(Bucket=adjusted_bucket_name) + except botocore.exceptions.ClientError as err: + if err.response["Error"]["Code"] == "BucketAlreadyExists": + print(f"Bucket {bucketname} already exists") + else: + print(f"Error creating {bucketname}: {err.response['Error']['Code']}") + else: + print(f"Bucket {bucketname} created") + + +if __name__ == "__main__": + sys.exit(init_blobstore()) diff --git a/docker/scripts/app-create-dirs.sh b/docker/scripts/app-create-dirs.sh index d9296ecffe..3eb328a280 100755 --- a/docker/scripts/app-create-dirs.sh +++ b/docker/scripts/app-create-dirs.sh @@ -1,14 +1,9 @@ #!/bin/bash for sub in \ - test/id \ - test/staging \ - test/archive \ - test/rfc \ - test/media \ - test/wiki/ietf \ - data/nomcom_keys/public_keys \ /assets/archive/id \ + /assets/collection \ + /assets/collection/draft-archive \ /assets/ietf-ftp \ /assets/ietf-ftp/bofreq \ /assets/ietf-ftp/charter \ @@ -25,6 +20,7 @@ for sub in \ /assets/ietfdata/derived \ /assets/ietfdata/derived/bibxml \ /assets/ietfdata/derived/bibxml/bibxml-ids \ + /assets/ietfdata/doc/draft/repository \ /assets/www6s \ /assets/www6s/staging \ /assets/www6s/wg-descriptions \ @@ -33,6 +29,11 @@ for sub in \ /assets/www6/iesg \ /assets/www6/iesg/evaluation \ /assets/media/photo \ + /assets/tmp \ + /assets/ftp \ + /assets/ftp/charter \ + /assets/ftp/internet-drafts \ + /assets/ftp/review \ ; do if [ ! -d "$sub" ]; then echo "Creating dir $sub" diff --git a/dev/celery/docker-init.sh b/docker/scripts/app-init-celery.sh similarity index 78% rename from dev/celery/docker-init.sh rename to docker/scripts/app-init-celery.sh index 4fd1f1294f..17925633d2 100755 --- a/dev/celery/docker-init.sh +++ b/docker/scripts/app-init-celery.sh @@ -49,11 +49,16 @@ if [[ -n "${CELERY_GID}" ]]; then fi run_as_celery_uid () { - SU_OPTS=() - if [[ -n "${CELERY_GROUP}" ]]; then - SU_OPTS+=("-g" "${CELERY_GROUP}") + IAM=$(whoami) + if [ "${IAM}" = "${CELERY_USERNAME:-root}" ]; then + SU_OPTS=() + if [[ -n "${CELERY_GROUP}" ]]; then + SU_OPTS+=("-g" "${CELERY_GROUP}") + fi + su "${SU_OPTS[@]}" "${CELERY_USERNAME:-root}" -s /bin/sh -c "$*" + else + /bin/sh -c "$*" fi - su "${SU_OPTS[@]}" "${CELERY_USERNAME:-root}" -s /bin/sh -c "$@" } log_term_timing_msgs () { @@ -85,19 +90,29 @@ if [[ "${CELERY_ROLE}" == "worker" ]]; then run_as_celery_uid /usr/local/bin/python $WORKSPACEDIR/ietf/manage.py check fi +USER_BIN_PATH="/home/dev/.local/bin" +WATCHMEDO="$USER_BIN_PATH/watchmedo" +# Find a celery that works +if [[ -x "$USER_BIN_PATH/celery" ]]; then + # This branch is used for dev + CELERY="$USER_BIN_PATH/celery" +else + # This branch is used for sandbox instances + CELERY="/usr/local/bin/celery" +fi trap 'trap "" TERM; cleanup' TERM # start celery in the background so we can trap the TERM signal -if [[ -n "${DEV_MODE}" ]]; then - watchmedo auto-restart \ +if [[ -n "${DEV_MODE}" && -x "${WATCHMEDO}" ]]; then + $WATCHMEDO auto-restart \ --patterns '*.py' \ --directory 'ietf' \ --recursive \ --debounce-interval 5 \ -- \ - celery --app="${CELERY_APP:-ietf}" "${CELERY_OPTS[@]}" "$@" & + $CELERY --app="${CELERY_APP:-ietf}" "${CELERY_OPTS[@]}" $@ & celery_pid=$! 
else - celery --app="${CELERY_APP:-ietf}" "${CELERY_OPTS[@]}" "$@" & + $CELERY --app="${CELERY_APP:-ietf}" "${CELERY_OPTS[@]}" "$@" & celery_pid=$! fi diff --git a/docker/scripts/app-init.sh b/docker/scripts/app-init.sh index 7e58e797ca..1d895cdf53 100755 --- a/docker/scripts/app-init.sh +++ b/docker/scripts/app-init.sh @@ -2,6 +2,12 @@ WORKSPACEDIR="/workspace" +# Handle Linux host mounting the workspace dir as root +if [ ! -O "${WORKSPACEDIR}/ietf" ]; then + sudo chown -R dev:dev $WORKSPACEDIR +fi + +# Start rsyslog service sudo service rsyslog start &>/dev/null # Add /workspace as a safe git directory @@ -18,9 +24,6 @@ sudo chown -R dev:dev "$WORKSPACEDIR/.vite" sudo chown -R dev:dev "$WORKSPACEDIR/.yarn/unplugged" sudo chown dev:dev "/assets" -echo "Fix chromedriver /dev/shm permissions..." -sudo chmod 1777 /dev/shm - # Run nginx echo "Starting nginx..." sudo nginx @@ -70,6 +73,11 @@ echo "Creating data directories..." chmod +x ./docker/scripts/app-create-dirs.sh ./docker/scripts/app-create-dirs.sh +# Configure the development blobstore + +echo "Configuring blobstore..." +PYTHONPATH=/workspace python ./docker/scripts/app-configure-blobstore.py + # Download latest coverage results file echo "Downloading latest coverage results file..." @@ -93,12 +101,14 @@ echo "Running initial checks..." /usr/local/bin/python $WORKSPACEDIR/ietf/manage.py check --settings=settings_local # Migrate, adjusting to what the current state of the underlying database might be: - /usr/local/bin/python $WORKSPACEDIR/ietf/manage.py migrate --fake-initial --settings=settings_local +# Apply migrations to the blobdb database as well (most are skipped) +/usr/local/bin/python $WORKSPACEDIR/ietf/manage.py migrate --settings=settings_local --database=blobdb + if [ -z "$EDITOR_VSCODE" ]; then CODE=0 - python -m smtpd -n -c DebuggingServer localhost:2025 & + python -m aiosmtpd -n -c ietf.utils.aiosmtpd.DevDebuggingHandler -l localhost:2025 & if [ -z "$*" ]; then echo "-----------------------------------------------------------------" echo "Ready!" diff --git a/docker/scripts/app-install-chromedriver.sh b/docker/scripts/app-install-chromedriver.sh deleted file mode 100755 index 43532a1cf6..0000000000 --- a/docker/scripts/app-install-chromedriver.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -HOSTARCH=$(arch) -if [ $HOSTARCH == "x86_64" ]; then - echo "Installing chrome driver..." - wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - - echo "deb http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list - apt-get update -y - apt-get install -y google-chrome-stable - CHROMEVER=$(google-chrome --product-version | grep -o "[^\.]*\.[^\.]*\.[^\.]*") - DRIVERVER=$(curl -s "https://chromedriver.storage.googleapis.com/LATEST_RELEASE_$CHROMEVER") - wget -q --continue -P /chromedriver "http://chromedriver.storage.googleapis.com/$DRIVERVER/chromedriver_linux64.zip" - unzip /chromedriver/chromedriver* -d /chromedriver - ln -s /chromedriver/chromedriver /usr/local/bin/chromedriver - ln -s /chromedriver/chromedriver /usr/bin/chromedriver -else - echo "This architecture doesn't support chromedriver. Skipping installation..." 
-fi \ No newline at end of file diff --git a/docker/scripts/app-setup-debian.sh b/docker/scripts/app-setup-debian.sh index ddfc351995..ea9cc3fb87 100644 --- a/docker/scripts/app-setup-debian.sh +++ b/docker/scripts/app-setup-debian.sh @@ -10,7 +10,6 @@ # Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages] set -e - INSTALL_ZSH=${1:-"true"} USERNAME=${2:-"automatic"} USER_UID=${3:-"automatic"} @@ -116,18 +115,9 @@ if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then # Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then # Bring in variables from /etc/os-release like VERSION_CODENAME - . /etc/os-release - sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list - sed -i -E "s/deb-src http:\/\/(deb|httredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list - sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list - sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list - # Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html - sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list + . /etc/os-release + sed -i -E "s/Components: main/Components: main contrib non-free/" /etc/apt/sources.list.d/debian.sources + echo "Running apt-get update..." 
apt-get update package_list="${package_list} manpages-posix manpages-posix-dev" diff --git a/ietf/__init__.py b/ietf/__init__.py index 59f9802dec..26124c3c67 100644 --- a/ietf/__init__.py +++ b/ietf/__init__.py @@ -6,7 +6,7 @@ # Version must stay in single quotes for automatic CI replace # Don't add patch number here: -__version__ = '11.0.0-dev' +__version__ = '1.0.0-dev' # Release hash must stay in single quotes for automatic CI replace __release_hash__ = '' @@ -17,6 +17,24 @@ # set this to ".p1", ".p2", etc. after patching __patch__ = "" +if __version__ == '1.0.0-dev' and __release_hash__ == '' and __release_branch__ == '': + import subprocess + branch = subprocess.run( + ["/usr/bin/git", "branch", "--show-current"], + capture_output=True, + ).stdout.decode().strip() + git_hash = subprocess.run( + ["/usr/bin/git", "rev-parse", "head"], + capture_output=True, + ).stdout.decode().strip() + rev = subprocess.run( + ["/usr/bin/git", "describe", "--tags", git_hash], + capture_output=True, + ).stdout.decode().strip().split('-', 1)[0] + __version__ = f"{rev}-dev" + __release_branch__ = branch + __release_hash__ = git_hash + # This will make sure the app is always imported when # Django starts so that shared_task will use this app. diff --git a/ietf/ietfauth/management/__init__.py b/ietf/admin/__init__.py similarity index 100% rename from ietf/ietfauth/management/__init__.py rename to ietf/admin/__init__.py diff --git a/ietf/admin/apps.py b/ietf/admin/apps.py new file mode 100644 index 0000000000..20b762cfec --- /dev/null +++ b/ietf/admin/apps.py @@ -0,0 +1,6 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +from django.contrib.admin import apps as admin_apps + + +class AdminConfig(admin_apps.AdminConfig): + default_site = "ietf.admin.sites.AdminSite" diff --git a/ietf/admin/sites.py b/ietf/admin/sites.py new file mode 100644 index 0000000000..69cb62ae20 --- /dev/null +++ b/ietf/admin/sites.py @@ -0,0 +1,15 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +from django.contrib.admin import AdminSite as _AdminSite +from django.conf import settings +from django.utils.safestring import mark_safe + + +class AdminSite(_AdminSite): + site_title = "Datatracker admin" + + @staticmethod + def site_header(): + if settings.SERVER_MODE == "production": + return "Datatracker administration" + else: + return mark_safe('Datatracker administration δ') diff --git a/ietf/api/__init__.py b/ietf/api/__init__.py index 54b4b7424b..d4562f97dd 100644 --- a/ietf/api/__init__.py +++ b/ietf/api/__init__.py @@ -4,11 +4,14 @@ import datetime import re +import sys from urllib.parse import urlencode -from django.conf import settings +from django.apps import apps as django_apps from django.core.exceptions import ObjectDoesNotExist +from django.utils.module_loading import autodiscover_modules + import debug # pyflakes:ignore @@ -21,38 +24,30 @@ _api_list = [] -for _app in settings.INSTALLED_APPS: +OMITTED_APPS_APIS = ["ietf.status"] + +# Pre-py3.11, fromisoformat() does not handle Z or +HH tz offsets +HAVE_BROKEN_FROMISOFORMAT = sys.version_info < (3, 11, 0, "", 0) + +def populate_api_list(): _module_dict = globals() - if '.' in _app: - _root, _name = _app.split('.', 1) - if _root == 'ietf': - if not '.' in _name: - _api = Api(api_name=_name) - _module_dict[_name] = _api - _api_list.append((_name, _api)) + for app_config in django_apps.get_app_configs(): + if '.' in app_config.name and app_config.name not in OMITTED_APPS_APIS: + _root, _name = app_config.name.split('.', 1) + if _root == 'ietf': + if not '.' 
in _name: + _api = Api(api_name=_name) + _module_dict[_name] = _api + _api_list.append((_name, _api)) def autodiscover(): """ Auto-discover INSTALLED_APPS resources.py modules and fail silently when - not present. This forces an import on them to register any admin bits they + not present. This forces an import on them to register any resources they may want. """ + autodiscover_modules("resources") - from importlib import import_module - from django.conf import settings - from django.utils.module_loading import module_has_submodule - - for app in settings.INSTALLED_APPS: - mod = import_module(app) - # Attempt to import the app's admin module. - try: - import_module('%s.resources' % (app, )) - except: - # Decide whether to bubble up this error. If the app just - # doesn't have an admin module, we can ignore the error - # attempting to import it, otherwise we want it to bubble up. - if module_has_submodule(mod, "resources"): - raise class ModelResource(tastypie.resources.ModelResource): def generate_cache_key(self, *args, **kwargs): @@ -67,6 +62,35 @@ def generate_cache_key(self, *args, **kwargs): # Use a list plus a ``.join()`` because it's faster than concatenation. return "%s:%s:%s:%s" % (self._meta.api_name, self._meta.resource_name, ':'.join(args), smooshed) + def _z_aware_fromisoformat(self, value: str) -> datetime.datetime: + """datetime.datetime.fromisoformat replacement that works with python < 3.11""" + if HAVE_BROKEN_FROMISOFORMAT: + if value.upper().endswith("Z"): + value = value[:-1] + "+00:00" # Z -> UTC + elif re.match(r"[+-][0-9][0-9]$", value[-3:]): + value = value + ":00" # -04 -> -04:00 + return datetime.datetime.fromisoformat(value) + + def filter_value_to_python( + self, value, field_name, filters, filter_expr, filter_type + ): + py_value = super().filter_value_to_python( + value, field_name, filters, filter_expr, filter_type + ) + if isinstance( + self.fields[field_name], tastypie.fields.DateTimeField + ) and isinstance(py_value, str): + # Ensure datetime values are TZ-aware, using UTC by default + try: + dt = self._z_aware_fromisoformat(py_value) + except ValueError: + pass # let tastypie deal with the original value + else: + if dt.tzinfo is None: + dt = dt.replace(tzinfo=datetime.timezone.utc) + py_value = dt.isoformat() + return py_value + TIMEDELTA_REGEX = re.compile(r'^(?P\d+d)?\s?(?P\d+h)?\s?(?P\d+m)?\s?(?P\d+s?)$') @@ -154,5 +178,26 @@ def dehydrate(self, bundle, for_list=True): class Serializer(tastypie.serializers.Serializer): + OPTION_ESCAPE_NULLS = "datatracker-escape-nulls" + def format_datetime(self, data): - return data.astimezone(datetime.timezone.utc).replace(tzinfo=None).isoformat(timespec="seconds") + "Z" + return data.astimezone(datetime.UTC).replace(tzinfo=None).isoformat(timespec="seconds") + "Z" + + def to_simple(self, data, options): + options = options or {} + simple_data = super().to_simple(data, options) + if ( + options.get(self.OPTION_ESCAPE_NULLS, False) + and isinstance(simple_data, str) + ): + # replace nulls with unicode "symbol for null character", \u2400 + simple_data = simple_data.replace("\x00", "\u2400") + return simple_data + + def to_etree(self, data, options=None, name=None, depth=0): + # lxml does not escape nulls on its own, so ask to_simple() to do it. + # This is mostly (only?) an issue when generating errors responses for + # fuzzers. 
+ options = options or {} + options[self.OPTION_ESCAPE_NULLS] = True + return super().to_etree(data, options, name, depth) diff --git a/ietf/api/__init__.pyi b/ietf/api/__init__.pyi index 63d9bc513b..ededea90a7 100644 --- a/ietf/api/__init__.pyi +++ b/ietf/api/__init__.pyi @@ -30,4 +30,5 @@ class Serializer(): ... class ToOneField(tastypie.fields.ToOneField): ... class TimedeltaField(tastypie.fields.ApiField): ... +def populate_api_list() -> None: ... def autodiscover() -> None: ... diff --git a/ietf/api/apps.py b/ietf/api/apps.py new file mode 100644 index 0000000000..4549e0d7f2 --- /dev/null +++ b/ietf/api/apps.py @@ -0,0 +1,19 @@ +from django.apps import AppConfig +from . import populate_api_list + + +class ApiConfig(AppConfig): + name = "ietf.api" + + def ready(self): + """Hook to do init after the app registry is fully populated + + Importing models or accessing the app registry is ok here, but do not + interact with the database. See + https://docs.djangoproject.com/en/4.2/ref/applications/#django.apps.AppConfig.ready + """ + # Populate our API list now that the app registry is set up + populate_api_list() + + # Import drf-spectacular extensions + import ietf.api.schema # pyflakes: ignore diff --git a/ietf/api/authentication.py b/ietf/api/authentication.py new file mode 100644 index 0000000000..dfab0d72b8 --- /dev/null +++ b/ietf/api/authentication.py @@ -0,0 +1,19 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +# +from rest_framework import authentication +from django.contrib.auth.models import AnonymousUser + + +class ApiKeyAuthentication(authentication.BaseAuthentication): + """API-Key header authentication""" + + def authenticate(self, request): + """Extract the authentication token, if present + + This does not validate the token, it just arranges for it to be available in request.auth. + It's up to a Permissions class to validate it for the appropriate endpoint. + """ + token = request.META.get("HTTP_X_API_KEY", None) + if token is None: + return None + return AnonymousUser(), token # available as request.user and request.auth diff --git a/ietf/api/ietf_utils.py b/ietf/api/ietf_utils.py index 06b9d76aff..50767a5afd 100644 --- a/ietf/api/ietf_utils.py +++ b/ietf/api/ietf_utils.py @@ -2,14 +2,75 @@ # This is not utils.py because Tastypie implicitly consumes ietf.api.utils. # See ietf.api.__init__.py for details. +from functools import wraps +from typing import Callable, Optional, Union from django.conf import settings +from django.http import HttpResponseForbidden + def is_valid_token(endpoint, token): # This is where we would consider integration with vault # Settings implementation for now. if hasattr(settings, "APP_API_TOKENS"): token_store = settings.APP_API_TOKENS - if endpoint in token_store and token in token_store[endpoint]: - return True + if endpoint in token_store: + endpoint_tokens = token_store[endpoint] + # Be sure endpoints is a list or tuple so we don't accidentally use substring matching! + if not isinstance(endpoint_tokens, (list, tuple)): + endpoint_tokens = [endpoint_tokens] + if token in endpoint_tokens: + return True return False + + +def requires_api_token(func_or_endpoint: Optional[Union[Callable, str]] = None): + """Validate API token before executing the wrapped method + + Usage: + * Basic: endpoint defaults to the qualified name of the wrapped method. E.g., in ietf.api.views, + + @requires_api_token + def my_view(request): + ... 
+ + will require a token for "ietf.api.views.my_view" + + * Custom endpoint: specify the endpoint explicitly + + @requires_api_token("ietf.api.views.some_other_thing") + def my_view(request): + ... + + will require a token for "ietf.api.views.some_other_thing" + """ + + def decorate(f): + if _endpoint is None: + fname = getattr(f, "__qualname__", None) + if fname is None: + raise TypeError( + "Cannot automatically decorate function that does not support __qualname__. " + "Explicitly set the endpoint." + ) + endpoint = "{}.{}".format(f.__module__, fname) + else: + endpoint = _endpoint + + @wraps(f) + def wrapped(request, *args, **kwargs): + authtoken = request.META.get("HTTP_X_API_KEY", None) + if authtoken is None or not is_valid_token(endpoint, authtoken): + return HttpResponseForbidden() + return f(request, *args, **kwargs) + + return wrapped + + # Magic to allow decorator to be used with or without parentheses + if callable(func_or_endpoint): + func = func_or_endpoint + _endpoint = None + return decorate(func) + else: + _endpoint = func_or_endpoint + return decorate diff --git a/ietf/api/permissions.py b/ietf/api/permissions.py new file mode 100644 index 0000000000..8f7fdd026f --- /dev/null +++ b/ietf/api/permissions.py @@ -0,0 +1,39 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +# +from rest_framework import permissions +from ietf.api.ietf_utils import is_valid_token + + +class HasApiKey(permissions.BasePermission): + """Permissions class that validates a token using is_valid_token + + The view class must indicate the relevant endpoint by setting `api_key_endpoint`. + Must be used with an Authentication class that puts a token in request.auth. + """ + def has_permission(self, request, view): + endpoint = getattr(view, "api_key_endpoint", None) + auth_token = getattr(request, "auth", None) + if endpoint is not None and auth_token is not None: + return is_valid_token(endpoint, auth_token) + return False + + +class IsOwnPerson(permissions.BasePermission): + """Permission to access own Person object""" + def has_object_permission(self, request, view, obj): + if not (request.user.is_authenticated and hasattr(request.user, "person")): + return False + return obj == request.user.person + + +class BelongsToOwnPerson(permissions.BasePermission): + """Permission to access objects associated with own Person + + Requires that the object have a "person" field that indicates ownership. + """ + def has_object_permission(self, request, view, obj): + if not (request.user.is_authenticated and hasattr(request.user, "person")): + return False + return ( + hasattr(obj, "person") and obj.person == request.user.person + ) diff --git a/ietf/api/routers.py b/ietf/api/routers.py new file mode 100644 index 0000000000..99afdb242a --- /dev/null +++ b/ietf/api/routers.py @@ -0,0 +1,31 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +"""Custom django-rest-framework routers""" +from django.core.exceptions import ImproperlyConfigured +from rest_framework import routers + + +class PrefixedBasenameMixin: + """Mixin to add a prefix to the basename of a rest_framework BaseRouter""" + def __init__(self, name_prefix="", *args, **kwargs): + self.name_prefix = name_prefix + if len(self.name_prefix) == 0 or self.name_prefix[-1] == ".": + raise ImproperlyConfigured("Cannot use a name_prefix that is empty or ends with '.'") + super().__init__(*args, **kwargs) + + def register(self, prefix, viewset, basename=None): + # Get the superclass "register" method from the class this is mixed-in with. 
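A sketch of how the new ApiKeyAuthentication and HasApiKey classes above are intended to be combined on a DRF view; the view class and endpoint name here are hypothetical (illustrative, not part of the change itself):

    from rest_framework.response import Response
    from rest_framework.views import APIView

    from ietf.api.authentication import ApiKeyAuthentication
    from ietf.api.permissions import HasApiKey

    class ExampleIngestView(APIView):
        authentication_classes = [ApiKeyAuthentication]  # puts the X-Api-Key value in request.auth
        permission_classes = [HasApiKey]                 # validates it against settings.APP_API_TOKENS
        api_key_endpoint = "ietf.api.views.example_ingest"  # hypothetical endpoint name

        def post(self, request):
            return Response({"success": True})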
+ # This avoids typing issues with calling super().register() directly in a + # mixin class. + super_register = getattr(super(), "register") + if not super_register or not callable(super_register): + raise TypeError("Must mixin with superclass that has register() method") + super_register(prefix, viewset, basename=f"{self.name_prefix}.{basename}") + + +class PrefixedSimpleRouter(PrefixedBasenameMixin, routers.SimpleRouter): + """SimpleRouter that adds a dot-separated prefix to its basename""" + + +class PrefixedDefaultRouter(PrefixedBasenameMixin, routers.DefaultRouter): + """DefaultRouter that adds a dot-separated prefix to its basename""" + diff --git a/ietf/api/schema.py b/ietf/api/schema.py new file mode 100644 index 0000000000..7340149685 --- /dev/null +++ b/ietf/api/schema.py @@ -0,0 +1,20 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +# +from drf_spectacular.extensions import OpenApiAuthenticationExtension + + +class ApiKeyAuthenticationScheme(OpenApiAuthenticationExtension): + """Authentication scheme extension for the ApiKeyAuthentication + + Used by drf-spectacular when rendering the OpenAPI schema + """ + target_class = "ietf.api.authentication.ApiKeyAuthentication" + name = "apiKeyAuth" + + def get_security_definition(self, auto_schema): + return { + "type": "apiKey", + "description": "Shared secret in the X-Api-Key header", + "name": "X-Api-Key", + "in": "header", + } diff --git a/ietf/api/serializer.py b/ietf/api/serializer.py index 27f194c5b5..d5bca430e0 100644 --- a/ietf/api/serializer.py +++ b/ietf/api/serializer.py @@ -1,6 +1,9 @@ -# Copyright The IETF Trust 2018-2020, All Rights Reserved +# Copyright The IETF Trust 2018-2024, All Rights Reserved # -*- coding: utf-8 -*- +"""Serialization utilities +This is _not_ for django-rest-framework! 
+""" import hashlib import json diff --git a/ietf/api/serializers_rpc.py b/ietf/api/serializers_rpc.py new file mode 100644 index 0000000000..d888de4586 --- /dev/null +++ b/ietf/api/serializers_rpc.py @@ -0,0 +1,804 @@ +# Copyright The IETF Trust 2025-2026, All Rights Reserved +import datetime +from pathlib import Path +from typing import Literal, Optional + +from django.db import transaction +from django.urls import reverse as urlreverse +from django.utils import timezone +from drf_spectacular.types import OpenApiTypes +from drf_spectacular.utils import extend_schema_field +from rest_framework import serializers + +from ietf.doc.expire import move_draft_files_to_archive +from ietf.doc.models import ( + DocumentAuthor, + Document, + RelatedDocument, + State, + DocEvent, + RfcAuthor, +) +from ietf.doc.serializers import RfcAuthorSerializer +from ietf.doc.tasks import trigger_red_precomputer_task, update_rfc_searchindex_task +from ietf.doc.utils import ( + default_consensus, + prettify_std_name, + update_action_holders, + update_rfcauthors, +) +from ietf.group.models import Group, Role +from ietf.group.serializers import AreaSerializer +from ietf.name.models import StreamName, StdLevelName +from ietf.person.models import Person +from ietf.utils import log + + +class PersonSerializer(serializers.ModelSerializer): + email = serializers.EmailField(read_only=True) + picture = serializers.URLField(source="cdn_photo_url", read_only=True) + url = serializers.SerializerMethodField( + help_text="relative URL for datatracker person page" + ) + + class Meta: + model = Person + fields = ["id", "plain_name", "email", "picture", "url"] + read_only_fields = ["id", "plain_name", "email", "picture", "url"] + + @extend_schema_field(OpenApiTypes.URI) + def get_url(self, object: Person): + return urlreverse( + "ietf.person.views.profile", + kwargs={"email_or_name": object.email_address() or object.name}, + ) + + +class EmailPersonSerializer(serializers.Serializer): + email = serializers.EmailField(source="address") + person_pk = serializers.IntegerField(source="person.pk") + name = serializers.CharField(source="person.name") + last_name = serializers.CharField(source="person.last_name") + initials = serializers.CharField(source="person.initials") + + +class LowerCaseEmailField(serializers.EmailField): + def to_representation(self, value): + return super().to_representation(value).lower() + + +class AuthorPersonSerializer(serializers.ModelSerializer): + person_pk = serializers.IntegerField(source="pk", read_only=True) + last_name = serializers.CharField() + initials = serializers.CharField() + email_addresses = serializers.ListField( + source="email_set.all", child=LowerCaseEmailField() + ) + + class Meta: + model = Person + fields = ["person_pk", "name", "last_name", "initials", "email_addresses"] + + +class RfcWithAuthorsSerializer(serializers.ModelSerializer): + authors = AuthorPersonSerializer(many=True, source="author_persons") + + class Meta: + model = Document + fields = ["rfc_number", "authors"] + + +class DraftWithAuthorsSerializer(serializers.ModelSerializer): + draft_name = serializers.CharField(source="name") + authors = AuthorPersonSerializer(many=True, source="author_persons") + + class Meta: + model = Document + fields = ["draft_name", "authors"] + + +class WgChairSerializer(serializers.Serializer): + """Serialize a WG chair's name and email from a Role""" + + name = serializers.SerializerMethodField() + email = serializers.SerializerMethodField() + + @extend_schema_field(serializers.CharField) + 
def get_name(self, role: Role) -> str: + return role.person.plain_name() + + @extend_schema_field(serializers.EmailField) + def get_email(self, role: Role) -> str: + return role.email.email_address() + + +class DocumentAuthorSerializer(serializers.ModelSerializer): + """Serializer for a Person in a response""" + + plain_name = serializers.SerializerMethodField() + + class Meta: + model = DocumentAuthor + fields = ["person", "plain_name", "affiliation"] + + def get_plain_name(self, document_author: DocumentAuthor) -> str: + return document_author.person.plain_name() + + +class FullDraftSerializer(serializers.ModelSerializer): + # Redefine these fields so they don't pick up the regex validator patterns. + # There seem to be some non-compliant drafts in the system! If this serializer + # is used for a writeable view, the validation will need to be added back. + name = serializers.CharField(max_length=255) + title = serializers.CharField(max_length=255) + group = serializers.SlugRelatedField(slug_field="acronym", read_only=True) + area = AreaSerializer(read_only=True) + + # Other fields we need to add / adjust + source_format = serializers.SerializerMethodField() + authors = DocumentAuthorSerializer(many=True, source="documentauthor_set") + shepherd = serializers.PrimaryKeyRelatedField( + source="shepherd.person", read_only=True + ) + consensus = serializers.SerializerMethodField() + wg_chairs = serializers.SerializerMethodField() + + class Meta: + model = Document + fields = [ + "id", + "name", + "rev", + "stream", + "title", + "group", + "area", + "abstract", + "pages", + "source_format", + "authors", + "intended_std_level", + "consensus", + "shepherd", + "ad", + "wg_chairs", + ] + + def get_consensus(self, doc: Document) -> Optional[bool]: + return default_consensus(doc) + + @extend_schema_field(WgChairSerializer(many=True)) + def get_wg_chairs(self, doc: Document): + if doc.group is None: + return [] + chairs = doc.group.role_set.filter(name_id="chair").select_related( + "person", "email" + ) + return WgChairSerializer(chairs, many=True).data + + def get_source_format( + self, doc: Document + ) -> Literal["unknown", "xml-v2", "xml-v3", "txt"]: + submission = doc.submission() + if submission is None: + return "unknown" + if ".xml" in submission.file_types: + if submission.xml_version == "3": + return "xml-v3" + else: + return "xml-v2" + elif ".txt" in submission.file_types: + return "txt" + return "unknown" + + +class DraftSerializer(FullDraftSerializer): + class Meta: + model = Document + fields = [ + "id", + "name", + "rev", + "stream", + "title", + "group", + "pages", + "source_format", + "authors", + "consensus", + ] + + +class SubmittedToQueueSerializer(FullDraftSerializer): + submitted = serializers.SerializerMethodField() + consensus = serializers.SerializerMethodField() + + class Meta: + model = Document + fields = [ + "id", + "name", + "stream", + "submitted", + "consensus", + ] + + def get_submitted(self, doc) -> Optional[datetime.datetime]: + event = doc.sent_to_rfc_editor_event() + return None if event is None else event.time + + def get_consensus(self, doc) -> Optional[bool]: + return default_consensus(doc) + + +class OriginalStreamSerializer(serializers.ModelSerializer): + stream = serializers.CharField(read_only=True, source="orig_stream_id") + + class Meta: + model = Document + fields = ["rfc_number", "stream"] + + +class ReferenceSerializer(serializers.ModelSerializer): + class Meta: + model = Document + fields = ["id", "name"] + read_only_fields = ["id", "name"] + + +def 
_update_authors(rfc, authors_data): + # Construct unsaved instances from validated author data + new_authors = [RfcAuthor(**authdata) for authdata in authors_data] + # Update the RFC with the new author set + with transaction.atomic(): + change_events = update_rfcauthors(rfc, new_authors) + for event in change_events: + event.save() + return change_events + + +class SubseriesNameField(serializers.RegexField): + + def __init__(self, **kwargs): + # pattern: no leading 0, finite length (arbitrarily set to 5 digits) + regex = r"^(bcp|std|fyi)[1-9][0-9]{0,4}$" + super().__init__(regex, **kwargs) + + + +class RfcPubSerializer(serializers.ModelSerializer): + """Write-only serializer for RFC publication""" + # publication-related fields + published = serializers.DateTimeField(default_timezone=datetime.timezone.utc) + draft_name = serializers.RegexField( + required=False, regex=r"^draft-[a-zA-Z0-9-]+$" + ) + draft_rev = serializers.RegexField( + required=False, regex=r"^[0-9][0-9]$" + ) + + # fields on the RFC Document that need tweaking from ModelSerializer defaults + rfc_number = serializers.IntegerField(min_value=1, required=True) + group = serializers.SlugRelatedField( + slug_field="acronym", queryset=Group.objects.all(), required=False + ) + stream = serializers.PrimaryKeyRelatedField( + queryset=StreamName.objects.filter(used=True) + ) + std_level = serializers.PrimaryKeyRelatedField( + queryset=StdLevelName.objects.filter(used=True), + ) + ad = serializers.PrimaryKeyRelatedField( + queryset=Person.objects.all(), + allow_null=True, + required=False, + ) + obsoletes = serializers.SlugRelatedField( + many=True, + required=False, + slug_field="rfc_number", + queryset=Document.objects.filter(type_id="rfc"), + ) + updates = serializers.SlugRelatedField( + many=True, + required=False, + slug_field="rfc_number", + queryset=Document.objects.filter(type_id="rfc"), + ) + subseries = serializers.ListField(child=SubseriesNameField(required=False)) + # N.b., authors is _not_ a field on Document! 
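A quick illustration of what the SubseriesNameField pattern above accepts: a bcp/std/fyi prefix followed by a number with no leading zero and at most five digits (illustrative, not part of the change itself):

    import re

    SUBSERIES_RE = re.compile(r"^(bcp|std|fyi)[1-9][0-9]{0,4}$")

    assert SUBSERIES_RE.match("bcp14")
    assert SUBSERIES_RE.match("std90")
    assert not SUBSERIES_RE.match("bcp014")   # leading zero rejected
    assert not SUBSERIES_RE.match("rfc9000")  # not a subseries prefix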
+ authors = RfcAuthorSerializer(many=True) + + class Meta: + model = Document + fields = [ + "published", + "draft_name", + "draft_rev", + "rfc_number", + "title", + "authors", + "group", + "stream", + "abstract", + "pages", + "std_level", + "ad", + "obsoletes", + "updates", + "subseries", + "keywords", + ] + + def validate(self, data): + if "draft_name" in data or "draft_rev" in data: + if "draft_name" not in data: + raise serializers.ValidationError( + {"draft_name": "Missing draft_name"}, + code="invalid-draft-spec", + ) + if "draft_rev" not in data: + raise serializers.ValidationError( + {"draft_rev": "Missing draft_rev"}, + code="invalid-draft-spec", + ) + return data + + def update(self, instance, validated_data): + raise RuntimeError("Cannot update with this serializer") + + def create(self, validated_data): + """Publish an RFC""" + published = validated_data.pop("published") + draft_name = validated_data.pop("draft_name", None) + draft_rev = validated_data.pop("draft_rev", None) + obsoletes = validated_data.pop("obsoletes", []) + updates = validated_data.pop("updates", []) + subseries = validated_data.pop("subseries", []) + + system_person = Person.objects.get(name="(System)") + + # If specified, retrieve draft and extract RFC default values from it + if draft_name is None: + draft = None + else: + # validation enforces that draft_name and draft_rev are both present + draft = Document.objects.filter( + type_id="draft", + name=draft_name, + rev=draft_rev, + ).first() + if draft is None: + raise serializers.ValidationError( + { + "draft_name": "No such draft", + "draft_rev": "No such draft", + }, + code="invalid-draft" + ) + elif draft.get_state_slug() == "rfc": + raise serializers.ValidationError( + { + "draft_name": "Draft already published as RFC", + }, + code="already-published-draft", + ) + + # Transaction to clean up if something fails + with transaction.atomic(): + # create rfc, letting validated request data override draft defaults + rfc = self._create_rfc(validated_data) + DocEvent.objects.create( + doc=rfc, + rev=rfc.rev, + type="published_rfc", + time=published, + by=system_person, + desc="RFC published", + ) + rfc.set_state(State.objects.get(used=True, type_id="rfc", slug="published")) + + # create updates / obsoletes relations + for obsoleted_rfc_pk in obsoletes: + RelatedDocument.objects.get_or_create( + source=rfc, target=obsoleted_rfc_pk, relationship_id="obs" + ) + for updated_rfc_pk in updates: + RelatedDocument.objects.get_or_create( + source=rfc, target=updated_rfc_pk, relationship_id="updates" + ) + + # create subseries relations + for subseries_doc_name in subseries: + ss_slug = subseries_doc_name[:3] + subseries_doc, ss_doc_created = Document.objects.get_or_create( + type_id=ss_slug, name=subseries_doc_name + ) + if ss_doc_created: + subseries_doc.docevent_set.create( + type=f"{ss_slug}_doc_created", + by=system_person, + desc=f"Created {subseries_doc_name} via publication of {rfc.name}", + ) + _, ss_rel_created = subseries_doc.relateddocument_set.get_or_create( + relationship_id="contains", target=rfc + ) + if ss_rel_created: + subseries_doc.docevent_set.create( + type="sync_from_rfc_editor", + by=system_person, + desc=f"Added {rfc.name} to {subseries_doc.name}", + ) + rfc.docevent_set.create( + type="sync_from_rfc_editor", + by=system_person, + desc=f"Added {rfc.name} to {subseries_doc.name}", + ) + + + # create relation with draft and update draft state + if draft is not None: + draft_changes = [] + draft_events = [] + if draft.get_state_slug() != "rfc": + 
draft.set_state( + State.objects.get(used=True, type="draft", slug="rfc") + ) + move_draft_files_to_archive(draft, draft.rev) + draft_changes.append(f"changed state to {draft.get_state()}") + + r, created_relateddoc = RelatedDocument.objects.get_or_create( + source=draft, target=rfc, relationship_id="became_rfc", + ) + if created_relateddoc: + change = "created {rel_name} relationship between {pretty_draft_name} and {pretty_rfc_name}".format( + rel_name=r.relationship.name.lower(), + pretty_draft_name=prettify_std_name(draft_name), + pretty_rfc_name=prettify_std_name(rfc.name), + ) + draft_changes.append(change) + + # Always set the "draft-iesg" state. This state should be set for all drafts, so + # log a warning if it is not set. What should happen here is that ietf stream + # RFCs come in as "rfcqueue" and are set to "pub" when they appear in the RFC index. + # Other stream documents should normally be "idexists" and be left that way. The + # code here *actually* leaves "draft-iesg" state alone if it is "idexists" or "pub", + # and changes any other state to "pub". If unset, it changes it to "idexists". + # This reflects historical behavior and should probably be updated, but a migration + # of existing drafts (and validation of the change) is needed before we change the + # handling. + prev_iesg_state = draft.get_state("draft-iesg") + if prev_iesg_state is None: + log.log(f'Warning while processing {rfc.name}: {draft.name} has no "draft-iesg" state') + new_iesg_state = State.objects.get(type_id="draft-iesg", slug="idexists") + elif prev_iesg_state.slug not in ("pub", "idexists"): + if prev_iesg_state.slug != "rfcqueue": + log.log( + 'Warning while processing {}: {} is in "draft-iesg" state {} (expected "rfcqueue")'.format( + rfc.name, draft.name, prev_iesg_state.slug + ) + ) + new_iesg_state = State.objects.get(type_id="draft-iesg", slug="pub") + else: + new_iesg_state = prev_iesg_state + + if new_iesg_state != prev_iesg_state: + draft.set_state(new_iesg_state) + draft_changes.append(f"changed {new_iesg_state.type.label} to {new_iesg_state}") + e = update_action_holders(draft, prev_iesg_state, new_iesg_state) + if e: + draft_events.append(e) + + # If the draft and RFC streams agree, move draft to "pub" stream state. If not, complain. 
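The "draft-iesg" handling described in the comment block above reduces to a small decision table; a sketch of that logic in terms of state slugs (illustrative, not part of the change itself):

    def next_draft_iesg_slug(prev_slug):
        if prev_slug is None:
            return "idexists"            # unset state: warn and set to "idexists"
        if prev_slug in ("pub", "idexists"):
            return prev_slug             # left alone
        return "pub"                     # anything else (normally "rfcqueue") becomes "pub"

    assert next_draft_iesg_slug(None) == "idexists"
    assert next_draft_iesg_slug("rfcqueue") == "pub"
    assert next_draft_iesg_slug("idexists") == "idexists"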
+ if draft.stream != rfc.stream: + log.log("Warning while processing {}: draft {} stream is {} but RFC stream is {}".format( + rfc.name, draft.name, draft.stream, rfc.stream + )) + elif draft.stream.slug in ["iab", "irtf", "ise", "editorial"]: + stream_slug = f"draft-stream-{draft.stream.slug}" + prev_state = draft.get_state(stream_slug) + if prev_state is not None and prev_state.slug != "pub": + new_state = State.objects.select_related("type").get(used=True, type__slug=stream_slug, slug="pub") + draft.set_state(new_state) + draft_changes.append( + f"changed {new_state.type.label} to {new_state}" + ) + e = update_action_holders(draft, prev_state, new_state) + if e: + draft_events.append(e) + if draft_changes: + draft_events.append( + DocEvent.objects.create( + doc=draft, + rev=draft.rev, + by=system_person, + type="sync_from_rfc_editor", + desc=f"Updated while publishing {rfc.name} ({', '.join(draft_changes)})", + ) + ) + draft.save_with_history(draft_events) + + return rfc + + def _create_rfc(self, validated_data): + authors_data = validated_data.pop("authors") + rfc = Document.objects.create( + type_id="rfc", + name=f"rfc{validated_data['rfc_number']}", + **validated_data, + ) + for order, author_data in enumerate(authors_data): + rfc.rfcauthor_set.create( + order=order, + **author_data, + ) + return rfc + + +class EditableRfcSerializer(serializers.ModelSerializer): + # Would be nice to reconcile this with ietf.doc.serializers.RfcSerializer. + # The purposes of that serializer (representing data for Red) and this one + # (accepting updates from Purple) are different enough that separate formats + # may be needed, but if not it'd be nice to have a single RfcSerializer that + # can serve both. + # + # Should also consider whether this and RfcPubSerializer should merge. + # + # Treats published and subseries fields as write-only. This isn't quite correct, + # but makes it easier and we don't currently use the serialized value except for + # debugging. + published = serializers.DateTimeField( + default_timezone=datetime.timezone.utc, + write_only=True, + ) + authors = RfcAuthorSerializer(many=True, min_length=1, source="rfcauthor_set") + subseries = serializers.ListField( + child=SubseriesNameField(required=False), + write_only=True, + ) + + class Meta: + model = Document + fields = [ + "published", + "title", + "authors", + "stream", + "abstract", + "pages", + "std_level", + "subseries", + "keywords", + ] + + def create(self, validated_data): + raise RuntimeError("Cannot create with this serializer") + + def update(self, instance, validated_data): + assert isinstance(instance, Document) + assert instance.type_id == "rfc" + rfc = instance # get better name + + system_person = Person.objects.get(name="(System)") + + # Remove data that needs special handling. Use a singleton object to detect + # missing values in case we ever support a value that needs None as an option. 
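A minimal sketch of the sentinel pattern the comment above describes, distinguishing "key omitted" from "key explicitly set to None" (illustrative, not part of the change itself):

    _OMITTED = object()

    def example_update(validated_data):
        published = validated_data.pop("published", _OMITTED)
        if published is _OMITTED:
            return "published not supplied"
        return f"published set to {published!r}"

    assert example_update({}) == "published not supplied"
    assert example_update({"published": None}) == "published set to None"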
+ omitted = object() + published = validated_data.pop("published", omitted) + subseries = validated_data.pop("subseries", omitted) + authors_data = validated_data.pop("rfcauthor_set", omitted) + + # Transaction to clean up if something fails + with transaction.atomic(): + # update the rfc Document itself + rfc_changes = [] + rfc_events = [] + + for attr, new_value in validated_data.items(): + old_value = getattr(rfc, attr) + if new_value != old_value: + rfc_changes.append( + f"changed {attr} to '{new_value}' from '{old_value}'" + ) + setattr(rfc, attr, new_value) + if len(rfc_changes) > 0: + rfc_change_summary = f"{', '.join(rfc_changes)}" + rfc_events.append( + DocEvent.objects.create( + doc=rfc, + rev=rfc.rev, + by=system_person, + type="sync_from_rfc_editor", + desc=f"Changed metadata: {rfc_change_summary}", + ) + ) + if authors_data is not omitted: + rfc_events.extend(_update_authors(instance, authors_data)) + + if published is not omitted: + published_event = rfc.latest_event(type="published_rfc") + if published_event is None: + # unexpected, but possible in theory + rfc_events.append( + DocEvent.objects.create( + doc=rfc, + rev=rfc.rev, + type="published_rfc", + time=published, + by=system_person, + desc="RFC published", + ) + ) + rfc_events.append( + DocEvent.objects.create( + doc=rfc, + rev=rfc.rev, + type="sync_from_rfc_editor", + by=system_person, + desc=( + f"Set publication timestamp to {published.isoformat()}" + ), + ) + ) + else: + original_pub_time = published_event.time + if published != original_pub_time: + published_event.time = published + published_event.save() + rfc_events.append( + DocEvent.objects.create( + doc=rfc, + rev=rfc.rev, + type="sync_from_rfc_editor", + by=system_person, + desc=( + f"Changed publication time to " + f"{published.isoformat()} from " + f"{original_pub_time.isoformat()}" + ) + ) + ) + + # update subseries relations + if subseries is not omitted: + for subseries_doc_name in subseries: + ss_slug = subseries_doc_name[:3] + subseries_doc, ss_doc_created = Document.objects.get_or_create( + type_id=ss_slug, name=subseries_doc_name + ) + if ss_doc_created: + subseries_doc.docevent_set.create( + type=f"{ss_slug}_doc_created", + by=system_person, + desc=f"Created {subseries_doc_name} via update of {rfc.name}", + ) + _, ss_rel_created = subseries_doc.relateddocument_set.get_or_create( + relationship_id="contains", target=rfc + ) + if ss_rel_created: + subseries_doc.docevent_set.create( + type="sync_from_rfc_editor", + by=system_person, + desc=f"Added {rfc.name} to {subseries_doc.name}", + ) + rfc_events.append( + rfc.docevent_set.create( + type="sync_from_rfc_editor", + by=system_person, + desc=f"Added {rfc.name} to {subseries_doc.name}", + ) + ) + # Delete subseries relations that are no longer current + stale_subseries_relations = rfc.relations_that("contains").exclude( + source__name__in=subseries + ) + for stale_relation in stale_subseries_relations: + stale_subseries_doc = stale_relation.source + rfc_events.append( + rfc.docevent_set.create( + type="sync_from_rfc_editor", + by=system_person, + desc=f"Removed {rfc.name} from {stale_subseries_doc.name}", + ) + ) + stale_subseries_doc.docevent_set.create( + type="sync_from_rfc_editor", + by=system_person, + desc=f"Removed {rfc.name} from {stale_subseries_doc.name}", + ) + stale_subseries_relations.delete() + if len(rfc_events) > 0: + rfc.save_with_history(rfc_events) + # Gather obs and updates in both directions as a title/author change to + # this doc affects the info rendering of all of the other 
RFCs + needs_updating = sorted( + [ + d.rfc_number + for d in [rfc] + + rfc.related_that_doc(("obs", "updates")) + + rfc.related_that(("obs", "updates")) + ] + ) + trigger_red_precomputer_task.delay(rfc_number_list=needs_updating) + # Update the search index also + update_rfc_searchindex_task.delay(rfc.rfc_number) + return rfc + + +class RfcFileSerializer(serializers.Serializer): + # The structure of this serializer is constrained by what openapi-generator-cli's + # python generator can correctly serialize as multipart/form-data. It does not + # handle nested serializers well (or perhaps at all). ListFields with child + # ChoiceField or RegexField do not serialize correctly. DictFields don't seem + # to work. + # + # It does seem to correctly send filenames along with FileFields, even as a child + # in a ListField, so we use that to convey the file format of each item. There + # are other options we could consider (e.g., a structured CharField) but this + # works. + allowed_extensions = ( + ".html", + ".json", + ".notprepped.xml", + ".pdf", + ".txt", + ".xml", + ) + + rfc = serializers.SlugRelatedField( + slug_field="rfc_number", + queryset=Document.objects.filter(type_id="rfc"), + help_text="RFC number to which the contents belong", + ) + contents = serializers.ListField( + child=serializers.FileField( + allow_empty_file=False, + use_url=False, + ), + help_text=( + "List of content files. Filename extensions are used to identify " + "file types, but filenames are otherwise ignored." + ), + ) + mtime = serializers.DateTimeField( + required=False, + default=timezone.now, + default_timezone=datetime.UTC, + help_text="Modification timestamp to apply to uploaded files", + ) + replace = serializers.BooleanField( + required=False, + default=False, + help_text=( + "Replace existing files for this RFC. Defaults to false. When false, " + "if _any_ files already exist for the specified RFC the upload will be " + "rejected regardless of which files are being uploaded. When true," + "existing files will be removed and new ones will be put in place. BE" + "VERY CAREFUL WITH THIS OPTION IN PRODUCTION." 
+ ), + ) + + def validate_contents(self, data): + found_extensions = [] + for uploaded_file in data: + if not hasattr(uploaded_file, "name"): + raise serializers.ValidationError( + "filename not specified for uploaded file", + code="missing-filename", + ) + ext = "".join(Path(uploaded_file.name).suffixes) + if ext not in self.allowed_extensions: + raise serializers.ValidationError( + f"File uploaded with invalid extension '{ext}'", + code="invalid-filename-ext", + ) + if ext in found_extensions: + raise serializers.ValidationError( + f"More than one file uploaded with extension '{ext}'", + code="duplicate-filename-ext", + ) + return data + + +class NotificationAckSerializer(serializers.Serializer): + message = serializers.CharField(default="ack") diff --git a/ietf/api/tests.py b/ietf/api/tests.py index 24d76a6a96..2a44791a5c 100644 --- a/ietf/api/tests.py +++ b/ietf/api/tests.py @@ -1,18 +1,22 @@ -# Copyright The IETF Trust 2015-2020, All Rights Reserved +# Copyright The IETF Trust 2015-2024, All Rights Reserved # -*- coding: utf-8 -*- - +import base64 +import copy import datetime import json import html +from unittest import mock import os import sys from importlib import import_module from pathlib import Path +from random import randrange from django.apps import apps from django.conf import settings -from django.test import Client +from django.http import HttpResponseForbidden +from django.test import Client, RequestFactory from django.test.utils import override_settings from django.urls import reverse as urlreverse from django.utils import timezone @@ -22,24 +26,31 @@ import debug # pyflakes:ignore import ietf +from ietf.doc.storage_utils import retrieve_str from ietf.doc.utils import get_unicode_document_content from ietf.doc.models import RelatedDocument, State -from ietf.doc.factories import IndividualDraftFactory, WgDraftFactory +from ietf.doc.factories import IndividualDraftFactory, WgDraftFactory, WgRfcFactory from ietf.group.factories import RoleFactory from ietf.meeting.factories import MeetingFactory, SessionFactory -from ietf.meeting.models import Session -from ietf.person.factories import PersonFactory, random_faker -from ietf.person.models import User -from ietf.person.models import PersonalApiKey -from ietf.stats.models import MeetingRegistration -from ietf.utils.mail import outbox, get_payload_text +from ietf.meeting.models import Session, Registration +from ietf.nomcom.models import Volunteer +from ietf.nomcom.factories import NomComFactory, nomcom_kwargs_for_year +from ietf.person.factories import PersonFactory, random_faker, EmailFactory, PersonalApiKeyFactory +from ietf.person.models import Email, User +from ietf.utils.mail import empty_outbox, outbox, get_payload_text from ietf.utils.models import DumpInfo from ietf.utils.test_utils import TestCase, login_testing_unauthorized, reload_db_objects +from . 
import Serializer +from .ietf_utils import is_valid_token, requires_api_token +from .views import EmailIngestionError + OMITTED_APPS = ( 'ietf.secr.meetings', 'ietf.secr.proceedings', 'ietf.ipr', + 'ietf.status', + 'ietf.blobdb', ) class CustomApiTests(TestCase): @@ -62,7 +73,7 @@ def test_deprecated_api_set_session_video_url(self): meeting = MeetingFactory(type_id='ietf') session = SessionFactory(group__type_id='wg', meeting=meeting) group = session.group - apikey = PersonalApiKey.objects.create(endpoint=url, person=recman) + apikey = PersonalApiKeyFactory(endpoint=url, person=recman) video = 'https://foo.example.com/bar/beer/' # error cases @@ -70,7 +81,7 @@ def test_deprecated_api_set_session_video_url(self): self.assertContains(r, "Missing apikey parameter", status_code=400) badrole = RoleFactory(group__type_id='ietf', name_id='ad') - badapikey = PersonalApiKey.objects.create(endpoint=url, person=badrole.person) + badapikey = PersonalApiKeyFactory(endpoint=url, person=badrole.person) badrole.person.user.last_login = timezone.now() badrole.person.user.save() r = self.client.post(url, {'apikey': badapikey.hash()} ) @@ -142,7 +153,7 @@ def test_api_set_session_video_url(self): recman = recmanrole.person meeting = MeetingFactory(type_id="ietf") session = SessionFactory(group__type_id="wg", meeting=meeting) - apikey = PersonalApiKey.objects.create(endpoint=url, person=recman) + apikey = PersonalApiKeyFactory(endpoint=url, person=recman) video = "https://foo.example.com/bar/beer/" # error cases @@ -150,7 +161,7 @@ def test_api_set_session_video_url(self): self.assertContains(r, "Missing apikey parameter", status_code=400) badrole = RoleFactory(group__type_id="ietf", name_id="ad") - badapikey = PersonalApiKey.objects.create(endpoint=url, person=badrole.person) + badapikey = PersonalApiKeyFactory(endpoint=url, person=badrole.person) badrole.person.user.last_login = timezone.now() badrole.person.user.save() r = self.client.post(url, {"apikey": badapikey.hash()}) @@ -213,17 +224,83 @@ def test_api_set_session_video_url(self): event = doc.latest_event() self.assertEqual(event.by, recman) - def test_api_add_session_attendees(self): + def test_api_set_meetecho_recording_name(self): + url = urlreverse("ietf.meeting.views.api_set_meetecho_recording_name") + recmanrole = RoleFactory(group__type_id="ietf", name_id="recman") + recman = recmanrole.person + meeting = MeetingFactory(type_id="ietf") + session = SessionFactory(group__type_id="wg", meeting=meeting) + apikey = PersonalApiKeyFactory(endpoint=url, person=recman) + name = "testname" + + # error cases + r = self.client.post(url, {}) + self.assertContains(r, "Missing apikey parameter", status_code=400) + + badrole = RoleFactory(group__type_id="ietf", name_id="ad") + badapikey = PersonalApiKeyFactory(endpoint=url, person=badrole.person) + badrole.person.user.last_login = timezone.now() + badrole.person.user.save() + r = self.client.post(url, {"apikey": badapikey.hash()}) + self.assertContains(r, "Restricted to role: Recording Manager", status_code=403) + + r = self.client.post(url, {"apikey": apikey.hash()}) + self.assertContains(r, "Too long since last regular login", status_code=400) + recman.user.last_login = timezone.now() + recman.user.save() + + r = self.client.get(url, {"apikey": apikey.hash()}) + self.assertContains(r, "Method not allowed", status_code=405) + + r = self.client.post(url, {"apikey": apikey.hash()}) + self.assertContains(r, "Missing session_id parameter", status_code=400) + + r = self.client.post(url, {"apikey": apikey.hash(), 
"session_id": session.pk}) + self.assertContains(r, "Missing name parameter", status_code=400) + + bad_pk = int(Session.objects.order_by("-pk").first().pk) + 1 + r = self.client.post( + url, + { + "apikey": apikey.hash(), + "session_id": bad_pk, + "name": name, + }, + ) + self.assertContains(r, "Session not found", status_code=400) + + r = self.client.post( + url, + { + "apikey": apikey.hash(), + "session_id": "foo", + "name": name, + }, + ) + self.assertContains(r, "Invalid session_id", status_code=400) + + r = self.client.post( + url, {"apikey": apikey.hash(), "session_id": session.pk, "name": name} + ) + self.assertContains(r, "Done", status_code=200) + + session.refresh_from_db() + self.assertEqual(session.meetecho_recording_name, name) + + + def test_api_add_session_attendees_deprecated(self): + # Deprecated test - should be removed when we stop accepting a simple list of user PKs in + # the add_session_attendees() view url = urlreverse('ietf.meeting.views.api_add_session_attendees') otherperson = PersonFactory() recmanrole = RoleFactory(group__type_id='ietf', name_id='recman') recman = recmanrole.person meeting = MeetingFactory(type_id='ietf') session = SessionFactory(group__type_id='wg', meeting=meeting) - apikey = PersonalApiKey.objects.create(endpoint=url, person=recman) + apikey = PersonalApiKeyFactory(endpoint=url, person=recman) badrole = RoleFactory(group__type_id='ietf', name_id='ad') - badapikey = PersonalApiKey.objects.create(endpoint=url, person=badrole.person) + badapikey = PersonalApiKeyFactory(endpoint=url, person=badrole.person) badrole.person.user.last_login = timezone.now() badrole.person.user.save() @@ -279,6 +356,120 @@ def test_api_add_session_attendees(self): self.assertTrue(session.attended_set.filter(person=recman).exists()) self.assertTrue(session.attended_set.filter(person=otherperson).exists()) + def test_api_add_session_attendees(self): + url = urlreverse("ietf.meeting.views.api_add_session_attendees") + otherperson = PersonFactory() + recmanrole = RoleFactory(group__type_id="ietf", name_id="recman") + recman = recmanrole.person + meeting = MeetingFactory(type_id="ietf") + session = SessionFactory(group__type_id="wg", meeting=meeting) + apikey = PersonalApiKeyFactory(endpoint=url, person=recman) + + badrole = RoleFactory(group__type_id="ietf", name_id="ad") + badapikey = PersonalApiKeyFactory(endpoint=url, person=badrole.person) + badrole.person.user.last_login = timezone.now() + badrole.person.user.save() + + # Improper credentials, or method + r = self.client.post(url, {}) + self.assertContains(r, "Missing apikey parameter", status_code=400) + + r = self.client.post(url, {"apikey": badapikey.hash()}) + self.assertContains(r, "Restricted to role: Recording Manager", status_code=403) + + r = self.client.post(url, {"apikey": apikey.hash()}) + self.assertContains(r, "Too long since last regular login", status_code=400) + + recman.user.last_login = timezone.now() - datetime.timedelta(days=365) + recman.user.save() + r = self.client.post(url, {"apikey": apikey.hash()}) + self.assertContains(r, "Too long since last regular login", status_code=400) + + recman.user.last_login = timezone.now() + recman.user.save() + r = self.client.get(url, {"apikey": apikey.hash()}) + self.assertContains(r, "Method not allowed", status_code=405) + + recman.user.last_login = timezone.now() + recman.user.save() + + # Malformed requests + r = self.client.post(url, {"apikey": apikey.hash()}) + self.assertContains(r, "Missing attended parameter", status_code=400) + + for baddict in ( + 
"{}", + '{"bogons;drop table":"bogons;drop table"}', + '{"session_id":"Not an integer;drop table"}', + f'{{"session_id":{session.pk},"attendees":"not a list;drop table"}}', + f'{{"session_id":{session.pk},"attendees":"not a list;drop table"}}', + f'{{"session_id":{session.pk},"attendees":[1,2,"not an int;drop table",4]}}', + f'{{"session_id":{session.pk},"attendees":["user_id":{recman.user.pk}]}}', # no join_time + f'{{"session_id":{session.pk},"attendees":["user_id":{recman.user.pk},"join_time;drop table":"2024-01-01T00:00:00Z]}}', + f'{{"session_id":{session.pk},"attendees":["user_id":{recman.user.pk},"join_time":"not a time;drop table"]}}', + # next has no time zone indicator + f'{{"session_id":{session.pk},"attendees":["user_id":{recman.user.pk},"join_time":"2024-01-01T00:00:00"]}}', + f'{{"session_id":{session.pk},"attendees":["user_id":"not an int; drop table","join_time":"2024-01-01T00:00:00Z"]}}', + # Uncomment the next one when the _deprecated version of this test is retired + # f'{{"session_id":{session.pk},"attendees":[{recman.user.pk}, {otherperson.user.pk}]}}', + ): + r = self.client.post(url, {"apikey": apikey.hash(), "attended": baddict}) + self.assertContains(r, "Malformed post", status_code=400) + + bad_session_id = Session.objects.order_by("-pk").first().pk + 1 + r = self.client.post( + url, + { + "apikey": apikey.hash(), + "attended": f'{{"session_id":{bad_session_id},"attendees":[]}}', + }, + ) + self.assertContains(r, "Invalid session", status_code=400) + bad_user_id = User.objects.order_by("-pk").first().pk + 1 + r = self.client.post( + url, + { + "apikey": apikey.hash(), + "attended": f'{{"session_id":{session.pk},"attendees":[{{"user_id":{bad_user_id}, "join_time":"2024-01-01T00:00:00Z"}}]}}', + }, + ) + self.assertContains(r, "Invalid attendee", status_code=400) + + # Reasonable request + r = self.client.post( + url, + { + "apikey": apikey.hash(), + "attended": json.dumps( + { + "session_id": session.pk, + "attendees": [ + { + "user_id": recman.user.pk, + "join_time": "2023-09-03T12:34:56Z", + }, + { + "user_id": otherperson.user.pk, + "join_time": "2023-09-03T03:00:19Z", + }, + ], + } + ), + }, + ) + + self.assertEqual(session.attended_set.count(), 2) + self.assertTrue(session.attended_set.filter(person=recman).exists()) + self.assertEqual( + session.attended_set.get(person=recman).time, + datetime.datetime(2023, 9, 3, 12, 34, 56, tzinfo=datetime.UTC), + ) + self.assertTrue(session.attended_set.filter(person=otherperson).exists()) + self.assertEqual( + session.attended_set.get(person=otherperson).time, + datetime.datetime(2023, 9, 3, 3, 0, 19, tzinfo=datetime.UTC), + ) + def test_api_upload_polls_and_chatlog(self): recmanrole = RoleFactory(group__type_id='ietf', name_id='recman') recmanrole.person.user.last_login = timezone.now() @@ -328,8 +519,8 @@ def test_api_upload_polls_and_chatlog(self): ), ): url = urlreverse(f"ietf.meeting.views.api_upload_{type_id}") - apikey = PersonalApiKey.objects.create(endpoint=url, person=recmanrole.person) - badapikey = PersonalApiKey.objects.create(endpoint=url, person=badrole.person) + apikey = PersonalApiKeyFactory(endpoint=url, person=recmanrole.person) + badapikey = PersonalApiKeyFactory(endpoint=url, person=badrole.person) r = self.client.post(url, {}) self.assertContains(r, "Missing apikey parameter", status_code=400) @@ -362,101 +553,13 @@ def test_api_upload_polls_and_chatlog(self): r = self.client.post(url,{'apikey':apikey.hash(),'apidata': f'{{"session_id":{session.pk}, "{type_id}":{content}}}'}) 
self.assertEqual(r.status_code, 200) - newdoc = session.sessionpresentation_set.get(document__type_id=type_id).document + newdoc = session.presentations.get(document__type_id=type_id).document newdoccontent = get_unicode_document_content(newdoc.name, Path(session.meeting.get_materials_path()) / type_id / newdoc.uploaded_filename) self.assertEqual(json.loads(content), json.loads(newdoccontent)) - - def test_deprecated_api_upload_bluesheet(self): - url = urlreverse('ietf.meeting.views.api_upload_bluesheet') - recmanrole = RoleFactory(group__type_id='ietf', name_id='recman') - recman = recmanrole.person - meeting = MeetingFactory(type_id='ietf') - session = SessionFactory(group__type_id='wg', meeting=meeting) - group = session.group - apikey = PersonalApiKey.objects.create(endpoint=url, person=recman) - - people = [ - {"name": "Andrea Andreotti", "affiliation": "Azienda"}, - {"name": "Bosse Bernadotte", "affiliation": "Bolag"}, - {"name": "Charles Charlemagne", "affiliation": "Compagnie"}, - ] - for i in range(3): - faker = random_faker() - people.append(dict(name=faker.name(), affiliation=faker.company())) - bluesheet = json.dumps(people) - - # error cases - r = self.client.post(url, {}) - self.assertContains(r, "Missing apikey parameter", status_code=400) - - badrole = RoleFactory(group__type_id='ietf', name_id='ad') - badapikey = PersonalApiKey.objects.create(endpoint=url, person=badrole.person) - badrole.person.user.last_login = timezone.now() - badrole.person.user.save() - r = self.client.post(url, {'apikey': badapikey.hash()}) - self.assertContains(r, "Restricted to roles: Recording Manager, Secretariat", status_code=403) - - r = self.client.post(url, {'apikey': apikey.hash()}) - self.assertContains(r, "Too long since last regular login", status_code=400) - recman.user.last_login = timezone.now() - recman.user.save() - - r = self.client.get(url, {'apikey': apikey.hash()}) - self.assertContains(r, "Method not allowed", status_code=405) - - r = self.client.post(url, {'apikey': apikey.hash(), 'group': group.acronym}) - self.assertContains(r, "Missing meeting parameter", status_code=400) - - r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, }) - self.assertContains(r, "Missing group parameter", status_code=400) - - r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym}) - self.assertContains(r, "Missing item parameter", status_code=400) - - r = self.client.post(url, - {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym, 'item': '1'}) - self.assertContains(r, "Missing bluesheet parameter", status_code=400) - - r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': '1', 'group': group.acronym, - 'item': '1', 'bluesheet': bluesheet, }) - self.assertContains(r, "No sessions found for meeting", status_code=400) - - r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': 'bogous', - 'item': '1', 'bluesheet': bluesheet, }) - self.assertContains(r, "No sessions found in meeting '%s' for group 'bogous'" % meeting.number, status_code=400) - - r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym, - 'item': '1', 'bluesheet': "foobar", }) - self.assertContains(r, "Invalid json value: 'foobar'", status_code=400) - - r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym, - 'item': '5', 'bluesheet': bluesheet, }) - self.assertContains(r, "No item '5' found in list of 
sessions for group", status_code=400) - - r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym, - 'item': 'foo', 'bluesheet': bluesheet, }) - self.assertContains(r, "Expected a numeric value for 'item', found 'foo'", status_code=400) - - r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym, - 'item': '1', 'bluesheet': bluesheet, }) - self.assertContains(r, "Done", status_code=200) - - # Submit again, with slightly different content, as an updated version - people[1]['affiliation'] = 'Bolaget AB' - bluesheet = json.dumps(people) - r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym, - 'item': '1', 'bluesheet': bluesheet, }) - self.assertContains(r, "Done", status_code=200) - - bluesheet = session.sessionpresentation_set.filter(document__type__slug='bluesheets').first().document - # We've submitted an update; check that the rev is right - self.assertEqual(bluesheet.rev, '01') - # Check the content - with open(bluesheet.get_file_name()) as file: - text = file.read() - for p in people: - self.assertIn(p['name'], html.unescape(text)) - self.assertIn(p['affiliation'], html.unescape(text)) + self.assertEqual( + json.loads(retrieve_str(type_id, newdoc.uploaded_filename)), + json.loads(content) + ) def test_api_upload_bluesheet(self): url = urlreverse("ietf.meeting.views.api_upload_bluesheet") @@ -464,8 +567,7 @@ def test_api_upload_bluesheet(self): recman = recmanrole.person meeting = MeetingFactory(type_id="ietf") session = SessionFactory(group__type_id="wg", meeting=meeting) - group = session.group - apikey = PersonalApiKey.objects.create(endpoint=url, person=recman) + apikey = PersonalApiKeyFactory(endpoint=url, person=recman) people = [ {"name": "Andrea Andreotti", "affiliation": "Azienda"}, @@ -482,7 +584,7 @@ def test_api_upload_bluesheet(self): self.assertContains(r, "Missing apikey parameter", status_code=400) badrole = RoleFactory(group__type_id="ietf", name_id="ad") - badapikey = PersonalApiKey.objects.create(endpoint=url, person=badrole.person) + badapikey = PersonalApiKeyFactory(endpoint=url, person=badrole.person) badrole.person.user.last_login = timezone.now() badrole.person.user.save() r = self.client.post(url, {"apikey": badapikey.hash()}) @@ -504,18 +606,6 @@ def test_api_upload_bluesheet(self): r = self.client.post(url, {"apikey": apikey.hash(), "session_id": session.pk}) self.assertContains(r, "Missing bluesheet parameter", status_code=400) - r = self.client.post( - url, - { - "apikey": apikey.hash(), - "meeting": meeting.number, - "group": group.acronym, - "item": "1", - "bluesheet": "foobar", - }, - ) - self.assertContains(r, "Invalid json value: 'foobar'", status_code=400) - bad_session_pk = int(Session.objects.order_by("-pk").first().pk) + 1 r = self.client.post( url, @@ -554,16 +644,14 @@ def test_api_upload_bluesheet(self): url, { "apikey": apikey.hash(), - "meeting": meeting.number, - "group": group.acronym, - "item": "1", + "session_id": session.pk, "bluesheet": bluesheet, }, ) self.assertContains(r, "Done", status_code=200) bluesheet = ( - session.sessionpresentation_set.filter(document__type__slug="bluesheets") + session.presentations.filter(document__type__slug="bluesheets") .first() .document ) @@ -592,14 +680,14 @@ def test_api_v2_person_export_view(self): url = urlreverse('ietf.api.views.ApiV2PersonExportView') robot = PersonFactory(user__is_staff=True) RoleFactory(name_id='robot', person=robot, email=robot.email(), 
group__acronym='secretariat') - apikey = PersonalApiKey.objects.create(endpoint=url, person=robot) + apikey = PersonalApiKeyFactory(endpoint=url, person=robot) # error cases r = self.client.post(url, {}) self.assertContains(r, "Missing apikey parameter", status_code=400) badrole = RoleFactory(group__type_id='ietf', name_id='ad') - badapikey = PersonalApiKey.objects.create(endpoint=url, person=badrole.person) + badapikey = PersonalApiKeyFactory(endpoint=url, person=badrole.person) badrole.person.user.last_login = timezone.now() badrole.person.user.save() r = self.client.post(url, {'apikey': badapikey.hash()}) @@ -617,86 +705,179 @@ def test_api_v2_person_export_view(self): self.assertEqual(data['ascii'], robot.ascii) self.assertEqual(data['user']['email'], robot.user.email) - def test_api_new_meeting_registration(self): + @override_settings(APP_API_TOKENS={"ietf.api.views.api_new_meeting_registration_v2": ["valid-token"]}) + def test_api_new_meeting_registration_v2(self): meeting = MeetingFactory(type_id='ietf') - reg = { - 'apikey': 'invalid', + person = PersonFactory() + reg_detail = { + 'email': person.email().address, + 'first_name': person.first_name(), + 'last_name': person.last_name(), + 'meeting': meeting.number, 'affiliation': "Alguma Corporação", 'country_code': 'PT', - 'email': 'foo@example.pt', - 'first_name': 'Foo', - 'last_name': 'Bar', - 'meeting': meeting.number, - 'reg_type': 'hackathon', - 'ticket_type': '', - 'checkedin': 'False', + 'checkedin': False, + 'is_nomcom_volunteer': False, + 'cancelled': False, + 'tickets': [{'attendance_type': 'onsite', 'ticket_type': 'week_pass'}], } - url = urlreverse('ietf.api.views.api_new_meeting_registration') - r = self.client.post(url, reg) - self.assertContains(r, 'Invalid apikey', status_code=403) - oidcp = PersonFactory(user__is_staff=True) - # Make sure 'oidcp' has an acceptable role - RoleFactory(name_id='robot', person=oidcp, email=oidcp.email(), group__acronym='secretariat') - key = PersonalApiKey.objects.create(person=oidcp, endpoint=url) - reg['apikey'] = key.hash() - # - # Test valid POST - # FIXME: sometimes, there seems to be something in the outbox? 
- old_len = len(outbox) - r = self.client.post(url, reg) - self.assertContains(r, "Accepted, New registration, Email sent", status_code=202) - # - # Check outgoing mail - self.assertEqual(len(outbox), old_len + 1) - body = get_payload_text(outbox[-1]) - self.assertIn(reg['email'], outbox[-1]['To'] ) - self.assertIn(reg['email'], body) - self.assertIn('account creation request', body) + reg_data = {'objects': {reg_detail['email']: reg_detail}} + url = urlreverse('ietf.api.views.api_new_meeting_registration_v2') # - # Check record - obj = MeetingRegistration.objects.get(email=reg['email'], meeting__number=reg['meeting']) - for key in ['affiliation', 'country_code', 'first_name', 'last_name', 'person', 'reg_type', 'ticket_type', 'checkedin']: - self.assertEqual(getattr(obj, key), False if key=='checkedin' else reg.get(key) , "Bad data for field '%s'" % key) + # Test invalid key + r = self.client.post(url, data=json.dumps(reg_data), content_type='application/json', headers={"X-Api-Key": "invalid-token"}) + self.assertEqual(r.status_code, 403) # - # Test with existing user - person = PersonFactory() - reg['email'] = person.email().address - reg['first_name'] = person.first_name() - reg['last_name'] = person.last_name() + # Test invalid data + bad_reg_data = copy.deepcopy(reg_data) + del bad_reg_data['objects'][reg_detail['email']]['email'] + r = self.client.post(url, data=json.dumps(bad_reg_data), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 400) # - r = self.client.post(url, reg) - self.assertContains(r, "Accepted, New registration", status_code=202) + # Test valid POST + r = self.client.post(url, data=json.dumps(reg_data), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", status_code=202) # - # There should be no new outgoing mail - self.assertEqual(len(outbox), old_len + 1) + # Check record + objects = Registration.objects.filter(email=reg_detail['email'], meeting__number=reg_detail['meeting']) + self.assertEqual(objects.count(), 1) + obj = objects[0] + for key in ['affiliation', 'country_code', 'first_name', 'last_name', 'checkedin']: + self.assertEqual(getattr(obj, key), False if key == 'checkedin' else reg_detail.get(key), f"Bad data for field {key}") + self.assertEqual(obj.tickets.count(), 1) + ticket = obj.tickets.first() + self.assertEqual(ticket.ticket_type.slug, reg_detail['tickets'][0]['ticket_type']) + self.assertEqual(ticket.attendance_type.slug, reg_detail['tickets'][0]['attendance_type']) + self.assertEqual(obj.person, person) # - # Test multiple reg types - reg['reg_type'] = 'remote' - reg['ticket_type'] = 'full_week_pass' - r = self.client.post(url, reg) - self.assertContains(r, "Accepted, New registration", status_code=202) - objs = MeetingRegistration.objects.filter(email=reg['email'], meeting__number=reg['meeting']) - self.assertEqual(len(objs), 2) - self.assertEqual(objs.filter(reg_type='hackathon').count(), 1) - self.assertEqual(objs.filter(reg_type='remote', ticket_type='full_week_pass').count(), 1) - self.assertEqual(len(outbox), old_len + 1) + # Test update (switch to remote) + reg_detail = { + 'affiliation': "Alguma Corporação", + 'country_code': 'PT', + 'email': person.email().address, + 'first_name': person.first_name(), + 'last_name': person.last_name(), + 'meeting': meeting.number, + 'checkedin': False, + 'is_nomcom_volunteer': False, + 'cancelled': False, + 'tickets': [{'attendance_type': 'remote', 'ticket_type': 'week_pass'}], + } + reg_data = 
{'objects': {reg_detail['email']: reg_detail}} + r = self.client.post(url, data=json.dumps(reg_data), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", status_code=202) + objects = Registration.objects.filter(email=reg_detail['email'], meeting__number=reg_detail['meeting']) + self.assertEqual(objects.count(), 1) + obj = objects[0] + self.assertEqual(obj.tickets.count(), 1) + ticket = obj.tickets.first() + self.assertEqual(ticket.ticket_type.slug, reg_detail['tickets'][0]['ticket_type']) + self.assertEqual(ticket.attendance_type.slug, reg_detail['tickets'][0]['attendance_type']) # - # Test incomplete POST - drop_fields = ['affiliation', 'first_name', 'reg_type'] - for field in drop_fields: - del reg[field] - r = self.client.post(url, reg) - self.assertContains(r, 'Missing parameters:', status_code=400) - err, fields = r.content.decode().split(':', 1) - missing_fields = [f.strip() for f in fields.split(',')] - self.assertEqual(set(missing_fields), set(drop_fields)) + # Test multiple + reg_detail = { + 'affiliation': "Alguma Corporação", + 'country_code': 'PT', + 'email': person.email().address, + 'first_name': person.first_name(), + 'last_name': person.last_name(), + 'meeting': meeting.number, + 'checkedin': False, + 'is_nomcom_volunteer': False, + 'cancelled': False, + 'tickets': [ + {'attendance_type': 'onsite', 'ticket_type': 'one_day'}, + {'attendance_type': 'remote', 'ticket_type': 'week_pass'}, + ], + } + reg_data = {'objects': {reg_detail['email']: reg_detail}} + r = self.client.post(url, data=json.dumps(reg_data), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", status_code=202) + objects = Registration.objects.filter(email=reg_detail['email'], meeting__number=reg_detail['meeting']) + self.assertEqual(objects.count(), 1) + obj = objects[0] + self.assertEqual(obj.tickets.count(), 2) + self.assertEqual(obj.tickets.filter(attendance_type__slug='onsite').count(), 1) + self.assertEqual(obj.tickets.filter(attendance_type__slug='remote').count(), 1) + + @override_settings(APP_API_TOKENS={"ietf.api.views.api_new_meeting_registration_v2": ["valid-token"]}) + def test_api_new_meeting_registration_v2_cancelled(self): + meeting = MeetingFactory(type_id='ietf') + person = PersonFactory() + reg_detail = { + 'affiliation': "Acme", + 'country_code': 'US', + 'email': person.email().address, + 'first_name': person.first_name(), + 'last_name': person.last_name(), + 'meeting': meeting.number, + 'checkedin': False, + 'is_nomcom_volunteer': False, + 'cancelled': False, + 'tickets': [{'attendance_type': 'onsite', 'ticket_type': 'week_pass'}], + } + reg_data = {'objects': {reg_detail['email']: reg_detail}} + url = urlreverse('ietf.api.views.api_new_meeting_registration_v2') + self.assertEqual(Registration.objects.count(), 0) + r = self.client.post(url, data=json.dumps(reg_data), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", status_code=202) + self.assertEqual(Registration.objects.count(), 1) + reg_detail['cancelled'] = True + r = self.client.post(url, data=json.dumps(reg_data), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", status_code=202) + self.assertEqual(Registration.objects.count(), 0) + + @override_settings(APP_API_TOKENS={"ietf.api.views.api_new_meeting_registration_v2": ["valid-token"]}) + def test_api_new_meeting_registration_v2_nomcom(self): + meeting = 
MeetingFactory(type_id='ietf') + person = PersonFactory() + reg_detail = { + 'affiliation': "Acme", + 'country_code': 'US', + 'email': person.email().address, + 'first_name': person.first_name(), + 'last_name': person.last_name(), + 'meeting': meeting.number, + 'checkedin': False, + 'is_nomcom_volunteer': False, + 'cancelled': False, + 'tickets': [{'attendance_type': 'onsite', 'ticket_type': 'week_pass'}], + } + reg_data = {'objects': {reg_detail['email']: reg_detail}} + url = urlreverse('ietf.api.views.api_new_meeting_registration_v2') + now = datetime.datetime.now() + if now.month > 10: + year = now.year + 1 + else: + year = now.year + # create appropriate group and nomcom objects + nomcom = NomComFactory.create(is_accepting_volunteers=True, **nomcom_kwargs_for_year(year)) + + # first test is_nomcom_volunteer False + r = self.client.post(url, data=json.dumps(reg_data), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", status_code=202) + # assert no Volunteers exists + self.assertEqual(Volunteer.objects.count(), 0) + + # test is_nomcom_volunteer True + reg_detail['is_nomcom_volunteer'] = True + r = self.client.post(url, data=json.dumps(reg_data), content_type='application/json', headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, "Success", status_code=202) + # assert Volunteer exists + self.assertEqual(Volunteer.objects.count(), 1) + volunteer = Volunteer.objects.last() + self.assertEqual(volunteer.person, person) + self.assertEqual(volunteer.nomcom, nomcom) + self.assertEqual(volunteer.origin, 'registration') def test_api_version(self): - DumpInfo.objects.create(date=timezone.datetime(2022,8,31,7,10,1,tzinfo=datetime.timezone.utc), host='testapi.example.com',tz='UTC') + DumpInfo.objects.create(date=timezone.datetime(2022,8,31,7,10,1,tzinfo=datetime.UTC), host='testapi.example.com',tz='UTC') url = urlreverse('ietf.api.views.version') r = self.client.get(url) data = r.json() self.assertEqual(data['version'], ietf.__version__+ietf.__patch__) + for lib in settings.ADVERTISE_VERSIONS: + self.assertIn(lib, data['other']) self.assertEqual(data['dumptime'], "2022-08-31 07:10:01 +0000") DumpInfo.objects.update(tz='PST8PDT') r = self.client.get(url) @@ -705,35 +886,518 @@ def test_api_version(self): def test_api_appauth(self): - url = urlreverse('ietf.api.views.app_auth') - person = PersonFactory() - apikey = PersonalApiKey.objects.create(endpoint=url, person=person) - - self.client.login(username=person.user.username,password=f'{person.user.username}+password') - self.client.logout() - - # error cases - # missing apikey - r = self.client.post(url, {}) - self.assertContains(r, 'Missing apikey parameter', status_code=400) + for app in ["authortools", "bibxml"]: + url = urlreverse('ietf.api.views.app_auth', kwargs={"app": app}) + person = PersonFactory() + apikey = PersonalApiKeyFactory(endpoint=url, person=person) + + self.client.login(username=person.user.username,password=f'{person.user.username}+password') + self.client.logout() + + # error cases + # missing apikey + r = self.client.post(url, {}) + self.assertContains(r, 'Missing apikey parameter', status_code=400) + + # invalid apikey + r = self.client.post(url, {'apikey': 'foobar'}) + self.assertContains(r, 'Invalid apikey', status_code=403) + + # working case + r = self.client.post(url, {'apikey': apikey.hash()}) + self.assertEqual(r.status_code, 200) + jsondata = r.json() + self.assertEqual(jsondata['success'], True) + self.client.logout() - # invalid apikey - r = 
self.client.post(url, {'apikey': 'foobar'}) - self.assertContains(r, 'Invalid apikey', status_code=403) + @override_settings(APP_API_TOKENS={"ietf.api.views.nfs_metrics": ["valid-token"]}) + def test_api_nfs_metrics(self): + url = urlreverse("ietf.api.views.nfs_metrics") + r = self.client.get(url) + self.assertEqual(r.status_code, 403) + r = self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertContains(r, 'nfs_latency_seconds{operation="write"}') - # working case - r = self.client.post(url, {'apikey': apikey.hash()}) - self.assertEqual(r.status_code, 200) - jsondata = r.json() - self.assertEqual(jsondata['success'], True) - def test_api_get_session_matherials_no_agenda_meeting_url(self): meeting = MeetingFactory(type_id='ietf') session = SessionFactory(meeting=meeting) url = urlreverse('ietf.meeting.views.api_get_session_materials', kwargs={'session_id': session.pk}) r = self.client.get(url) self.assertEqual(r.status_code, 200) + + @override_settings(APP_API_TOKENS={"ietf.api.views.draft_aliases": ["valid-token"]}) + @mock.patch("ietf.api.views.DraftAliasGenerator") + def test_draft_aliases(self, mock): + mock.return_value = (("alias1", ("a1", "a2")), ("alias2", ("a3", "a4"))) + url = urlreverse("ietf.api.views.draft_aliases") + r = self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-type"], "application/json") + self.assertEqual( + json.loads(r.content), + { + "aliases": [ + {"alias": "alias1", "domains": ["ietf"], "addresses": ["a1", "a2"]}, + {"alias": "alias2", "domains": ["ietf"], "addresses": ["a3", "a4"]}, + ]} + ) + # some invalid cases + self.assertEqual( + self.client.get(url, headers={}).status_code, + 403, + ) + self.assertEqual( + self.client.get(url, headers={"X-Api-Key": "something-else"}).status_code, + 403, + ) + self.assertEqual( + self.client.post(url, headers={"X-Api-Key": "something-else"}).status_code, + 403, + ) + self.assertEqual( + self.client.post(url, headers={"X-Api-Key": "valid-token"}).status_code, + 405, + ) + + @override_settings(APP_API_TOKENS={"ietf.api.views.group_aliases": ["valid-token"]}) + @mock.patch("ietf.api.views.GroupAliasGenerator") + def test_group_aliases(self, mock): + mock.return_value = (("alias1", ("ietf",), ("a1", "a2")), ("alias2", ("ietf", "iab"), ("a3", "a4"))) + url = urlreverse("ietf.api.views.group_aliases") + r = self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-type"], "application/json") + self.assertEqual( + json.loads(r.content), + { + "aliases": [ + {"alias": "alias1", "domains": ["ietf"], "addresses": ["a1", "a2"]}, + {"alias": "alias2", "domains": ["ietf", "iab"], "addresses": ["a3", "a4"]}, + ]} + ) + # some invalid cases + self.assertEqual( + self.client.get(url, headers={}).status_code, + 403, + ) + self.assertEqual( + self.client.get(url, headers={"X-Api-Key": "something-else"}).status_code, + 403, + ) + self.assertEqual( + self.client.post(url, headers={"X-Api-Key": "something-else"}).status_code, + 403, + ) + self.assertEqual( + self.client.post(url, headers={"X-Api-Key": "valid-token"}).status_code, + 405, + ) + + @override_settings(APP_API_TOKENS={"ietf.api.views.active_email_list": ["valid-token"]}) + def test_active_email_list(self): + EmailFactory(active=True) # make sure there's at least one active email... + EmailFactory(active=False) # ... 
and at least one non-active emai + url = urlreverse("ietf.api.views.active_email_list") + r = self.client.post(url, headers={}) + self.assertEqual(r.status_code, 403) + r = self.client.get(url, headers={}) + self.assertEqual(r.status_code, 403) + r = self.client.get(url, headers={"X-Api-Key": "not-the-valid-token"}) + self.assertEqual(r.status_code, 403) + r = self.client.post(url, headers={"X-Api-Key": "not-the-valid-token"}) + self.assertEqual(r.status_code, 403) + r = self.client.post(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 405) + r = self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + result = json.loads(r.content) + self.assertCountEqual(result.keys(), ["addresses"]) + self.assertCountEqual(result["addresses"], Email.objects.filter(active=True).values_list("address", flat=True)) + + @override_settings(APP_API_TOKENS={"ietf.api.views.related_email_list": ["valid-token"]}) + def test_related_email_list(self): + joe = EmailFactory(address='joe@work.com') + EmailFactory(address='joe@home.com', person=joe.person) + EmailFactory(address='jòe@spain.com', person=joe.person) + url = urlreverse("ietf.api.views.related_email_list", kwargs={'email': 'joe@home.com'}) + # no api key + r = self.client.get(url, headers={}) + self.assertEqual(r.status_code, 403) + # invalid api key + r = self.client.get(url, headers={"X-Api-Key": "not-the-valid-token"}) + self.assertEqual(r.status_code, 403) + # wrong method + r = self.client.post(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 405) + # valid + r = self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + result = json.loads(r.content) + self.assertCountEqual(result.keys(), ["addresses"]) + self.assertCountEqual(result["addresses"], joe.person.email_set.values_list("address", flat=True)) + # non-ascii + non_ascii_url = urlreverse("ietf.api.views.related_email_list", kwargs={'email': 'jòe@spain.com'}) + r = self.client.get(non_ascii_url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 200) + result = json.loads(r.content) + self.assertTrue('joe@home.com' in result["addresses"]) + # email not found + not_found_url = urlreverse("ietf.api.views.related_email_list", kwargs={'email': 'nobody@nowhere.com'}) + r = self.client.get(not_found_url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 404) + + @override_settings(APP_API_TOKENS={"ietf.api.views.role_holder_addresses": ["valid-token"]}) + def test_role_holder_addresses(self): + url = urlreverse("ietf.api.views.role_holder_addresses") + r = self.client.get(url, headers={}) + self.assertEqual(r.status_code, 403, "No api token, no access") + r = self.client.get(url, headers={"X-Api-Key": "not-valid-token"}) + self.assertEqual(r.status_code, 403, "Bad api token, no access") + r = self.client.post(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 405, "Bad method, no access") + + emails = EmailFactory.create_batch(5) + email_queryset = Email.objects.filter(pk__in=[e.pk for e in emails]) + with mock.patch("ietf.api.views.role_holder_emails", return_value=email_queryset): + r = self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 200, "Good api token and method, access") + content_dict = json.loads(r.content) + 
self.assertCountEqual(content_dict.keys(), ["addresses"]) + self.assertEqual( + content_dict["addresses"], + sorted(e.address for e in emails), + ) + + @override_settings( + APP_API_TOKENS={"ietf.api.views.ingest_email": "valid-token", "ietf.api.views.ingest_email_test": "test-token"} + ) + @mock.patch("ietf.api.views.iana_ingest_review_email") + @mock.patch("ietf.api.views.ipr_ingest_response_email") + @mock.patch("ietf.api.views.nomcom_ingest_feedback_email") + def test_ingest_email( + self, mock_nomcom_ingest, mock_ipr_ingest, mock_iana_ingest + ): + mocks = {mock_nomcom_ingest, mock_ipr_ingest, mock_iana_ingest} + empty_outbox() + url = urlreverse("ietf.api.views.ingest_email") + test_mode_url = urlreverse("ietf.api.views.ingest_email_test") + + # test various bad calls + r = self.client.get(url) + self.assertEqual(r.status_code, 403) + self.assertFalse(any(m.called for m in mocks)) + r = self.client.get(test_mode_url) + self.assertEqual(r.status_code, 403) + self.assertFalse(any(m.called for m in mocks)) + + r = self.client.post(url) + self.assertEqual(r.status_code, 403) + self.assertFalse(any(m.called for m in mocks)) + r = self.client.post(test_mode_url) + self.assertEqual(r.status_code, 403) + self.assertFalse(any(m.called for m in mocks)) + + r = self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 405) + self.assertFalse(any(m.called for m in mocks)) + r = self.client.get(test_mode_url, headers={"X-Api-Key": "test-token"}) + self.assertEqual(r.status_code, 405) + self.assertFalse(any(m.called for m in mocks)) + + r = self.client.post(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 415) + self.assertFalse(any(m.called for m in mocks)) + r = self.client.post(test_mode_url, headers={"X-Api-Key": "test-token"}) + self.assertEqual(r.status_code, 415) + self.assertFalse(any(m.called for m in mocks)) + + r = self.client.post( + url, content_type="application/json", headers={"X-Api-Key": "valid-token"} + ) + self.assertEqual(r.status_code, 400) + self.assertFalse(any(m.called for m in mocks)) + r = self.client.post( + test_mode_url, content_type="application/json", headers={"X-Api-Key": "test-token"} + ) + self.assertEqual(r.status_code, 400) + self.assertFalse(any(m.called for m in mocks)) + + r = self.client.post( + url, + "this is not JSON!", + content_type="application/json", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 400) + self.assertFalse(any(m.called for m in mocks)) + r = self.client.post( + test_mode_url, + "this is not JSON!", + content_type="application/json", + headers={"X-Api-Key": "test-token"}, + ) + self.assertEqual(r.status_code, 400) + self.assertFalse(any(m.called for m in mocks)) + + r = self.client.post( + url, + {"json": "yes", "valid_schema": False}, + content_type="application/json", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 400) + self.assertFalse(any(m.called for m in mocks)) + r = self.client.post( + test_mode_url, + {"json": "yes", "valid_schema": False}, + content_type="application/json", + headers={"X-Api-Key": "test-token"}, + ) + self.assertEqual(r.status_code, 400) + self.assertFalse(any(m.called for m in mocks)) + + # bad destination + message_b64 = base64.b64encode(b"This is a message").decode() + r = self.client.post( + url, + {"dest": "not-a-destination", "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 200) + 
self.assertEqual(r.headers["Content-Type"], "application/json") + self.assertEqual(json.loads(r.content), {"result": "bad_dest"}) + self.assertFalse(any(m.called for m in mocks)) + r = self.client.post( + test_mode_url, + {"dest": "not-a-destination", "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "test-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + self.assertEqual(json.loads(r.content), {"result": "bad_dest"}) + self.assertFalse(any(m.called for m in mocks)) + + # test that valid requests call handlers appropriately + r = self.client.post( + url, + {"dest": "iana-review", "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + self.assertEqual(json.loads(r.content), {"result": "ok"}) + self.assertTrue(mock_iana_ingest.called) + self.assertEqual(mock_iana_ingest.call_args, mock.call(b"This is a message")) + self.assertFalse(any(m.called for m in (mocks - {mock_iana_ingest}))) + mock_iana_ingest.reset_mock() + + # the test mode endpoint should _not_ call the handler + r = self.client.post( + test_mode_url, + {"dest": "iana-review", "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "test-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + self.assertEqual(json.loads(r.content), {"result": "ok"}) + self.assertFalse(any(m.called for m in mocks)) + mock_iana_ingest.reset_mock() + + r = self.client.post( + url, + {"dest": "ipr-response", "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + self.assertEqual(json.loads(r.content), {"result": "ok"}) + self.assertTrue(mock_ipr_ingest.called) + self.assertEqual(mock_ipr_ingest.call_args, mock.call(b"This is a message")) + self.assertFalse(any(m.called for m in (mocks - {mock_ipr_ingest}))) + mock_ipr_ingest.reset_mock() + + # the test mode endpoint should _not_ call the handler + r = self.client.post( + test_mode_url, + {"dest": "ipr-response", "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "test-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + self.assertEqual(json.loads(r.content), {"result": "ok"}) + self.assertFalse(any(m.called for m in mocks)) + mock_ipr_ingest.reset_mock() + + # bad nomcom-feedback dest + for bad_nomcom_dest in [ + "nomcom-feedback", # no suffix + "nomcom-feedback-", # no year + "nomcom-feedback-squid", # not a year, + "nomcom-feedback-2024-2025", # also not a year + ]: + r = self.client.post( + url, + {"dest": bad_nomcom_dest, "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + self.assertEqual(json.loads(r.content), {"result": "bad_dest"}) + self.assertFalse(any(m.called for m in mocks)) + r = self.client.post( + test_mode_url, + {"dest": bad_nomcom_dest, "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "test-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], 
"application/json") + self.assertEqual(json.loads(r.content), {"result": "bad_dest"}) + self.assertFalse(any(m.called for m in mocks)) + # good nomcom-feedback dest + random_year = randrange(100000) + r = self.client.post( + url, + {"dest": f"nomcom-feedback-{random_year}", "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + self.assertEqual(json.loads(r.content), {"result": "ok"}) + self.assertTrue(mock_nomcom_ingest.called) + self.assertEqual(mock_nomcom_ingest.call_args, mock.call(b"This is a message", random_year)) + self.assertFalse(any(m.called for m in (mocks - {mock_nomcom_ingest}))) + mock_nomcom_ingest.reset_mock() + + # the test mode endpoint should _not_ call the handler + r = self.client.post( + test_mode_url, + {"dest": f"nomcom-feedback-{random_year}", "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "test-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + self.assertEqual(json.loads(r.content), {"result": "ok"}) + self.assertFalse(any(m.called for m in mocks)) + mock_nomcom_ingest.reset_mock() + + # test that exceptions lead to email being sent - assumes that iana-review handling is representative + mock_iana_ingest.side_effect = EmailIngestionError("Error: don't send email") + r = self.client.post( + url, + {"dest": "iana-review", "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + self.assertEqual(json.loads(r.content), {"result": "bad_msg"}) + self.assertTrue(mock_iana_ingest.called) + self.assertEqual(mock_iana_ingest.call_args, mock.call(b"This is a message")) + self.assertFalse(any(m.called for m in (mocks - {mock_iana_ingest}))) + self.assertEqual(len(outbox), 0) # implicitly tests that _none_ of the earlier tests sent email + mock_iana_ingest.reset_mock() + + # test default recipients and attached original message + mock_iana_ingest.side_effect = EmailIngestionError( + "Error: do send email", + email_body="This is my email\n", + email_original_message=b"This is the original message" + ) + with override_settings(ADMINS=[("Some Admin", "admin@example.com")]): + r = self.client.post( + url, + {"dest": "iana-review", "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + self.assertEqual(json.loads(r.content), {"result": "bad_msg"}) + self.assertTrue(mock_iana_ingest.called) + self.assertEqual(mock_iana_ingest.call_args, mock.call(b"This is a message")) + self.assertFalse(any(m.called for m in (mocks - {mock_iana_ingest}))) + self.assertEqual(len(outbox), 1) + self.assertIn("admin@example.com", outbox[0]["To"]) + self.assertEqual("Error: do send email", outbox[0]["Subject"]) + self.assertEqual("This is my email\n", get_payload_text(outbox[0].get_body())) + attachments = list(a for a in outbox[0].iter_attachments()) + self.assertEqual(len(attachments), 1) + self.assertEqual(attachments[0].get_filename(), "original-message") + self.assertEqual(attachments[0].get_content_type(), "application/octet-stream") + self.assertEqual(attachments[0].get_content(), b"This is the original message") + 
mock_iana_ingest.reset_mock() + empty_outbox() + + # test overridden recipients and no attached original message + mock_iana_ingest.side_effect = EmailIngestionError( + "Error: do send email", + email_body="This is my email\n", + email_recipients=("thatguy@example.com") + ) + with override_settings(ADMINS=[("Some Admin", "admin@example.com")]): + r = self.client.post( + url, + {"dest": "iana-review", "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + self.assertEqual(json.loads(r.content), {"result": "bad_msg"}) + self.assertTrue(mock_iana_ingest.called) + self.assertEqual(mock_iana_ingest.call_args, mock.call(b"This is a message")) + self.assertFalse(any(m.called for m in (mocks - {mock_iana_ingest}))) + self.assertEqual(len(outbox), 1) + self.assertNotIn("admin@example.com", outbox[0]["To"]) + self.assertIn("thatguy@example.com", outbox[0]["To"]) + self.assertEqual("Error: do send email", outbox[0]["Subject"]) + self.assertEqual("This is my email\n", get_payload_text(outbox[0])) + mock_iana_ingest.reset_mock() + empty_outbox() + + # test attached traceback + mock_iana_ingest.side_effect = EmailIngestionError( + "Error: do send email", + email_body="This is my email\n", + email_attach_traceback=True, + ) + with override_settings(ADMINS=[("Some Admin", "admin@example.com")]): + r = self.client.post( + url, + {"dest": "iana-review", "message": message_b64}, + content_type="application/json", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "application/json") + self.assertEqual(json.loads(r.content), {"result": "bad_msg"}) + self.assertTrue(mock_iana_ingest.called) + self.assertEqual(mock_iana_ingest.call_args, mock.call(b"This is a message")) + self.assertFalse(any(m.called for m in (mocks - {mock_iana_ingest}))) + self.assertEqual(len(outbox), 1) + self.assertIn("admin@example.com", outbox[0]["To"]) + self.assertEqual("Error: do send email", outbox[0]["Subject"]) + self.assertEqual("This is my email\n", get_payload_text(outbox[0].get_body())) + attachments = list(a for a in outbox[0].iter_attachments()) + self.assertEqual(len(attachments), 1) + self.assertEqual(attachments[0].get_filename(), "traceback.txt") + self.assertEqual(attachments[0].get_content_type(), "text/plain") + self.assertIn("ietf.api.views.EmailIngestionError: Error: do send email", attachments[0].get_content()) + mock_iana_ingest.reset_mock() + empty_outbox() class DirectAuthApiTests(TestCase): @@ -793,7 +1457,7 @@ def test_bad_post(self): data = self.response_data(r) self.assertEqual(data["result"], "failure") self.assertEqual(data["reason"], "invalid post") - + bad = dict(authtoken=self.valid_token, username=self.valid_person.user.username, password=self.valid_password) r = self.client.post(self.url, bad) self.assertEqual(r.status_code, 200) @@ -801,8 +1465,9 @@ def test_bad_post(self): self.assertEqual(data["result"], "failure") self.assertEqual(data["reason"], "invalid post") + @override_settings() def test_notokenstore(self): - self.assertFalse(hasattr(settings, "APP_API_TOKENS")) + del settings.APP_API_TOKENS # only affects overridden copy of settings! 
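For context on the test_notokenstore change just above: a bare @override_settings() hands the test a disposable copy of the Django settings, so deleting APP_API_TOKENS inside the test leaves the real configuration untouched and the original value comes back as soon as the test returns. A minimal standalone sketch of the same pattern, assuming APP_API_TOKENS is defined in the test settings (the class and test names below are illustrative, not datatracker code):

    from django.conf import settings
    from django.test import TestCase, override_settings

    class NoTokenStoreSketch(TestCase):
        @override_settings()  # snapshot the settings; edits below are rolled back after the test
        def test_setting_can_be_removed_safely(self):
            del settings.APP_API_TOKENS  # only removes it from the overridden copy
            self.assertFalse(hasattr(settings, "APP_API_TOKENS"))
            # once the test exits, settings.APP_API_TOKENS is restored to its configured value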
r = self.client.post(self.url,self.valid_body_with_good_password) self.assertEqual(r.status_code, 200) data = self.response_data(r) @@ -832,7 +1497,7 @@ def test_good_password(self): data = self.response_data(r) self.assertEqual(data["result"], "success") -class TastypieApiTestCase(ResourceTestCaseMixin, TestCase): +class TastypieApiTests(ResourceTestCaseMixin, TestCase): def __init__(self, *args, **kwargs): self.apps = {} for app_name in settings.INSTALLED_APPS: @@ -842,7 +1507,7 @@ def __init__(self, *args, **kwargs): models_path = os.path.join(os.path.dirname(app.__file__), "models.py") if os.path.exists(models_path): self.apps[name] = app_name - super(TastypieApiTestCase, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def test_api_top_level(self): client = Client(Accept='application/json') @@ -851,7 +1516,7 @@ def test_api_top_level(self): resource_list = r.json() for name in self.apps: - if not name in self.apps: + if not name in resource_list: sys.stderr.write("Expected a REST API resource for %s, but didn't find one\n" % name) for name in self.apps: @@ -877,6 +1542,21 @@ def test_all_model_resources_exist(self): self.assertIn(model._meta.model_name, list(app_resources.keys()), "There doesn't seem to be any API resource for model %s.models.%s"%(app.__name__,model.__name__,)) + def test_serializer_to_etree_handles_nulls(self): + """Serializer to_etree() should handle a null character""" + serializer = Serializer() + try: + serializer.to_etree("string with no nulls in it") + except ValueError: + self.fail("serializer.to_etree raised ValueError on an ordinary string") + try: + serializer.to_etree("string with a \x00 in it") + except ValueError: + self.fail( + "serializer.to_etree raised ValueError on a string " + "containing a null character" + ) + class RfcdiffSupportTests(TestCase): @@ -944,7 +1624,7 @@ def do_draft_test(self, name): self.assertNotIn('previous', received, 'Rev 00 has no previous name when not replacing a draft') replaced = IndividualDraftFactory() - RelatedDocument.objects.create(relationship_id='replaces',source=draft,target=replaced.docalias.first()) + RelatedDocument.objects.create(relationship_id='replaces',source=draft,target=replaced) received = self.getJson(dict(name=draft.name, rev='00')) self.assertEqual(received['previous'], f'{replaced.name}-{replaced.rev}', 'Rev 00 has a previous name when replacing a draft') @@ -974,19 +1654,19 @@ def test_draft_with_broken_history(self): def do_rfc_test(self, draft_name): draft = WgDraftFactory(name=draft_name, create_revisions=range(0,2)) - draft.docalias.create(name=f'rfc{self.next_rfc_number():04}') + rfc = WgRfcFactory(group=draft.group, rfc_number=self.next_rfc_number()) + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) draft.set_state(State.objects.get(type_id='draft',slug='rfc')) draft.set_state(State.objects.get(type_id='draft-iesg', slug='pub')) - draft = reload_db_objects(draft) - rfc = draft + draft, rfc = reload_db_objects(draft, rfc) - number = rfc.rfc_number() + number = rfc.rfc_number received = self.getJson(dict(name=number)) self.assertEqual( received, dict( content_url=rfc.get_href(), - name=rfc.canonical_name(), + name=rfc.name, previous=f'{draft.name}-{draft.rev}', previous_url= draft.history_set.get(rev=draft.rev).get_href(), ), @@ -994,7 +1674,7 @@ def do_rfc_test(self, draft_name): ) num_received = received - received = self.getJson(dict(name=rfc.canonical_name())) + received = self.getJson(dict(name=rfc.name)) self.assertEqual(num_received, received, 
'RFC by canonical name gives same result as by number') received = self.getJson(dict(name=f'RfC {number}')) @@ -1026,30 +1706,30 @@ def test_rfc(self): def test_rfc_with_tombstone(self): draft = WgDraftFactory(create_revisions=range(0,2)) - draft.docalias.create(name='rfc3261') # See views_doc.HAS_TOMBSTONE + rfc = WgRfcFactory(rfc_number=3261,group=draft.group)# See views_doc.HAS_TOMBSTONE + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) draft.set_state(State.objects.get(type_id='draft',slug='rfc')) draft.set_state(State.objects.get(type_id='draft-iesg', slug='pub')) draft = reload_db_objects(draft) - rfc = draft # Some old rfcs had tombstones that shouldn't be used for comparisons - received = self.getJson(dict(name=rfc.canonical_name())) + received = self.getJson(dict(name=rfc.name)) self.assertTrue(received['previous'].endswith('00')) def do_rfc_with_broken_history_test(self, draft_name): draft = WgDraftFactory(rev='10', name=draft_name) - draft.docalias.create(name=f'rfc{self.next_rfc_number():04}') + rfc = WgRfcFactory(group=draft.group, rfc_number=self.next_rfc_number()) + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) draft.set_state(State.objects.get(type_id='draft',slug='rfc')) draft.set_state(State.objects.get(type_id='draft-iesg', slug='pub')) draft = reload_db_objects(draft) - rfc = draft received = self.getJson(dict(name=draft.name)) self.assertEqual( received, dict( content_url=rfc.get_href(), - name=rfc.canonical_name(), + name=rfc.name, previous=f'{draft.name}-10', previous_url= f'{settings.IETF_ID_ARCHIVE_URL}{draft.name}-10.txt', ), @@ -1080,3 +1760,91 @@ def test_rfc_with_broken_history(self): # tricky draft names self.do_rfc_with_broken_history_test(draft_name='draft-gizmo-01') self.do_rfc_with_broken_history_test(draft_name='draft-oh-boy-what-a-draft-02-03') + + def test_no_such_document(self): + for name in ['rfc0000', 'draft-ftei-oof-rab-00']: + url = urlreverse(self.target_view, kwargs={'name': name}) + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + + +class TokenTests(TestCase): + @override_settings(APP_API_TOKENS={"known.endpoint": ["token in a list"], "oops": "token as a str"}) + def test_is_valid_token(self): + # various invalid cases + self.assertFalse(is_valid_token("unknown.endpoint", "token in a list")) + self.assertFalse(is_valid_token("known.endpoint", "token")) + self.assertFalse(is_valid_token("known.endpoint", "token as a str")) + self.assertFalse(is_valid_token("oops", "token")) + self.assertFalse(is_valid_token("oops", "token in a list")) + # the only valid cases + self.assertTrue(is_valid_token("known.endpoint", "token in a list")) + self.assertTrue(is_valid_token("oops", "token as a str")) + + @mock.patch("ietf.api.ietf_utils.is_valid_token") + def test_requires_api_token(self, mock_is_valid_token): + called = False + + @requires_api_token + def fn_to_wrap(request, *args, **kwargs): + nonlocal called + called = True + return request, args, kwargs + + req_factory = RequestFactory() + arg = object() + kwarg = object() + + # No X-Api-Key header + mock_is_valid_token.return_value = False + val = fn_to_wrap( + req_factory.get("/some/url", headers={}), + arg, + kwarg=kwarg, + ) + self.assertTrue(isinstance(val, HttpResponseForbidden)) + self.assertFalse(mock_is_valid_token.called) + self.assertFalse(called) + + # Bad X-Api-Key header (not resetting the mock, it was not used yet) + val = fn_to_wrap( + req_factory.get("/some/url", headers={"X-Api-Key": "some-value"}), + arg, + 
kwarg=kwarg, + ) + self.assertTrue(isinstance(val, HttpResponseForbidden)) + self.assertTrue(mock_is_valid_token.called) + self.assertEqual( + mock_is_valid_token.call_args[0], + (fn_to_wrap.__module__ + "." + fn_to_wrap.__qualname__, "some-value"), + ) + self.assertFalse(called) + + # Valid header + mock_is_valid_token.reset_mock() + mock_is_valid_token.return_value = True + request = req_factory.get("/some/url", headers={"X-Api-Key": "some-value"}) + # Bad X-Api-Key header (not resetting the mock, it was not used yet) + val = fn_to_wrap( + request, + arg, + kwarg=kwarg, + ) + self.assertEqual(val, (request, (arg,), {"kwarg": kwarg})) + self.assertTrue(mock_is_valid_token.called) + self.assertEqual( + mock_is_valid_token.call_args[0], + (fn_to_wrap.__module__ + "." + fn_to_wrap.__qualname__, "some-value"), + ) + self.assertTrue(called) + + # Test the endpoint setting + @requires_api_token("endpoint") + def another_fn_to_wrap(request): + return "yep" + + val = another_fn_to_wrap(request) + self.assertEqual( + mock_is_valid_token.call_args[0], + ("endpoint", "some-value"), + ) diff --git a/ietf/api/tests_core.py b/ietf/api/tests_core.py new file mode 100644 index 0000000000..7e45deac8a --- /dev/null +++ b/ietf/api/tests_core.py @@ -0,0 +1,289 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +"""Core API tests""" +from unittest.mock import patch +# from unittest.mock import patch, call + +from django.urls import reverse as urlreverse, NoReverseMatch +from rest_framework.test import APIClient + +# from ietf.person.factories import PersonFactory, EmailFactory +# from ietf.person.models import Person +from ietf.utils.test_utils import TestCase + + +class CoreApiTestCase(TestCase): + client_class = APIClient + + +class PersonTests(CoreApiTestCase): + # Tests disabled until we activate the DRF URLs in api/urls.py + + def test_person_detail(self): + with self.assertRaises(NoReverseMatch, msg="Re-enable test when this view is enabled"): + urlreverse("ietf.api.core_api.person-detail", kwargs={"pk": 1}) + + # person = PersonFactory() + # other_person = PersonFactory() + # url = urlreverse("ietf.api.core_api.person-detail", kwargs={"pk": person.pk}) + # bad_pk = person.pk + 10000 + # if Person.objects.filter(pk=bad_pk).exists(): + # bad_pk += 10000 # if this doesn't get us clear, something is wrong... 
+ # self.assertFalse( + # Person.objects.filter(pk=bad_pk).exists(), + # "Failed to find a non-existent person pk", + # ) + # bad_url = urlreverse("ietf.api.core_api.person-detail", kwargs={"pk": bad_pk}) + # r = self.client.get(bad_url, format="json") + # self.assertEqual(r.status_code, 403, "Must be logged in preferred to 404") + # r = self.client.get(url, format="json") + # self.assertEqual(r.status_code, 403, "Must be logged in") + # self.client.login( + # username=other_person.user.username, + # password=other_person.user.username + "+password", + # ) + # r = self.client.get(bad_url, format="json") + # self.assertEqual(r.status_code, 404) + # r = self.client.get(url, format="json") + # self.assertEqual(r.status_code, 403, "Can only retrieve self") + # self.client.login( + # username=person.user.username, password=person.user.username + "+password" + # ) + # r = self.client.get(url, format="json") + # self.assertEqual(r.status_code, 200) + # self.assertEqual( + # r.data, + # { + # "id": person.pk, + # "name": person.name, + # "emails": [ + # { + # "person": person.pk, + # "address": email.address, + # "primary": email.primary, + # "active": email.active, + # "origin": email.origin, + # } + # for email in person.email_set.all() + # ], + # }, + # ) + + @patch("ietf.person.api.send_new_email_confirmation_request") + def test_add_email(self, send_confirmation_mock): + with self.assertRaises(NoReverseMatch, msg="Re-enable this test when this view is enabled"): + urlreverse("ietf.api.core_api.person-email", kwargs={"pk": 1}) + + # email = EmailFactory(address="old@example.org") + # person = email.person + # other_person = PersonFactory() + # url = urlreverse("ietf.api.core_api.person-email", kwargs={"pk": person.pk}) + # post_data = {"address": "new@example.org"} + # + # r = self.client.post(url, data=post_data, format="json") + # self.assertEqual(r.status_code, 403, "Must be logged in") + # self.assertFalse(send_confirmation_mock.called) + # + # self.client.login( + # username=other_person.user.username, + # password=other_person.user.username + "+password", + # ) + # r = self.client.post(url, data=post_data, format="json") + # self.assertEqual(r.status_code, 403, "Can only retrieve self") + # self.assertFalse(send_confirmation_mock.called) + # + # self.client.login( + # username=person.user.username, password=person.user.username + "+password" + # ) + # r = self.client.post(url, data=post_data, format="json") + # self.assertEqual(r.status_code, 200) + # self.assertEqual(r.data, {"address": "new@example.org"}) + # self.assertTrue(send_confirmation_mock.called) + # self.assertEqual( + # send_confirmation_mock.call_args, call(person, "new@example.org") + # ) + + +class EmailTests(CoreApiTestCase): + def test_email_update(self): + with self.assertRaises(NoReverseMatch, msg="Re-enable this test when the view is enabled"): + urlreverse( + "ietf.api.core_api.email-detail", kwargs={"pk": "original@example.org"} + ) + + # email = EmailFactory( + # address="original@example.org", primary=False, active=True, origin="factory" + # ) + # person = email.person + # other_person = PersonFactory() + # url = urlreverse( + # "ietf.api.core_api.email-detail", kwargs={"pk": "original@example.org"} + # ) + # bad_url = urlreverse( + # "ietf.api.core_api.email-detail", + # kwargs={"pk": "not-original@example.org"}, + # ) + # + # r = self.client.put( + # bad_url, data={"primary": True, "active": False}, format="json" + # ) + # self.assertEqual(r.status_code, 403, "Must be logged in preferred to 404") + # r = 
self.client.put(url, data={"primary": True, "active": False}, format="json") + # self.assertEqual(r.status_code, 403, "Must be logged in") + # + # self.client.login( + # username=other_person.user.username, + # password=other_person.user.username + "+password", + # ) + # r = self.client.put( + # bad_url, data={"primary": True, "active": False}, format="json" + # ) + # self.assertEqual(r.status_code, 404, "No such address") + # r = self.client.put(url, data={"primary": True, "active": False}, format="json") + # self.assertEqual(r.status_code, 403, "Can only access own addresses") + # + # self.client.login( + # username=person.user.username, password=person.user.username + "+password" + # ) + # r = self.client.put(url, data={"primary": True, "active": False}, format="json") + # self.assertEqual(r.status_code, 200) + # self.assertEqual( + # r.data, + # { + # "person": person.pk, + # "address": "original@example.org", + # "primary": True, + # "active": False, + # "origin": "factory", + # }, + # ) + # email.refresh_from_db() + # self.assertEqual(email.person, person) + # self.assertEqual(email.address, "original@example.org") + # self.assertTrue(email.primary) + # self.assertFalse(email.active) + # self.assertEqual(email.origin, "factory") + # + # # address / origin should be immutable + # r = self.client.put( + # url, + # data={ + # "address": "modified@example.org", + # "primary": True, + # "active": False, + # "origin": "hacker", + # }, + # format="json", + # ) + # self.assertEqual(r.status_code, 200) + # self.assertEqual( + # r.data, + # { + # "person": person.pk, + # "address": "original@example.org", + # "primary": True, + # "active": False, + # "origin": "factory", + # }, + # ) + # email.refresh_from_db() + # self.assertEqual(email.person, person) + # self.assertEqual(email.address, "original@example.org") + # self.assertTrue(email.primary) + # self.assertFalse(email.active) + # self.assertEqual(email.origin, "factory") + + def test_email_partial_update(self): + with self.assertRaises(NoReverseMatch, msg="Re-enable this test when the view is enabled"): + urlreverse( + "ietf.api.core_api.email-detail", kwargs={"pk": "original@example.org"} + ) + + # email = EmailFactory( + # address="original@example.org", primary=False, active=True, origin="factory" + # ) + # person = email.person + # other_person = PersonFactory() + # url = urlreverse( + # "ietf.api.core_api.email-detail", kwargs={"pk": "original@example.org"} + # ) + # bad_url = urlreverse( + # "ietf.api.core_api.email-detail", + # kwargs={"pk": "not-original@example.org"}, + # ) + # + # r = self.client.patch( + # bad_url, data={"primary": True}, format="json" + # ) + # self.assertEqual(r.status_code, 403, "Must be logged in preferred to 404") + # r = self.client.patch(url, data={"primary": True}, format="json") + # self.assertEqual(r.status_code, 403, "Must be logged in") + # + # self.client.login( + # username=other_person.user.username, + # password=other_person.user.username + "+password", + # ) + # r = self.client.patch( + # bad_url, data={"primary": True}, format="json" + # ) + # self.assertEqual(r.status_code, 404, "No such address") + # r = self.client.patch(url, data={"primary": True}, format="json") + # self.assertEqual(r.status_code, 403, "Can only access own addresses") + # + # self.client.login( + # username=person.user.username, password=person.user.username + "+password" + # ) + # r = self.client.patch(url, data={"primary": True}, format="json") + # self.assertEqual(r.status_code, 200) + # self.assertEqual( + # r.data, + 
# { + # "person": person.pk, + # "address": "original@example.org", + # "primary": True, + # "active": True, + # "origin": "factory", + # }, + # ) + # email.refresh_from_db() + # self.assertEqual(email.person, person) + # self.assertEqual(email.address, "original@example.org") + # self.assertTrue(email.primary) + # self.assertTrue(email.active) + # self.assertEqual(email.origin, "factory") + # + # r = self.client.patch(url, data={"active": False}, format="json") + # self.assertEqual(r.status_code, 200) + # self.assertEqual( + # r.data, + # { + # "person": person.pk, + # "address": "original@example.org", + # "primary": True, + # "active": False, + # "origin": "factory", + # }, + # ) + # email.refresh_from_db() + # self.assertEqual(email.person, person) + # self.assertEqual(email.address, "original@example.org") + # self.assertTrue(email.primary) + # self.assertFalse(email.active) + # self.assertEqual(email.origin, "factory") + # + # r = self.client.patch(url, data={"address": "modified@example.org"}, format="json") + # self.assertEqual(r.status_code, 200) # extra fields allowed, but ignored + # email.refresh_from_db() + # self.assertEqual(email.person, person) + # self.assertEqual(email.address, "original@example.org") + # self.assertTrue(email.primary) + # self.assertFalse(email.active) + # self.assertEqual(email.origin, "factory") + # + # r = self.client.patch(url, data={"origin": "hacker"}, format="json") + # self.assertEqual(r.status_code, 200) # extra fields allowed, but ignored + # email.refresh_from_db() + # self.assertEqual(email.person, person) + # self.assertEqual(email.address, "original@example.org") + # self.assertTrue(email.primary) + # self.assertFalse(email.active) + # self.assertEqual(email.origin, "factory") diff --git a/ietf/api/tests_ietf_utils.py b/ietf/api/tests_ietf_utils.py new file mode 100644 index 0000000000..b8d7fea7b4 --- /dev/null +++ b/ietf/api/tests_ietf_utils.py @@ -0,0 +1,86 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.test import RequestFactory +from django.test.utils import override_settings + +from ietf.api.ietf_utils import is_valid_token, requires_api_token +from ietf.utils.test_utils import TestCase + + +class IetfUtilsTests(TestCase): + @override_settings( + APP_API_TOKENS={ + "ietf.api.foobar": ["valid-token"], + "ietf.api.misconfigured": "valid-token", # misconfigured + } + ) + def test_is_valid_token(self): + self.assertFalse(is_valid_token("ietf.fake.endpoint", "valid-token")) + self.assertFalse(is_valid_token("ietf.api.foobar", "invalid-token")) + self.assertFalse(is_valid_token("ietf.api.foobar", None)) + self.assertTrue(is_valid_token("ietf.api.foobar", "valid-token")) + + # misconfiguration + self.assertFalse(is_valid_token("ietf.api.misconfigured", "v")) + self.assertFalse(is_valid_token("ietf.api.misconfigured", None)) + self.assertTrue(is_valid_token("ietf.api.misconfigured", "valid-token")) + + @override_settings( + APP_API_TOKENS={ + "ietf.api.foo": ["valid-token"], + "ietf.api.bar": ["another-token"], + "ietf.api.misconfigured": "valid-token", # misconfigured + } + ) + def test_requires_api_token(self): + @requires_api_token("ietf.api.foo") + def protected_function(request): + return f"Access granted: {request.method}" + + # request with a valid token + request = RequestFactory().get( + "/some/url", headers={"X_API_KEY": "valid-token"} + ) + result = protected_function(request) + self.assertEqual(result, "Access granted: GET") + + # request with an invalid token + request = RequestFactory().get( + 
"/some/url", headers={"X_API_KEY": "invalid-token"} + ) + result = protected_function(request) + self.assertEqual(result.status_code, 403) + + # request without a token + request = RequestFactory().get("/some/url", headers={"X_API_KEY": ""}) + result = protected_function(request) + self.assertEqual(result.status_code, 403) + + # request without a X_API_KEY token + request = RequestFactory().get("/some/url") + result = protected_function(request) + self.assertEqual(result.status_code, 403) + + # request with a valid token for another API endpoint + request = RequestFactory().get( + "/some/url", headers={"X_API_KEY": "another-token"} + ) + result = protected_function(request) + self.assertEqual(result.status_code, 403) + + # requests for a misconfigured endpoint + @requires_api_token("ietf.api.misconfigured") + def another_protected_function(request): + return f"Access granted: {request.method}" + + # request with valid token + request = RequestFactory().get( + "/some/url", headers={"X_API_KEY": "valid-token"} + ) + result = another_protected_function(request) + self.assertEqual(result, "Access granted: GET") + + # request with invalid token with the correct initial character + request = RequestFactory().get("/some/url", headers={"X_API_KEY": "v"}) + result = another_protected_function(request) + self.assertEqual(result.status_code, 403) diff --git a/ietf/api/tests_serializers_rpc.py b/ietf/api/tests_serializers_rpc.py new file mode 100644 index 0000000000..167ffcd3ee --- /dev/null +++ b/ietf/api/tests_serializers_rpc.py @@ -0,0 +1,217 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +from unittest import mock + +from django.utils import timezone + +from ietf.utils.test_utils import TestCase +from ietf.doc.models import Document +from ietf.doc.factories import WgRfcFactory +from .serializers_rpc import EditableRfcSerializer + + +class EditableRfcSerializerTests(TestCase): + def test_create(self): + serializer = EditableRfcSerializer( + data={ + "published": timezone.now(), + "title": "Yadda yadda yadda", + "authors": [ + { + "titlepage_name": "B. Fett", + "is_editor": False, + "affiliation": "DBA Galactic Empire", + "country": "", + }, + ], + "stream": "ietf", + "abstract": "A long time ago in a galaxy far, far away...", + "pages": 3, + "std_level": "inf", + "subseries": ["fyi999"], + } + ) + self.assertTrue(serializer.is_valid()) + with self.assertRaises(RuntimeError, msg="serializer does not allow create()"): + serializer.save() + + @mock.patch("ietf.api.serializers_rpc.update_rfc_searchindex_task") + @mock.patch("ietf.api.serializers_rpc.trigger_red_precomputer_task") + def test_update(self, mock_trigger_red_task, mock_update_searchindex_task): + updates = WgRfcFactory.create_batch(2) + obsoletes = WgRfcFactory.create_batch(2) + rfc = WgRfcFactory(pages=10) + updated_by = WgRfcFactory.create_batch(2) + obsoleted_by = WgRfcFactory.create_batch(2) + for d in updates: + rfc.relateddocument_set.create(relationship_id="updates",target=d) + for d in obsoletes: + rfc.relateddocument_set.create(relationship_id="updates",target=d) + for d in updated_by: + d.relateddocument_set.create(relationship_id="updates",target=rfc) + for d in obsoleted_by: + d.relateddocument_set.create(relationship_id="updates",target=rfc) + serializer = EditableRfcSerializer( + instance=rfc, + data={ + "published": timezone.now(), + "title": "Yadda yadda yadda", + "authors": [ + { + "titlepage_name": "B. 
Fett", + "is_editor": False, + "affiliation": "DBA Galactic Empire", + "country": "", + }, + ], + "stream": "ise", + "abstract": "A long time ago in a galaxy far, far away...", + "pages": 3, + "std_level": "inf", + "subseries": ["fyi999"], + }, + ) + self.assertTrue(serializer.is_valid()) + result = serializer.save() + result.refresh_from_db() + self.assertEqual(result.title, "Yadda yadda yadda") + self.assertEqual( + list( + result.rfcauthor_set.values( + "titlepage_name", "is_editor", "affiliation", "country" + ) + ), + [ + { + "titlepage_name": "B. Fett", + "is_editor": False, + "affiliation": "DBA Galactic Empire", + "country": "", + }, + ], + ) + self.assertEqual(result.stream_id, "ise") + self.assertEqual( + result.abstract, "A long time ago in a galaxy far, far away..." + ) + self.assertEqual(result.pages, 3) + self.assertEqual(result.std_level_id, "inf") + self.assertEqual( + result.part_of(), + [Document.objects.get(name="fyi999")], + ) + # Confirm that red precomputer was triggered correctly + self.assertTrue(mock_trigger_red_task.delay.called) + _, mock_kwargs = mock_trigger_red_task.delay.call_args + self.assertIn("rfc_number_list", mock_kwargs) + expected_numbers = sorted( + [ + d.rfc_number + for d in [rfc] + updates + obsoletes + updated_by + obsoleted_by + ] + ) + self.assertEqual(mock_kwargs["rfc_number_list"], expected_numbers) + # Confirm that the search index update task was triggered correctly + self.assertTrue(mock_update_searchindex_task.delay.called) + self.assertEqual( + mock_update_searchindex_task.delay.call_args, + mock.call(rfc.rfc_number), + ) + + @mock.patch("ietf.api.serializers_rpc.update_rfc_searchindex_task") + @mock.patch("ietf.api.serializers_rpc.trigger_red_precomputer_task") + def test_partial_update(self, mock_trigger_red_task, mock_update_searchindex_task): + # We could test other permutations of fields, but authors is a partial update + # we know we are going to use, so verifying that one in particular. + updates = WgRfcFactory.create_batch(2) + obsoletes = WgRfcFactory.create_batch(2) + rfc = WgRfcFactory(pages=10, abstract="do or do not", title="padawan") + updated_by = WgRfcFactory.create_batch(2) + obsoleted_by = WgRfcFactory.create_batch(2) + for d in updates: + rfc.relateddocument_set.create(relationship_id="updates",target=d) + for d in obsoletes: + rfc.relateddocument_set.create(relationship_id="updates",target=d) + for d in updated_by: + d.relateddocument_set.create(relationship_id="updates",target=rfc) + for d in obsoleted_by: + d.relateddocument_set.create(relationship_id="updates",target=rfc) + serializer = EditableRfcSerializer( + partial=True, + instance=rfc, + data={ + "authors": [ + { + "titlepage_name": "B. Fett", + "is_editor": False, + "affiliation": "DBA Galactic Empire", + "country": "", + }, + ], + }, + ) + self.assertTrue(serializer.is_valid()) + result = serializer.save() + result.refresh_from_db() + self.assertEqual(rfc.title, "padawan") + self.assertEqual( + list( + result.rfcauthor_set.values( + "titlepage_name", "is_editor", "affiliation", "country" + ) + ), + [ + { + "titlepage_name": "B. 
Fett", + "is_editor": False, + "affiliation": "DBA Galactic Empire", + "country": "", + }, + ], + ) + self.assertEqual(result.stream_id, "ietf") + self.assertEqual(result.abstract, "do or do not") + self.assertEqual(result.pages, 10) + self.assertEqual(result.std_level_id, "ps") + self.assertEqual(result.part_of(), []) + # Confirm that the red precomputer was triggered correctly + self.assertTrue(mock_trigger_red_task.delay.called) + _, mock_kwargs = mock_trigger_red_task.delay.call_args + self.assertIn("rfc_number_list", mock_kwargs) + expected_numbers = sorted( + [ + d.rfc_number + for d in [rfc] + updates + obsoletes + updated_by + obsoleted_by + ] + ) + self.assertEqual(mock_kwargs["rfc_number_list"], expected_numbers) + # Confirm that the search index update task was called correctly + self.assertTrue(mock_update_searchindex_task.delay.called) + self.assertEqual( + mock_update_searchindex_task.delay.call_args, + mock.call(rfc.rfc_number), + ) + + # Test only a field on the Document itself to be sure that it works + mock_trigger_red_task.delay.reset_mock() + mock_update_searchindex_task.delay.reset_mock() + serializer = EditableRfcSerializer( + partial=True, + instance=rfc, + data={"title": "jedi master"}, + ) + self.assertTrue(serializer.is_valid()) + result = serializer.save() + result.refresh_from_db() + self.assertEqual(rfc.title, "jedi master") + # Confirm that the red precomputer was triggered correctly + self.assertTrue(mock_trigger_red_task.delay.called) + _, mock_kwargs = mock_trigger_red_task.delay.call_args + self.assertIn("rfc_number_list", mock_kwargs) + self.assertEqual(mock_kwargs["rfc_number_list"], expected_numbers) + # Confirm that the search index update task was called correctly + self.assertTrue(mock_update_searchindex_task.delay.called) + self.assertEqual( + mock_update_searchindex_task.delay.call_args, + mock.call(rfc.rfc_number), + ) diff --git a/ietf/api/tests_views_rpc.py b/ietf/api/tests_views_rpc.py new file mode 100644 index 0000000000..c836cdc2c0 --- /dev/null +++ b/ietf/api/tests_views_rpc.py @@ -0,0 +1,479 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +import datetime +from io import StringIO +from pathlib import Path + +from django.conf import settings +from django.core.files.base import ContentFile +from django.db.models import Max +from django.db.models.functions import Coalesce +from django.test.utils import override_settings +from django.urls import reverse as urlreverse +import mock +from django.utils import timezone + +from ietf.api.views_rpc import DestinationHelperMixin +from ietf.blobdb.models import Blob +from ietf.doc.factories import ( + IndividualDraftFactory, + RfcFactory, + WgDraftFactory, + WgRfcFactory, +) +from ietf.doc.models import RelatedDocument, Document +from ietf.group.factories import RoleFactory, GroupFactory +from ietf.person.factories import PersonFactory +from ietf.sync.rfcindex import rfcindex_is_dirty +from ietf.utils.models import DirtyBits +from ietf.utils.test_utils import APITestCase, reload_db_objects + + +class RpcApiTests(APITestCase): + @override_settings(APP_API_TOKENS={"ietf.api.views_rpc": ["valid-token"]}) + def test_draftviewset_references(self): + viewname = "ietf.api.purple_api.draft-references" + + # non-existent draft + bad_id = Document.objects.aggregate(unused_id=Coalesce(Max("id"), 0) + 100)[ + "unused_id" + ] + url = urlreverse(viewname, kwargs={"doc_id": bad_id}) + # Without credentials + r = self.client.get(url) + self.assertEqual(r.status_code, 403) + # Add credentials + r = 
self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 404) + + # draft without any normative references + draft = IndividualDraftFactory() + draft = reload_db_objects(draft) + url = urlreverse(viewname, kwargs={"doc_id": draft.id}) + r = self.client.get(url) + self.assertEqual(r.status_code, 403) + r = self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 200) + refs = r.json() + self.assertEqual(refs, []) + + # draft without any normative references but with an informative reference + draft_foo = IndividualDraftFactory() + draft_foo = reload_db_objects(draft_foo) + RelatedDocument.objects.create( + source=draft, target=draft_foo, relationship_id="refinfo" + ) + url = urlreverse(viewname, kwargs={"doc_id": draft.id}) + r = self.client.get(url) + self.assertEqual(r.status_code, 403) + r = self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 200) + refs = r.json() + self.assertEqual(refs, []) + + # draft with a normative reference + draft_bar = IndividualDraftFactory() + draft_bar = reload_db_objects(draft_bar) + RelatedDocument.objects.create( + source=draft, target=draft_bar, relationship_id="refnorm" + ) + url = urlreverse(viewname, kwargs={"doc_id": draft.id}) + r = self.client.get(url) + self.assertEqual(r.status_code, 403) + r = self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(r.status_code, 200) + refs = r.json() + self.assertEqual(len(refs), 1) + self.assertEqual(refs[0]["id"], draft_bar.id) + self.assertEqual(refs[0]["name"], draft_bar.name) + + @override_settings(APP_API_TOKENS={"ietf.api.views_rpc": ["valid-token"]}) + @mock.patch("ietf.doc.tasks.signal_update_rfc_metadata_task.delay") + def test_notify_rfc_published(self, mock_task_delay): + url = urlreverse("ietf.api.purple_api.notify_rfc_published") + area = GroupFactory(type_id="area") + rfc_group = GroupFactory(type_id="wg") + draft_ad = RoleFactory(group=area, name_id="ad").person + rfc_ad = PersonFactory() + draft_authors = PersonFactory.create_batch(2) + rfc_authors = PersonFactory.create_batch(3) + draft = WgDraftFactory( + group__parent=area, authors=draft_authors, ad=draft_ad, stream_id="ietf" + ) + rfc_stream_id = "ise" + assert isinstance(draft, Document), "WgDraftFactory should generate a Document" + updates = RfcFactory.create_batch(2) + obsoletes = RfcFactory.create_batch(2) + unused_rfc_number = ( + Document.objects.filter(rfc_number__isnull=False).aggregate( + unused_rfc_number=Max("rfc_number") + 1 + )["unused_rfc_number"] + or 10000 + ) + + post_data = { + "published": "2025-12-17T20:29:00Z", + "draft_name": draft.name, + "draft_rev": draft.rev, + "rfc_number": unused_rfc_number, + "title": "RFC " + draft.title, + "authors": [ + { + "titlepage_name": f"titlepage {author.name}", + "is_editor": False, + "person": author.pk, + "email": author.email_address(), + "affiliation": "Some Affiliation", + "country": "CA", + } + for author in rfc_authors + ], + "group": rfc_group.acronym, + "stream": rfc_stream_id, + "abstract": "RFC version of " + draft.abstract, + "pages": draft.pages + 10, + "std_level": "ps", + "ad": rfc_ad.pk, + "obsoletes": [o.rfc_number for o in obsoletes], + "updates": [o.rfc_number for o in updates], + "subseries": [], + } + r = self.client.post(url, data=post_data, format="json") + self.assertEqual(r.status_code, 403) + + # Put a file in the way. 
Post should fail because files exists + rfc_path = Path(settings.RFC_PATH) + (rfc_path / "prerelease").mkdir() + file_in_the_way = rfc_path / f"rfc{unused_rfc_number}.txt" + file_in_the_way.touch() + r = self.client.post( + url, data=post_data, format="json", headers={"X-Api-Key": "valid-token"} + ) + self.assertEqual(r.status_code, 409) # conflict + file_in_the_way.unlink() + + # Put a blob in the way. Post should fail because replace = False + blob_in_the_way = Blob.objects.create( + bucket="rfc", name=f"txt/rfc{unused_rfc_number}.txt", content=b"" + ) + r = self.client.post( + url, data=post_data, format="json", headers={"X-Api-Key": "valid-token"} + ) + self.assertEqual(r.status_code, 409) # conflict + blob_in_the_way.delete() + + r = self.client.post( + url, data=post_data, format="json", headers={"X-Api-Key": "valid-token"} + ) + self.assertEqual(r.status_code, 200) + rfc = Document.objects.filter(rfc_number=unused_rfc_number).first() + self.assertIsNotNone(rfc) + self.assertEqual(rfc.came_from_draft(), draft) + self.assertEqual( + rfc.docevent_set.filter( + type="published_rfc", time="2025-12-17T20:29:00Z" + ).count(), + 1, + ) + self.assertEqual(rfc.title, "RFC " + draft.title) + self.assertEqual(rfc.documentauthor_set.count(), 0) + self.assertEqual( + [ + { + "titlepage_name": ra.titlepage_name, + "is_editor": ra.is_editor, + "person": ra.person, + "email": ra.email, + "affiliation": ra.affiliation, + "country": ra.country, + } + for ra in rfc.rfcauthor_set.all() + ], + [ + { + "titlepage_name": f"titlepage {author.name}", + "is_editor": False, + "person": author, + "email": author.email(), + "affiliation": "Some Affiliation", + "country": "CA", + } + for author in rfc_authors + ], + ) + self.assertEqual(rfc.group, rfc_group) + self.assertEqual(rfc.stream_id, rfc_stream_id) + self.assertEqual(rfc.abstract, "RFC version of " + draft.abstract) + self.assertEqual(rfc.pages, draft.pages + 10) + self.assertEqual(rfc.std_level_id, "ps") + self.assertEqual(rfc.ad, rfc_ad) + self.assertEqual(set(rfc.related_that_doc("obs")), set([o for o in obsoletes])) + self.assertEqual( + set(rfc.related_that_doc("updates")), set([o for o in updates]) + ) + self.assertEqual(rfc.part_of(), []) + self.assertEqual(draft.get_state().slug, "rfc") + # todo test non-empty relationships + # todo test references (when updating that is part of the handling) + + self.assertTrue(mock_task_delay.called) + mock_args, mock_kwargs = mock_task_delay.call_args + self.assertIn("rfc_number_list", mock_kwargs) + expected_rfc_number_list = [rfc.rfc_number] + expected_rfc_number_list.extend([d.rfc_number for d in updates + obsoletes]) + expected_rfc_number_list = sorted(set(expected_rfc_number_list)) + self.assertEqual(mock_kwargs["rfc_number_list"], expected_rfc_number_list) + + @override_settings(APP_API_TOKENS={"ietf.api.views_rpc": ["valid-token"]}) + @mock.patch("ietf.api.views_rpc.rebuild_reference_relations_task") + @mock.patch("ietf.api.views_rpc.update_rfc_searchindex_task") + @mock.patch("ietf.api.views_rpc.trigger_red_precomputer_task") + def test_upload_rfc_files( + self, + mock_trigger_red_task, + mock_update_searchindex_task, + mock_rebuild_relations, + ): + def _valid_post_data(): + """Generate a valid post data dict + + Each API call needs a fresh set of files, so don't reuse the return + value from this for multiple calls! 
+ """ + return { + "rfc": rfc.rfc_number, + "contents": [ + ContentFile(b"This is .xml", "myfile.xml"), + ContentFile(b"This is .txt", "myfile.txt"), + ContentFile(b"This is .html", "myfile.html"), + ContentFile(b"This is .pdf", "myfile.pdf"), + ContentFile(b"This is .json", "myfile.json"), + ContentFile(b"This is .notprepped.xml", "myfile.notprepped.xml"), + ], + "replace": False, + } + + url = urlreverse("ietf.api.purple_api.upload_rfc_files") + updates = RfcFactory.create_batch(2) + obsoletes = RfcFactory.create_batch(2) + + rfc = WgRfcFactory() + for r in obsoletes: + rfc.relateddocument_set.create(relationship_id="obs", target=r) + for r in updates: + rfc.relateddocument_set.create(relationship_id="updates", target=r) + assert isinstance(rfc, Document), "WgRfcFactory should generate a Document" + rfc_path = Path(settings.RFC_PATH) + (rfc_path / "prerelease").mkdir() + content = StringIO("XML content\n") + content.name = "myrfc.xml" + + # no api key + r = self.client.post(url, _valid_post_data(), format="multipart") + self.assertEqual(r.status_code, 403) + self.assertFalse(mock_update_searchindex_task.delay.called) + + # invalid RFC + r = self.client.post( + url, + _valid_post_data() | {"rfc": rfc.rfc_number + 10}, + format="multipart", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 400) + self.assertFalse(mock_update_searchindex_task.delay.called) + + # empty files + r = self.client.post( + url, + _valid_post_data() + | { + "contents": [ + ContentFile(b"", "myfile.xml"), + ContentFile(b"", "myfile.txt"), + ContentFile(b"", "myfile.html"), + ContentFile(b"", "myfile.pdf"), + ContentFile(b"", "myfile.json"), + ContentFile(b"", "myfile.notprepped.xml"), + ] + }, + format="multipart", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 400) + self.assertFalse(mock_update_searchindex_task.delay.called) + + # bad file type + r = self.client.post( + url, + _valid_post_data() + | { + "contents": [ + ContentFile(b"Some content", "myfile.jpg"), + ] + }, + format="multipart", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 400) + self.assertFalse(mock_update_searchindex_task.delay.called) + + # Put a file in the way. Post should fail because replace = False + file_in_the_way = rfc_path / f"{rfc.name}.txt" + file_in_the_way.touch() + r = self.client.post( + url, + _valid_post_data(), + format="multipart", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 409) # conflict + self.assertFalse(mock_update_searchindex_task.delay.called) + file_in_the_way.unlink() + + # Put a blob in the way. 
Post should fail because replace = False + blob_in_the_way = Blob.objects.create( + bucket="rfc", name=f"txt/{rfc.name}.txt", content=b"" + ) + r = self.client.post( + url, + _valid_post_data(), + format="multipart", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 409) # conflict + self.assertFalse(mock_update_searchindex_task.delay.called) + blob_in_the_way.delete() + + # valid post + mock_trigger_red_task.delay.reset_mock() + r = self.client.post( + url, + _valid_post_data(), + format="multipart", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertEqual( + mock_update_searchindex_task.delay.call_args, + mock.call(rfc.rfc_number), + ) + for extension in ["xml", "txt", "html", "pdf", "json"]: + filename = f"{rfc.name}.{extension}" + self.assertEqual( + (rfc_path / filename).read_text(), + f"This is .{extension}", + f"{extension} file should contain the expected content", + ) + self.assertEqual( + bytes( + Blob.objects.get( + bucket="rfc", name=f"{extension}/{filename}" + ).content + ), + f"This is .{extension}".encode("utf-8"), + f"{extension} blob should contain the expected content", + ) + # special case for notprepped + notprepped_fn = f"{rfc.name}.notprepped.xml" + self.assertEqual( + (rfc_path / "prerelease" / notprepped_fn).read_text(), + "This is .notprepped.xml", + ".notprepped.xml file should contain the expected content", + ) + self.assertEqual( + bytes( + Blob.objects.get( + bucket="rfc", name=f"notprepped/{notprepped_fn}" + ).content + ), + b"This is .notprepped.xml", + ".notprepped.xml blob should contain the expected content", + ) + # Confirm that the red precomputer was triggered correctly + self.assertTrue(mock_trigger_red_task.delay.called) + _, mock_kwargs = mock_trigger_red_task.delay.call_args + self.assertIn("rfc_number_list", mock_kwargs) + expected_rfc_number_list = [rfc.rfc_number] + expected_rfc_number_list.extend([d.rfc_number for d in updates + obsoletes]) + expected_rfc_number_list = sorted(set(expected_rfc_number_list)) + self.assertEqual(mock_kwargs["rfc_number_list"], expected_rfc_number_list) + # Confirm that the search index update task was called correctly + self.assertTrue(mock_update_searchindex_task.delay.called) + # Confirm reference relations rebuild task was called correctly + self.assertTrue(mock_rebuild_relations.delay.called) + _, mock_kwargs = mock_rebuild_relations.delay.call_args + self.assertIn("doc_names", mock_kwargs) + self.assertEqual(mock_kwargs["doc_names"], [rfc.name]) + + # re-post with replace = False should now fail + mock_update_searchindex_task.reset_mock() + r = self.client.post( + url, + _valid_post_data(), + format="multipart", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 409) # conflict + self.assertFalse(mock_update_searchindex_task.delay.called) + + # re-post with replace = True should succeed + r = self.client.post( + url, + _valid_post_data() | {"replace": True}, + format="multipart", + headers={"X-Api-Key": "valid-token"}, + ) + self.assertEqual(r.status_code, 200) + self.assertTrue(mock_update_searchindex_task.delay.called) + self.assertEqual( + mock_update_searchindex_task.delay.call_args, + mock.call(rfc.rfc_number), + ) + + @override_settings(APP_API_TOKENS={"ietf.api.views_rpc": ["valid-token"]}) + def test_refresh_rfc_index(self): + DirtyBits.objects.create( + slug=DirtyBits.Slugs.RFCINDEX, + dirty_time=timezone.now() - datetime.timedelta(days=1), + processed_time=timezone.now() - datetime.timedelta(hours=12), 
+ ) + self.assertFalse(rfcindex_is_dirty()) + url = urlreverse("ietf.api.purple_api.refresh_rfc_index") + response = self.client.get(url) + self.assertEqual(response.status_code, 403) + response = self.client.get(url, headers={"X-Api-Key": "invalid-token"}) + self.assertEqual(response.status_code, 403) + response = self.client.get(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(response.status_code, 405) + self.assertFalse(rfcindex_is_dirty()) + response = self.client.post(url, headers={"X-Api-Key": "valid-token"}) + self.assertEqual(response.status_code, 202) + self.assertTrue(rfcindex_is_dirty()) + + def test_destination_helper_mixin_fs_destination(self): + file_list = [f"rfc31337.{ext}" for ext in ["txt", "xml", "pdf", "html"]] + for filename in file_list: + self.assertEqual( + DestinationHelperMixin().fs_destination(filename), + Path(f"{settings.RFC_PATH}") / filename, + ) + # noteprepped xml + filename = "rfc31337.notprepped.xml" + self.assertEqual( + DestinationHelperMixin().fs_destination(filename), + Path(f"{settings.RFC_PATH}/prerelease") / filename, + ) + + def test_destination_helper_mixin_blob_destination(self): + file_list = {ext: f"rfc31337.{ext}" for ext in ["txt", "xml", "pdf", "html"]} + for file_type, filename in file_list.items(): + self.assertEqual( + DestinationHelperMixin().blob_destination(filename), + f"{file_type}/{filename}", + ) + # noteprepped xml + filename = "rfc31337.notprepped.xml" + self.assertEqual( + DestinationHelperMixin().blob_destination(filename), + f"notprepped/{filename}", + ) diff --git a/ietf/api/urls.py b/ietf/api/urls.py index 7ee55cf708..7a082567b8 100644 --- a/ietf/api/urls.py +++ b/ietf/api/urls.py @@ -1,16 +1,31 @@ -# Copyright The IETF Trust 2017, All Rights Reserved +# Copyright The IETF Trust 2017-2024, All Rights Reserved + +from drf_spectacular.views import SpectacularAPIView from django.conf import settings -from django.urls import include +from django.urls import include, path from django.views.generic import TemplateView from ietf import api -from ietf.api import views as api_views -from ietf.doc import views_ballot +from ietf.doc import views_ballot, api as doc_api from ietf.meeting import views as meeting_views from ietf.submit import views as submit_views from ietf.utils.urls import url +from . import views as api_views +from .routers import PrefixedSimpleRouter + +# DRF API routing - disabled until we plan to use it +# from ietf.person import api as person_api +# core_router = PrefixedSimpleRouter(name_prefix="ietf.api.core_api") # core api router +# core_router.register("email", person_api.EmailViewSet) +# core_router.register("person", person_api.PersonViewSet) + +# todo more general name for this API? 
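Editorial aside: the red_router setup that follows uses the project-specific PrefixedSimpleRouter. As a hedged illustration only, the sketch below shows how a plain DRF SimpleRouter turns a registered viewset into list/detail routes; the name-prefixing behaviour of PrefixedSimpleRouter (e.g. producing names like "ietf.api.red_api.doc-list") is assumed from its usage here, not shown.

from rest_framework import serializers, viewsets
from rest_framework.routers import SimpleRouter
from ietf.doc.models import Document

class MinimalDocSerializer(serializers.ModelSerializer):
    # Illustrative serializer, not part of this patch.
    class Meta:
        model = Document
        fields = ["id", "name"]

class MinimalDocViewSet(viewsets.ReadOnlyModelViewSet):
    queryset = Document.objects.filter(type_id="rfc")
    serializer_class = MinimalDocSerializer

router = SimpleRouter()
router.register("doc", MinimalDocViewSet, basename="doc")
# router.urls now holds two routes: ^doc/$ (named "doc-list") and
# ^doc/<pk>/$ (named "doc-detail"); a name_prefix-aware router would
# additionally rewrite those generated names.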
+red_router = PrefixedSimpleRouter(name_prefix="ietf.api.red_api") # red api router +red_router.register("doc", doc_api.RfcViewSet) +red_router.register("subseries", doc_api.SubseriesViewSet, basename="subseries") + api.autodiscover() urlpatterns = [ @@ -20,14 +35,34 @@ url(r'^v1/?$', api_views.top_level), # For mailarchive use, requires secretariat role url(r'^v2/person/person', api_views.ApiV2PersonExportView.as_view()), + # --- DRF API --- + # path("core/", include(core_router.urls)), + path("purple/", include("ietf.api.urls_rpc")), + path("red/", include(red_router.urls)), + path("schema/", SpectacularAPIView.as_view()), # # --- Custom API endpoints, sorted alphabetically --- - # GPRD: export of personal information for the logged-in person + # Email alias information for drafts + url(r'^doc/draft-aliases/$', api_views.draft_aliases), + # email ingestor + url(r'email/$', api_views.ingest_email), + # email ingestor + url(r'email/test/$', api_views.ingest_email_test), + # GDPR: export of personal information for the logged-in person url(r'^export/personal-information/$', api_views.PersonalInformationExportView.as_view()), + # Email alias information for groups + url(r'^group/group-aliases/$', api_views.group_aliases), + # Email addresses belonging to role holders + url(r'^group/role-holder-addresses/$', api_views.role_holder_addresses), # Let IESG members set positions programmatically url(r'^iesg/position', views_ballot.api_set_position), + # Find the blob to store for a given materials document path + url(r'^meeting/(?:(?P(?:interim-)?[a-z0-9-]+)/)?materials/%(document)s(?P\.[A-Za-z0-9]+)?/resolve-cached/$' % settings.URL_REGEXPS, meeting_views.api_resolve_materials_name_cached), + url(r'^meeting/blob/(?P[a-z0-9-]+)/(?P[a-z][a-z0-9.-]+)$', meeting_views.api_retrieve_materials_blob), # Let Meetecho set session video URLs url(r'^meeting/session/video/url$', meeting_views.api_set_session_video_url), + # Let Meetecho tell us the name of its recordings + url(r'^meeting/session/recording-name$', meeting_views.api_set_meetecho_recording_name), # Meeting agenda + floorplan data url(r'^meeting/(?P[A-Za-z0-9._+-]+)/agenda-data$', meeting_views.api_get_agenda_data), # Meeting session materials @@ -41,12 +76,16 @@ # Let MeetEcho upload session polls url(r'^notify/session/polls/?$', meeting_views.api_upload_polls), # Let the registration system notify us about registrations - url(r'^notify/meeting/registration/?', api_views.api_new_meeting_registration), + url(r'^notify/meeting/registration/v2/?', api_views.api_new_meeting_registration_v2), # OpenID authentication provider url(r'^openid/$', TemplateView.as_view(template_name='api/openid-issuer.html'), name='ietf.api.urls.oidc_issuer'), url(r'^openid/', include('oidc_provider.urls', namespace='oidc_provider')), + # Email alias listing + url(r'^person/email/$', api_views.active_email_list), + # Related Email listing + url(r'^person/email/(?P[^/\x00]+)/related/$', api_views.related_email_list), # Draft submission API - url(r'^submit/?$', submit_views.api_submit), + url(r'^submit/?$', submit_views.api_submit_tombstone), # Draft upload API url(r'^submission/?$', submit_views.api_submission), # Draft submission state API @@ -54,7 +93,9 @@ # Datatracker version url(r'^version/?$', api_views.version), # Application authentication API key - url(r'^appauth/[authortools|bibxml]', api_views.app_auth), + url(r'^appauth/(?Pauthortools|bibxml)$', api_views.app_auth), + # NFS metrics endpoint + url(r'^metrics/nfs/?$', api_views.nfs_metrics), # latest versions 
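A brief aside on the appauth pattern change a few lines above: the old expression used a character class, so it matched any single character drawn from the listed words rather than the words themselves; the new named-group alternation matches the literal app names and hands the capture to the view as a keyword argument. A minimal check, standard library only, with illustrative test strings:

import re

old = r"^appauth/[authortools|bibxml]"
new = r"^appauth/(?P<app>authortools|bibxml)$"
# Character class: any single listed character is enough to match.
assert re.match(old, "appauth/a") is not None
assert re.match(old, "appauth/z") is None
# Named-group alternation: only the two literal app names match, and the
# capture becomes the "app" keyword argument Django passes to app_auth().
m = re.match(new, "appauth/bibxml")
assert m is not None and m.group("app") == "bibxml"
assert re.match(new, "appauth/other") is None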
url(r'^rfcdiff-latest-json/%(name)s(?:-%(rev)s)?(\.txt|\.html)?/?$' % settings.URL_REGEXPS, api_views.rfcdiff_latest_json), url(r'^rfcdiff-latest-json/(?P[Rr][Ff][Cc] [0-9]+?)(\.txt|\.html)?/?$', api_views.rfcdiff_latest_json), diff --git a/ietf/api/urls_rpc.py b/ietf/api/urls_rpc.py new file mode 100644 index 0000000000..8555610dc3 --- /dev/null +++ b/ietf/api/urls_rpc.py @@ -0,0 +1,47 @@ +# Copyright The IETF Trust 2023-2026, All Rights Reserved +from django.urls import include, path + +from ietf.api import views_rpc +from ietf.api.routers import PrefixedDefaultRouter +from ietf.utils.urls import url + +router = PrefixedDefaultRouter(use_regex_path=False, name_prefix="ietf.api.purple_api") +router.include_format_suffixes = False +router.register(r"draft", views_rpc.DraftViewSet, basename="draft") +router.register(r"person", views_rpc.PersonViewSet) +router.register(r"rfc", views_rpc.RfcViewSet, basename="rfc") + +router.register( + r"rfc//authors", + views_rpc.RfcAuthorViewSet, + basename="rfc-authors", +) + +urlpatterns = [ + url(r"^doc/drafts_by_names/", views_rpc.DraftsByNamesView.as_view()), + url(r"^persons/search/", views_rpc.RpcPersonSearch.as_view()), + path( + r"rfc/publish/", + views_rpc.RfcPubNotificationView.as_view(), + name="ietf.api.purple_api.notify_rfc_published", + ), + path( + r"rfc/publish/files/", + views_rpc.RfcPubFilesView.as_view(), + name="ietf.api.purple_api.upload_rfc_files", + ), + path( + r"rfc_index/refresh/", + views_rpc.RfcIndexView.as_view(), + name="ietf.api.purple_api.refresh_rfc_index", + ), + path(r"subject//person/", views_rpc.SubjectPersonView.as_view()), +] + +# add routers at the end so individual routes can steal parts of their address +# space (e.g., ^rfc/publish/ superseding the ^rfc/ routes of RfcViewSet) +urlpatterns.extend( + [ + path("", include(router.urls)), + ] +) diff --git a/ietf/api/views.py b/ietf/api/views.py index f6221b5e2e..420bc39693 100644 --- a/ietf/api/views.py +++ b/ietf/api/views.py @@ -1,46 +1,55 @@ # Copyright The IETF Trust 2017-2020, All Rights Reserved # -*- coding: utf-8 -*- - +import base64 +import binascii +import datetime import json +from pathlib import Path +from tempfile import NamedTemporaryFile +import jsonschema import pytz import re -from jwcrypto.jwk import JWK - +from contextlib import suppress from django.conf import settings from django.contrib.auth import authenticate from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User -from django.core.exceptions import ValidationError -from django.core.validators import validate_email -from django.http import HttpResponse, Http404 +from django.http import HttpResponse, Http404, JsonResponse, HttpResponseBadRequest from django.shortcuts import render, get_object_or_404 from django.urls import reverse from django.utils.decorators import method_decorator from django.views.decorators.csrf import csrf_exempt from django.views.decorators.gzip import gzip_page from django.views.generic.detail import DetailView - +from email.message import EmailMessage +from importlib.metadata import version as metadata_version +from jwcrypto.jwk import JWK from tastypie.exceptions import BadRequest -from tastypie.utils.mime import determine_format, build_content_type -from tastypie.utils import is_valid_jsonp_callback_value from tastypie.serializers import Serializer - -import debug # pyflakes:ignore +from tastypie.utils import is_valid_jsonp_callback_value +from tastypie.utils.mime import determine_format, build_content_type +from textwrap 
import dedent +from traceback import format_exception, extract_tb +from typing import Iterable, Optional, Literal import ietf -from ietf.person.models import Person, Email from ietf.api import _api_list +from ietf.api.ietf_utils import is_valid_token, requires_api_token from ietf.api.serializer import JsonExportMixin -from ietf.api.ietf_utils import is_valid_token -from ietf.doc.utils import fuzzy_find_documents -from ietf.ietfauth.views import send_account_creation_email +from ietf.doc.utils import DraftAliasGenerator, fuzzy_find_documents +from ietf.group.utils import GroupAliasGenerator, role_holder_emails from ietf.ietfauth.utils import role_required +from ietf.ipr.utils import ingest_response_email as ipr_ingest_response_email from ietf.meeting.models import Meeting -from ietf.stats.models import MeetingRegistration +from ietf.meeting.utils import import_registration_json_validator, process_single_registration +from ietf.nomcom.utils import ingest_feedback_email as nomcom_ingest_feedback_email +from ietf.person.models import Person, Email +from ietf.sync.iana import ingest_review_email as iana_ingest_review_email from ietf.utils import log from ietf.utils.decorators import require_api_key +from ietf.utils.mail import send_smtp from ietf.utils.models import DumpInfo @@ -55,7 +64,10 @@ def top_level(request): } serializer = Serializer() - desired_format = determine_format(request, serializer) + try: + desired_format = determine_format(request, serializer) + except BadRequest as err: + return HttpResponseBadRequest(str(err)) options = {} @@ -63,10 +75,12 @@ def top_level(request): callback = request.GET.get('callback', 'callback') if not is_valid_jsonp_callback_value(callback): - raise BadRequest('JSONP callback name is invalid.') + return HttpResponseBadRequest("JSONP callback name is invalid") options['callback'] = callback + # This might raise UnsupportedFormat, but that indicates a real server misconfiguration + # so let it bubble up unhandled and trigger a 500 / email to admins. 
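Editorial aside on the ordering note in urls_rpc.py above ("add routers at the end so individual routes can steal parts of their address space"): Django resolves urlpatterns top-down, so an explicit path listed before the router include shadows the matching router route while everything else still falls through to the router. A hedged, self-contained sketch with illustrative view names:

from django.http import HttpResponse
from django.urls import path

def publish_view(request):
    return HttpResponse("publish endpoint")

def rfc_detail_view(request, rfc_number):
    return HttpResponse(f"detail for {rfc_number}")

urlpatterns = [
    # Listed first, so /rfc/publish/ resolves here even though the detail
    # pattern below would also accept "publish" as its <rfc_number> value.
    path("rfc/publish/", publish_view),
    path("rfc/<str:rfc_number>/", rfc_detail_view),
]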
serialized = serializer.serialize(available_resources, desired_format, options) return HttpResponse(content=serialized, content_type=build_content_type(desired_format)) @@ -83,13 +97,13 @@ class PersonalInformationExportView(DetailView, JsonExportMixin): def get(self, request): person = get_object_or_404(self.model, user=request.user) - expand = ['searchrule', 'documentauthor', 'ad_document_set', 'ad_dochistory_set', 'docevent', + expand = ['searchrule', 'documentauthor', 'rfcauthor', 'ad_document_set', 'ad_dochistory_set', 'docevent', 'ballotpositiondocevent', 'deletedevent', 'email_set', 'groupevent', 'role', 'rolehistory', 'iprdisclosurebase', 'iprevent', 'liaisonstatementevent', 'allowlisted', 'schedule', 'constraint', 'schedulingevent', 'message', 'sendqueue', 'nominee', 'topicfeedbacklastseen', 'alias', 'email', 'apikeys', 'personevent', 'reviewersettings', 'reviewsecretarysettings', 'unavailableperiod', 'reviewwish', 'nextreviewerinteam', 'reviewrequest', 'meetingregistration', 'submissionevent', 'preapproval', - 'user', 'user__communitylist', 'personextresource_set', ] + 'user', 'communitylist', 'personextresource_set', ] return self.json_view(request, filter={'id':person.id}, expand=expand) @@ -100,7 +114,11 @@ class ApiV2PersonExportView(DetailView, JsonExportMixin): model = Person def err(self, code, text): - return HttpResponse(text, status=code, content_type='text/plain') + return HttpResponse( + text, + status=code, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) def post(self, request): querydict = request.POST.copy() @@ -132,79 +150,61 @@ def post(self, request): # else: # return HttpResponse(status=405) -@require_api_key -@role_required('Robot') + +@requires_api_token @csrf_exempt -def api_new_meeting_registration(request): +def api_new_meeting_registration_v2(request): '''REST API to notify the datatracker about a new meeting registration''' - def err(code, text): - return HttpResponse(text, status=code, content_type='text/plain') - required_fields = [ 'meeting', 'first_name', 'last_name', 'affiliation', 'country_code', - 'email', 'reg_type', 'ticket_type', 'checkedin'] - fields = required_fields + [] - if request.method == 'POST': - # parameters: - # apikey: - # meeting - # name - # email - # reg_type (In Person, Remote, Hackathon Only) - # ticket_type (full_week, one_day, student) - # - data = {'attended': False, } - missing_fields = [] - for item in fields: - value = request.POST.get(item, None) - if value is None and item in required_fields: - missing_fields.append(item) - data[item] = value - if missing_fields: - return err(400, "Missing parameters: %s" % ', '.join(missing_fields)) - number = data['meeting'] - try: - meeting = Meeting.objects.get(number=number) - except Meeting.DoesNotExist: - return err(400, "Invalid meeting value: '%s'" % (number, )) - reg_type = data['reg_type'] - email = data['email'] - try: - validate_email(email) - except ValidationError: - return err(400, "Invalid email value: '%s'" % (email, )) - if request.POST.get('cancelled', 'false') == 'true': - MeetingRegistration.objects.filter( - meeting_id=meeting.pk, - email=email, - reg_type=reg_type).delete() - return HttpResponse('OK', status=200, content_type='text/plain') - else: - object, created = MeetingRegistration.objects.get_or_create( - meeting_id=meeting.pk, - email=email, - reg_type=reg_type) - try: - # Update attributes - for key in set(data.keys())-set(['attended', 'apikey', 'meeting', 'email']): - if key == 'checkedin': - new = bool(data.get(key).lower() == 
'true') - else: - new = data.get(key) - setattr(object, key, new) - person = Person.objects.filter(email__address=email) - if person.exists(): - object.person = person.first() - object.save() - except ValueError as e: - return err(400, "Unexpected POST data: %s" % e) - response = "Accepted, New registration" if created else "Accepted, Updated registration" - if User.objects.filter(username__iexact=email).exists() or Email.objects.filter(address=email).exists(): - pass - else: - send_account_creation_email(request, email) - response += ", Email sent" - return HttpResponse(response, status=202, content_type='text/plain') - else: - return HttpResponse(status=405) + def _http_err(code, text): + return HttpResponse( + text, + status=code, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) + + def _api_response(result): + return JsonResponse(data={"result": result}) + + if request.method != "POST": + return _http_err(405, "Method not allowed") + + if request.content_type != "application/json": + return _http_err(415, "Content-Type must be application/json") + + # Validate + try: + payload = json.loads(request.body) + import_registration_json_validator.validate(payload) + except json.decoder.JSONDecodeError as err: + return _http_err(400, f"JSON parse error at line {err.lineno} col {err.colno}: {err.msg}") + except jsonschema.exceptions.ValidationError as err: + return _http_err(400, f"JSON schema error at {err.json_path}: {err.message}") + except Exception: + return _http_err(400, "Invalid request format") + + # Get the meeting ID from the first registration, the API only deals with one meeting at a time + first_email = next(iter(payload['objects'])) + meeting_number = payload['objects'][first_email]['meeting'] + try: + meeting = Meeting.objects.get(number=meeting_number) + except Meeting.DoesNotExist: + return _http_err(400, f"Invalid meeting value: {meeting_number}") + + # confirm email exists + try: + Email.objects.get(address=first_email) + except Email.DoesNotExist: + return _http_err(400, f"Unknown email: {first_email}") + + reg_data = payload['objects'][first_email] + + process_single_registration(reg_data, meeting) + + return HttpResponse( + 'Success', + status=202, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) def version(request): @@ -215,9 +215,16 @@ def version(request): if dumpinfo.tz != "UTC": dumpdate = pytz.timezone(dumpinfo.tz).localize(dumpinfo.date.replace(tzinfo=None)) dumptime = dumpdate.strftime('%Y-%m-%d %H:%M:%S %z') if dumpinfo else None + + # important libraries + __version_extra__ = {} + for lib in settings.ADVERTISE_VERSIONS: + __version_extra__[lib] = metadata_version(lib) + return HttpResponse( json.dumps({ 'version': ietf.__version__+ietf.__patch__, + 'other': __version_extra__, 'dumptime': dumptime, }), content_type='application/json', @@ -226,12 +233,27 @@ def version(request): @require_api_key @csrf_exempt -def app_auth(request): +def app_auth(request, app: Literal["authortools", "bibxml"]): return HttpResponse( json.dumps({'success': True}), content_type='application/json') - +@requires_api_token +@csrf_exempt +def nfs_metrics(request): + with NamedTemporaryFile(dir=settings.NFS_METRICS_TMP_DIR,delete=False) as fp: + fp.close() + mark = datetime.datetime.now() + with open(fp.name, mode="w") as f: + f.write("whyioughta"*1024) + write_latency = (datetime.datetime.now() - mark).total_seconds() + mark = datetime.datetime.now() + with open(fp.name, "r") as f: + _=f.read() + read_latency = (datetime.datetime.now() - 
mark).total_seconds() + Path(f.name).unlink() + response=f'nfs_latency_seconds{{operation="write"}} {write_latency}\nnfs_latency_seconds{{operation="read"}} {read_latency}\n' + return HttpResponse(response) def find_doc_for_rfcdiff(name, rev): """rfcdiff lookup heuristics @@ -317,12 +339,9 @@ def get_previous_url(name, rev=None): previous_url = '' if condition in ('historic version', 'current version'): doc = history if history else document - if found_rev: - doc.is_rfc = lambda: False previous_url = doc.get_href() elif condition == 'version dochistory not found': document.rev = found_rev - document.is_rfc = lambda: False previous_url = document.get_href() return previous_url @@ -330,32 +349,38 @@ def get_previous_url(name, rev=None): def rfcdiff_latest_json(request, name, rev=None): response = dict() condition, document, history, found_rev = find_doc_for_rfcdiff(name, rev) - + if document and document.type_id == "rfc": + draft = document.came_from_draft() if condition == 'no such document': raise Http404 elif condition in ('historic version', 'current version'): doc = history if history else document - if not found_rev and doc.is_rfc(): - response['content_url'] = doc.get_href() - response['name']=doc.canonical_name() - if doc.name != doc.canonical_name(): + if doc.type_id == "rfc": + response['content_url'] = doc.get_href() + response['name']=doc.name + if draft: + prev_rev = draft.rev + if doc.rfc_number in HAS_TOMBSTONE and prev_rev != '00': + prev_rev = f'{(int(draft.rev)-1):02d}' + response['previous'] = f'{draft.name}-{prev_rev}' + response['previous_url'] = get_previous_url(draft.name, prev_rev) + elif doc.type_id == "draft" and not found_rev and doc.relateddocument_set.filter(relationship_id="became_rfc").exists(): + rfc = doc.related_that_doc("became_rfc")[0] + response['content_url'] = rfc.get_href() + response['name']=rfc.name prev_rev = doc.rev - # not sure what to do if non-numeric values come back, so at least log it - log.assertion('doc.rfc_number().isdigit()') # .rfc_number() is expensive... 
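A hedged illustration of the HAS_TOMBSTONE adjustment used in this hunk: for RFCs in that set, the useful "previous" version for a diff is one draft revision earlier, but never before -00. The set passed below is a stand-in, not the real HAS_TOMBSTONE contents:

def previous_rev(rev: str, rfc_number: int, has_tombstone: set[int]) -> str:
    # Mirrors the pattern in rfcdiff_latest_json: step back one revision for
    # tombstoned RFCs, but never below revision 00.
    if rfc_number in has_tombstone and rev != "00":
        return f"{int(rev) - 1:02d}"
    return rev

assert previous_rev("07", 1234, {1234}) == "06"   # tombstoned: step back one
assert previous_rev("07", 1234, set()) == "07"    # not tombstoned: unchanged
assert previous_rev("00", 1234, {1234}) == "00"   # never go below -00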
- log.assertion('doc.rev.isdigit()') - if int(doc.rfc_number()) in HAS_TOMBSTONE and prev_rev != '00': + if rfc.rfc_number in HAS_TOMBSTONE and prev_rev != '00': prev_rev = f'{(int(doc.rev)-1):02d}' response['previous'] = f'{doc.name}-{prev_rev}' response['previous_url'] = get_previous_url(doc.name, prev_rev) else: - doc.is_rfc = lambda: False response['content_url'] = doc.get_href() response['rev'] = doc.rev response['name'] = doc.name if doc.rev == '00': replaces_docs = (history.doc if condition=='historic version' else doc).related_that_doc('replaces') if replaces_docs: - replaces = replaces_docs[0].document + replaces = replaces_docs[0] response['previous'] = f'{replaces.name}-{replaces.rev}' response['previous_url'] = get_previous_url(replaces.name, replaces.rev) else: @@ -374,7 +399,6 @@ def rfcdiff_latest_json(request, name, rev=None): response['name'] = document.name response['rev'] = found_rev document.rev = found_rev - document.is_rfc = lambda: False response['content_url'] = document.get_href() # not sure what to do if non-numeric values come back, so at least log it log.assertion('found_rev.isdigit()') @@ -402,6 +426,7 @@ def directauth(request): data = None if raw_data is None or data is None: + log.log("Request body is either missing or invalid") return HttpResponse(json.dumps(dict(result="failure",reason="invalid post")), content_type='application/json') authtoken = data.get('authtoken', None) @@ -409,9 +434,11 @@ def directauth(request): password = data.get('password', None) if any([item is None for item in (authtoken, username, password)]): + log.log("One or more mandatory fields are missing: authtoken, username, password") return HttpResponse(json.dumps(dict(result="failure",reason="invalid post")), content_type='application/json') if not is_valid_token("ietf.api.views.directauth", authtoken): + log.log("Auth token provided is invalid") return HttpResponse(json.dumps(dict(result="failure",reason="invalid authtoken")), content_type='application/json') user_query = User.objects.filter(username__iexact=username) @@ -422,16 +449,288 @@ def directauth(request): # Note well that we are using user.username, not what was passed to the API. 
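A hedged restatement of the "Note well" comment just above: the user lookup is case-insensitive, but authenticate() is then given the stored username so the credential check runs against the canonical casing. Sketch only; the real view also logs the outcome and wraps the result in a JSON response:

from django.contrib.auth import authenticate
from django.contrib.auth.models import User

def direct_auth_check(username: str, password: str) -> bool:
    # Case-insensitive match, then authenticate with the *stored* username.
    matches = User.objects.filter(username__iexact=username)
    if matches.count() != 1:
        return False
    return authenticate(username=matches.first().username, password=password) is not None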
- if user_query.count() == 1 and authenticate(username = user_query.first().username, password = password): + user_count = user_query.count() + if user_count == 1 and authenticate(username = user_query.first().username, password = password): user = user_query.get() if user_query.filter(person__isnull=True).count() == 1: # Can't inspect user.person direclty here - log.log(f"Direct auth of personless user {user.pk}:{user.username}") + log.log(f"Direct auth success (personless user): {user.pk}:{user.username}") else: - log.log(f"Direct auth: {user.pk}:{user.person.plain_name()}") + log.log(f"Direct auth success: {user.pk}:{user.person.plain_name()}") return HttpResponse(json.dumps(dict(result="success")), content_type='application/json') - log.log(f"Direct auth failure: {username}") + log.log(f"Direct auth failure: {username} ({user_count} user(s) found)") return HttpResponse(json.dumps(dict(result="failure", reason="authentication failed")), content_type='application/json') else: + log.log(f"Request must be POST: {request.method} received") return HttpResponse(status=405) + + +@requires_api_token +@csrf_exempt +def draft_aliases(request): + if request.method == "GET": + return JsonResponse( + { + "aliases": [ + { + "alias": alias, + "domains": ["ietf"], + "addresses": address_list, + } + for alias, address_list in DraftAliasGenerator() + ] + } + ) + return HttpResponse(status=405) + + +@requires_api_token +@csrf_exempt +def group_aliases(request): + if request.method == "GET": + return JsonResponse( + { + "aliases": [ + { + "alias": alias, + "domains": domains, + "addresses": address_list, + } + for alias, domains, address_list in GroupAliasGenerator() + ] + } + ) + return HttpResponse(status=405) + + +@requires_api_token +@csrf_exempt +def active_email_list(request): + if request.method == "GET": + return JsonResponse( + { + "addresses": list(Email.objects.filter(active=True).values_list("address", flat=True)), + } + ) + return HttpResponse(status=405) + + +@requires_api_token +@csrf_exempt +def related_email_list(request, email): + """Given an email address, returns all other email addresses known + to Datatracker, via Person object + """ + def _http_err(code, text): + return HttpResponse( + text, + status=code, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) + + if request.method == "GET": + try: + email_obj = Email.objects.get(address=email) + except Email.DoesNotExist: + return _http_err(404, "Email not found") + person = email_obj.person + if not person: + return JsonResponse({"addresses": []}) + return JsonResponse( + { + "addresses": list(person.email_set.values_list("address", flat=True)), + } + ) + return HttpResponse(status=405) + + +@requires_api_token +def role_holder_addresses(request): + if request.method == "GET": + return JsonResponse( + { + "addresses": list( + role_holder_emails() + .order_by("address") + .values_list("address", flat=True) + ) + } + ) + return HttpResponse(status=405) + + +_response_email_json_validator = jsonschema.Draft202012Validator( + schema={ + "type": "object", + "properties": { + "dest": { + "type": "string", + }, + "message": { + "type": "string", # base64-encoded mail message + }, + }, + "required": ["dest", "message"], + } +) + + +class EmailIngestionError(Exception): + """Exception indicating ingestion failed""" + def __init__( + self, + msg="Message rejected", + *, + email_body: Optional[str] = None, + email_recipients: Optional[Iterable[str]] = None, + email_attach_traceback=False, + email_original_message: 
Optional[bytes]=None, + ): + self.msg = msg + self.email_body = email_body + self.email_subject = msg + self.email_recipients = email_recipients + self.email_attach_traceback = email_attach_traceback + self.email_original_message = email_original_message + self.email_from = settings.SERVER_EMAIL + + @staticmethod + def _summarize_error(error): + frame = extract_tb(error.__traceback__)[-1] + return dedent(f"""\ + Error details: + Exception type: {type(error).__module__}.{type(error).__name__} + File: {frame.filename} + Line: {frame.lineno}""") + + def as_emailmessage(self) -> Optional[EmailMessage]: + """Generate an EmailMessage to report an error""" + if self.email_body is None: + return None + error = self if self.__cause__ is None else self.__cause__ + format_values = dict( + error=error, + error_summary=self._summarize_error(error), + ) + msg = EmailMessage() + if self.email_recipients is None: + msg["To"] = tuple(adm[1] for adm in settings.ADMINS) + else: + msg["To"] = self.email_recipients + msg["From"] = self.email_from + msg["Subject"] = self.msg + msg.set_content( + self.email_body.format(**format_values) + ) + if self.email_attach_traceback: + msg.add_attachment( + "".join(format_exception(None, error, error.__traceback__)), + filename="traceback.txt", + ) + if self.email_original_message is not None: + # Attach incoming message if it was provided. Send as a generic media + # type because we don't know for sure that it was actually a valid + # message. + msg.add_attachment( + self.email_original_message, + 'application', 'octet-stream', # media type + filename='original-message', + ) + return msg + + +def ingest_email_handler(request, test_mode=False): + """Ingest incoming email - handler + + Returns a 4xx or 5xx status code if the HTTP request was invalid or something went + wrong while processing it. If the request was valid, returns a 200. This may or may + not indicate that the message was accepted. + + If test_mode is true, actual processing of a valid message will be skipped. In this + mode, a valid request with a valid destination will be treated as accepted. The + "bad_dest" error may still be returned. 
+ """ + + def _http_err(code, text): + return HttpResponse( + text, + status=code, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) + + def _api_response(result): + return JsonResponse(data={"result": result}) + + if request.method != "POST": + return _http_err(405, "Method not allowed") + + if request.content_type != "application/json": + return _http_err(415, "Content-Type must be application/json") + + # Validate + try: + payload = json.loads(request.body) + _response_email_json_validator.validate(payload) + except json.decoder.JSONDecodeError as err: + return _http_err(400, f"JSON parse error at line {err.lineno} col {err.colno}: {err.msg}") + except jsonschema.exceptions.ValidationError as err: + return _http_err(400, f"JSON schema error at {err.json_path}: {err.message}") + except Exception: + return _http_err(400, "Invalid request format") + + try: + message = base64.b64decode(payload["message"], validate=True) + except binascii.Error: + return _http_err(400, "Invalid message: bad base64 encoding") + + dest = payload["dest"] + valid_dest = False + try: + if dest == "iana-review": + valid_dest = True + if not test_mode: + iana_ingest_review_email(message) + elif dest == "ipr-response": + valid_dest = True + if not test_mode: + ipr_ingest_response_email(message) + elif dest.startswith("nomcom-feedback-"): + maybe_year = dest[len("nomcom-feedback-"):] + if maybe_year.isdecimal(): + valid_dest = True + if not test_mode: + nomcom_ingest_feedback_email(message, int(maybe_year)) + except EmailIngestionError as err: + error_email = err.as_emailmessage() + if error_email is not None: + with suppress(Exception): # send_smtp logs its own exceptions, ignore them here + send_smtp(error_email) + return _api_response("bad_msg") + + if not valid_dest: + return _api_response("bad_dest") + + return _api_response("ok") + + +@requires_api_token +@csrf_exempt +def ingest_email(request): + """Ingest incoming email + + Hands off to ingest_email_handler() with test_mode=False. This allows @requires_api_token to + give the test endpoint a distinct token from the real one. + """ + return ingest_email_handler(request, test_mode=False) + + +@requires_api_token +@csrf_exempt +def ingest_email_test(request): + """Ingest incoming email test endpoint + + Hands off to ingest_email_handler() with test_mode=True. This allows @requires_api_token to + give the test endpoint a distinct token from the real one. 
+ """ + return ingest_email_handler(request, test_mode=True) diff --git a/ietf/api/views_rpc.py b/ietf/api/views_rpc.py new file mode 100644 index 0000000000..e9c17b8a12 --- /dev/null +++ b/ietf/api/views_rpc.py @@ -0,0 +1,578 @@ +# Copyright The IETF Trust 2023-2026, All Rights Reserved +import os +import shutil +from pathlib import Path +from tempfile import TemporaryDirectory + +from django.conf import settings +from django.db import IntegrityError +from drf_spectacular.utils import OpenApiParameter +from rest_framework import mixins, parsers, serializers, viewsets, status +from rest_framework.decorators import action +from rest_framework.exceptions import APIException +from rest_framework.views import APIView +from rest_framework.response import Response + +from django.db.models import CharField as ModelCharField, OuterRef, Subquery, Q +from django.db.models.functions import Coalesce +from django.http import Http404 +from drf_spectacular.utils import extend_schema_view, extend_schema +from rest_framework import generics +from rest_framework.fields import CharField as DrfCharField +from rest_framework.filters import SearchFilter +from rest_framework.pagination import LimitOffsetPagination + +from ietf.api.serializers_rpc import ( + PersonSerializer, + FullDraftSerializer, + DraftSerializer, + SubmittedToQueueSerializer, + OriginalStreamSerializer, + ReferenceSerializer, + EmailPersonSerializer, + RfcWithAuthorsSerializer, + DraftWithAuthorsSerializer, + NotificationAckSerializer, + RfcPubSerializer, + RfcFileSerializer, + EditableRfcSerializer, +) +from ietf.doc.models import Document, DocHistory, RfcAuthor, DocEvent +from ietf.doc.serializers import RfcAuthorSerializer +from ietf.doc.storage_utils import remove_from_storage, store_file, exists_in_storage +from ietf.doc.tasks import ( + signal_update_rfc_metadata_task, + rebuild_reference_relations_task, + trigger_red_precomputer_task, + update_rfc_searchindex_task, +) +from ietf.person.models import Email, Person +from ietf.sync.rfcindex import mark_rfcindex_as_dirty + + +class Conflict(APIException): + status_code = status.HTTP_409_CONFLICT + default_detail = "Conflict." + default_code = "conflict" + + +@extend_schema_view( + retrieve=extend_schema( + operation_id="get_person_by_id", + summary="Find person by ID", + description="Returns a single person", + parameters=[ + OpenApiParameter( + name="person_id", + type=int, + location="path", + description="Person ID identifying this person.", + ), + ], + ), +) +class PersonViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet): + queryset = Person.objects.all() + serializer_class = PersonSerializer + api_key_endpoint = "ietf.api.views_rpc" + lookup_url_kwarg = "person_id" + + @extend_schema( + operation_id="get_persons", + summary="Get a batch of persons", + description="Returns a list of persons matching requested ids. Omits any that are missing.", + request=list[int], + responses=PersonSerializer(many=True), + ) + @action(detail=False, methods=["post"]) + def batch(self, request): + """Get a batch of rpc person names""" + pks = request.data + return Response( + self.get_serializer(Person.objects.filter(pk__in=pks), many=True).data + ) + + @extend_schema( + operation_id="persons_by_email", + summary="Get a batch of persons by email addresses", + description=( + "Returns a list of persons matching requested ids. " + "Omits any that are missing." 
+ ), + request=list[str], + responses=EmailPersonSerializer(many=True), + ) + @action(detail=False, methods=["post"], serializer_class=EmailPersonSerializer) + def batch_by_email(self, request): + emails = Email.objects.filter(address__in=request.data, person__isnull=False) + serializer = self.get_serializer(emails, many=True) + return Response(serializer.data) + + +class SubjectPersonView(APIView): + api_key_endpoint = "ietf.api.views_rpc" + + @extend_schema( + operation_id="get_subject_person_by_id", + summary="Find person for OIDC subject by ID", + description="Returns a single person", + responses=PersonSerializer, + parameters=[ + OpenApiParameter( + name="subject_id", + type=str, + description="subject ID of person to return", + location="path", + ), + ], + ) + def get(self, request, subject_id: str): + try: + user_id = int(subject_id) + except ValueError: + raise serializers.ValidationError( + {"subject_id": "This field must be an integer value."} + ) + person = Person.objects.filter(user__pk=user_id).first() + if person: + return Response(PersonSerializer(person).data) + raise Http404 + + +class RpcLimitOffsetPagination(LimitOffsetPagination): + default_limit = 10 + max_limit = 100 + + +class SingleTermSearchFilter(SearchFilter): + """SearchFilter backend that does not split terms + + The default SearchFilter treats comma or whitespace-separated terms as individual + search terms. This backend instead searches for the exact term. + """ + + def get_search_terms(self, request): + value = request.query_params.get(self.search_param, "") + field = DrfCharField(trim_whitespace=False, allow_blank=True) + cleaned_value = field.run_validation(value) + return [cleaned_value] + + +@extend_schema_view( + get=extend_schema( + operation_id="search_person", + description="Get a list of persons, matching by partial name or email", + ), +) +class RpcPersonSearch(generics.ListAPIView): + # n.b. 
the OpenAPI schema for this can be generated by running + # ietf/manage.py spectacular --file spectacular.yaml + # and extracting / touching up the rpc_person_search_list operation + api_key_endpoint = "ietf.api.views_rpc" + queryset = Person.objects.all() + serializer_class = PersonSerializer + pagination_class = RpcLimitOffsetPagination + + # Searchable on all name-like fields or email addresses + filter_backends = [SingleTermSearchFilter] + search_fields = ["name", "plain", "email__address"] + + +@extend_schema_view( + retrieve=extend_schema( + operation_id="get_draft_by_id", + summary="Get a draft", + description="Returns the draft for the requested ID", + parameters=[ + OpenApiParameter( + name="doc_id", + type=int, + location="path", + description="Doc ID identifying this draft.", + ), + ], + ), + submitted_to_rpc=extend_schema( + operation_id="submitted_to_rpc", + summary="List documents ready to enter the RFC Editor Queue", + description="List documents ready to enter the RFC Editor Queue", + responses=SubmittedToQueueSerializer(many=True), + ), +) +class DraftViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet): + queryset = Document.objects.filter(type_id="draft") + serializer_class = FullDraftSerializer + api_key_endpoint = "ietf.api.views_rpc" + lookup_url_kwarg = "doc_id" + + @action(detail=False, serializer_class=SubmittedToQueueSerializer) + def submitted_to_rpc(self, request): + """Return documents in datatracker that have been submitted to the RPC but are not yet in the queue + + Those queries overreturn - there may be things, particularly not from the IETF stream that are already in the queue. + """ + ietf_docs = Q(states__type_id="draft-iesg", states__slug__in=["ann"]) + irtf_iab_ise_editorial_docs = Q( + states__type_id__in=[ + "draft-stream-iab", + "draft-stream-irtf", + "draft-stream-ise", + "draft-stream-editorial", + ], + states__slug__in=["rfc-edit"], + ) + docs = ( + self.get_queryset() + .filter(type_id="draft") + .filter(ietf_docs | irtf_iab_ise_editorial_docs) + ) + serializer = self.get_serializer(docs, many=True) + return Response(serializer.data) + + @extend_schema( + operation_id="get_draft_references", + summary="Get normative references to I-Ds", + description=( + "Returns the id and name of each normatively " + "referenced Internet-Draft for the given docId" + ), + parameters=[ + OpenApiParameter( + name="doc_id", + type=int, + location="path", + description="Doc ID identifying this draft.", + ), + ], + responses=ReferenceSerializer(many=True), + ) + @action(detail=True, serializer_class=ReferenceSerializer) + def references(self, request, doc_id=None): + doc = self.get_object() + serializer = self.get_serializer( + [ + reference + for reference in doc.related_that_doc("refnorm") + if reference.type_id == "draft" + ], + many=True, + ) + return Response(serializer.data) + + @extend_schema( + operation_id="get_draft_authors", + summary="Gather authors of the drafts with the given names", + description="returns a list mapping draft names to objects describing authors", + request=list[str], + responses=DraftWithAuthorsSerializer(many=True), + ) + @action(detail=False, methods=["post"], serializer_class=DraftWithAuthorsSerializer) + def bulk_authors(self, request): + drafts = self.get_queryset().filter(name__in=request.data) + serializer = self.get_serializer(drafts, many=True) + return Response(serializer.data) + + +@extend_schema_view( + rfc_original_stream=extend_schema( + operation_id="get_rfc_original_streams", + summary="Get the streams RFCs were 
originally published into", + description="returns a list of dicts associating an RFC with its originally published stream", + responses=OriginalStreamSerializer(many=True), + ) +) +class RfcViewSet(mixins.UpdateModelMixin, viewsets.GenericViewSet): + queryset = Document.objects.filter(type_id="rfc") + api_key_endpoint = "ietf.api.views_rpc" + lookup_field = "rfc_number" + serializer_class = EditableRfcSerializer + + def perform_update(self, serializer): + DocEvent.objects.create( + doc=serializer.instance, + rev=serializer.instance.rev, + by=Person.objects.get(name="(System)"), + type="sync_from_rfc_editor", + desc="Metadata update from RFC Editor", + ) + super().perform_update(serializer) + + @action(detail=False, serializer_class=OriginalStreamSerializer) + def rfc_original_stream(self, request): + rfcs = self.get_queryset().annotate( + orig_stream_id=Coalesce( + Subquery( + DocHistory.objects.filter(doc=OuterRef("pk")) + .exclude(stream__isnull=True) + .order_by("time") + .values_list("stream_id", flat=True)[:1] + ), + "stream_id", + output_field=ModelCharField(), + ), + ) + serializer = self.get_serializer(rfcs, many=True) + return Response(serializer.data) + + @extend_schema( + operation_id="get_rfc_authors", + summary="Gather authors of the RFCs with the given numbers", + description="returns a list mapping rfc numbers to objects describing authors", + request=list[int], + responses=RfcWithAuthorsSerializer(many=True), + ) + @action(detail=False, methods=["post"], serializer_class=RfcWithAuthorsSerializer) + def bulk_authors(self, request): + rfcs = self.get_queryset().filter(rfc_number__in=request.data) + serializer = self.get_serializer(rfcs, many=True) + return Response(serializer.data) + + +class DraftsByNamesView(APIView): + api_key_endpoint = "ietf.api.views_rpc" + + @extend_schema( + operation_id="get_drafts_by_names", + summary="Get a batch of drafts by draft names", + description="returns a list of drafts with matching names", + request=list[str], + responses=DraftSerializer(many=True), + ) + def post(self, request): + names = request.data + docs = Document.objects.filter(type_id="draft", name__in=names) + return Response(DraftSerializer(docs, many=True).data) + + +class RfcAuthorViewSet(viewsets.ReadOnlyModelViewSet): + """ViewSet for RfcAuthor model + + Router needs to provide rfc_number as a kwarg + """ + + api_key_endpoint = "ietf.api.views_rpc" + + queryset = RfcAuthor.objects.all() + serializer_class = RfcAuthorSerializer + lookup_url_kwarg = "author_id" + rfc_number_param = "rfc_number" + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter( + document__type_id="rfc", + document__rfc_number=self.kwargs[self.rfc_number_param], + ) + ) + + +class DestinationHelperMixin: + def fs_destination(self, filename: str | Path) -> Path: + """Destination for an uploaded RFC file in the filesystem + + Strips any path components in filename and returns an absolute Path. + """ + rfc_path = Path(settings.RFC_PATH) + filename = Path(filename) # could potentially have directory components + extension = "".join(filename.suffixes) + if extension == ".notprepped.xml": + return rfc_path / "prerelease" / filename.name + return rfc_path / filename.name + + def blob_destination(self, filename: str | Path) -> str: + """Destination name for an uploaded RFC file in the blob store + + Strips any path components in filename and returns an absolute Path. 
+ """ + filename = Path(filename) # could potentially have directory components + extension = "".join(filename.suffixes) + if extension == ".notprepped.xml": + file_type = "notprepped" + elif extension[0] == ".": + file_type = extension[1:] + else: + raise serializers.ValidationError( + f"Extension does not begin with '.'!? ({filename})", + ) + return f"{file_type}/{filename.name}" + + +class RfcPubNotificationView(DestinationHelperMixin, APIView): + api_key_endpoint = "ietf.api.views_rpc" + + @extend_schema( + operation_id="notify_rfc_published", + summary="Notify datatracker of RFC publication", + request=RfcPubSerializer, + responses=NotificationAckSerializer, + ) + def post(self, request): + serializer = RfcPubSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + # Check blobstore & filesystem for conflicts + rfc_number = serializer.validated_data["rfc_number"] + dest_stem = f"rfc{rfc_number}" + blob_kind = "rfc" + possible_rfc_files = [ + self.fs_destination(dest_stem + ext) + for ext in RfcFileSerializer.allowed_extensions + ] + possible_rfc_blobs = [ + self.blob_destination(dest_stem + ext) + for ext in RfcFileSerializer.allowed_extensions + ] + for possible_existing_file in possible_rfc_files: + if possible_existing_file.exists(): + raise Conflict( + "File(s) already exist for this RFC", + code="files-exist", + ) + for possible_existing_blob in possible_rfc_blobs: + if exists_in_storage(kind=blob_kind, name=possible_existing_blob): + raise Conflict( + "Blob(s) already exist for this RFC", + code="blobs-exist", + ) + # Create RFC + try: + rfc = serializer.save() + except IntegrityError as err: + if Document.objects.filter( + rfc_number=serializer.validated_data["rfc_number"] + ): + raise serializers.ValidationError( + "RFC with that number already exists", + code="rfc-number-in-use", + ) + raise serializers.ValidationError( + f"Unable to publish: {err}", + code="unknown-integrity-error", + ) + rfc_number_list = [rfc.rfc_number] + rfc_number_list.extend( + [d.rfc_number for d in rfc.related_that_doc(("updates", "obs"))] + ) + rfc_number_list = sorted(set(rfc_number_list)) + signal_update_rfc_metadata_task.delay(rfc_number_list=rfc_number_list) + return Response(NotificationAckSerializer().data) + + +class RfcPubFilesView(DestinationHelperMixin, APIView): + api_key_endpoint = "ietf.api.views_rpc" + parser_classes = [parsers.MultiPartParser] + + @extend_schema( + operation_id="upload_rfc_files", + summary="Upload files for a published RFC", + request=RfcFileSerializer, + responses=NotificationAckSerializer, + ) + def post(self, request): + serializer = RfcFileSerializer( + # many=True, + data=request.data, + ) + serializer.is_valid(raise_exception=True) + rfc = serializer.validated_data["rfc"] + uploaded_files = serializer.validated_data["contents"] # list[UploadedFile] + replace = serializer.validated_data["replace"] + dest_stem = f"rfc{rfc.rfc_number}" + mtime = serializer.validated_data["mtime"] + mtimestamp = mtime.timestamp() + blob_kind = "rfc" + + # List of files that might exist for an RFC + possible_rfc_files = [ + self.fs_destination(dest_stem + ext) + for ext in serializer.allowed_extensions + ] + possible_rfc_blobs = [ + self.blob_destination(dest_stem + ext) + for ext in serializer.allowed_extensions + ] + if not replace: + # this is the default: refuse to overwrite anything if not replacing + for possible_existing_file in possible_rfc_files: + if possible_existing_file.exists(): + raise Conflict( + "File(s) already exist for this RFC", + 
code="files-exist", + ) + for possible_existing_blob in possible_rfc_blobs: + if exists_in_storage(kind=blob_kind, name=possible_existing_blob): + raise Conflict( + "Blob(s) already exist for this RFC", + code="blobs-exist", + ) + + with TemporaryDirectory() as tempdir: + # Save files in a temporary directory. Use the uploaded filename + # extensions to identify files, but ignore the stems and generate our own. + files_to_move = [] # list[Path] + tmpfile_stem = Path(tempdir) / dest_stem + for upfile in uploaded_files: + uploaded_filename = Path(upfile.name) # name supplied by request + uploaded_ext = "".join(uploaded_filename.suffixes) + tempfile_path = tmpfile_stem.with_suffix(uploaded_ext) + with tempfile_path.open("wb") as dest: + for chunk in upfile.chunks(): + dest.write(chunk) + os.utime(tempfile_path, (mtimestamp, mtimestamp)) + files_to_move.append(tempfile_path) + # copy files to final location, removing any existing ones first if the + # remove flag was set + if replace: + for possible_existing_file in possible_rfc_files: + possible_existing_file.unlink(missing_ok=True) + for possible_existing_blob in possible_rfc_blobs: + remove_from_storage( + blob_kind, possible_existing_blob, warn_if_missing=False + ) + for ftm in files_to_move: + with ftm.open("rb") as f: + store_file( + kind=blob_kind, + name=self.blob_destination(ftm), + file=f, + doc_name=rfc.name, + doc_rev=rfc.rev, # expect blank, but match whatever it is + mtime=mtime, + ) + destination = self.fs_destination(ftm) + if ( + settings.SERVER_MODE != "production" + and not destination.parent.exists() + ): + destination.parent.mkdir() + shutil.move(ftm, destination) + + # Trigger red precomputer + needs_updating = [rfc.rfc_number] + for rel in rfc.relateddocument_set.filter( + relationship_id__in=["obs", "updates"] + ): + needs_updating.append(rel.target.rfc_number) + trigger_red_precomputer_task.delay(rfc_number_list=sorted(needs_updating)) + # Trigger search index update + update_rfc_searchindex_task.delay(rfc.rfc_number) + # Trigger reference relation srebuild + rebuild_reference_relations_task.delay(doc_names=[rfc.name]) + + return Response(NotificationAckSerializer().data) + + +class RfcIndexView(APIView): + api_key_endpoint = "ietf.api.views_rpc" + + @extend_schema( + operation_id="refresh_rfc_index", + summary="Refresh rfc-index files", + description="Requests creation of various index files.", + responses={202: None}, + request=None, + ) + def post(self, request): + mark_rfcindex_as_dirty() + return Response(status=202) diff --git a/ietf/bin/aliases-from-json.py b/ietf/bin/aliases-from-json.py new file mode 100644 index 0000000000..0da5d1f8b9 --- /dev/null +++ b/ietf/bin/aliases-from-json.py @@ -0,0 +1,104 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +# +# Uses only Python standard lib +# + +import argparse +import datetime +import json +import shutil +import stat +import sys + +from pathlib import Path +from tempfile import TemporaryDirectory + +# Default options +POSTCONFIRM_PATH = "/a/postconfirm/wrapper" +VDOMAIN = "virtual.ietf.org" + +# Map from domain label to dns domain +ADOMAINS = { + "ietf": "ietf.org", + "irtf": "irtf.org", + "iab": "iab.org", +} + + +def generate_files(records, adest, vdest, postconfirm, vdomain): + """Generate files from an iterable of records + + If adest or vdest exists as a file, it will be overwritten. If it is a directory, files + with the default names (draft-aliases and draft-virtual) will be created, but existing + files _will not_ be overwritten! 
+ """ + with TemporaryDirectory() as tmpdir: + tmppath = Path(tmpdir) + apath = tmppath / "aliases" + vpath = tmppath / "virtual" + + with apath.open("w") as afile, vpath.open("w") as vfile: + date = datetime.datetime.now(datetime.UTC) + signature = f"# Generated by {Path(__file__).absolute()} at {date}\n" + afile.write(signature) + vfile.write(signature) + vfile.write(f"{vdomain} anything\n") + + for item in records: + alias = item["alias"] + domains = item["domains"] + address_list = item["addresses"] + filtername = f"xfilter-{alias}" + afile.write(f'{filtername + ":":64s} "|{postconfirm} filter expand-{alias} {vdomain}"\n') + for dom in domains: + vfile.write(f"{f'{alias}@{ADOMAINS[dom]}':64s} {filtername}\n") + vfile.write(f"{f'expand-{alias}@{vdomain}':64s} {', '.join(sorted(address_list))}\n") + + perms = stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH + apath.chmod(perms) + vpath.chmod(perms) + shutil.move(apath, adest) + shutil.move(vpath, vdest) + + +def directory_path(val): + p = Path(val) + if p.is_dir(): + return p + else: + raise argparse.ArgumentTypeError(f"{p} is not a directory") + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Convert a JSON stream of draft alias definitions into alias / virtual alias files." + ) + parser.add_argument( + "--prefix", + required=True, + help="Prefix for output files. Files will be named -aliases and -virtual." + ) + parser.add_argument( + "--output-dir", + default="./", + type=directory_path, + help="Destination for output files.", + ) + parser.add_argument( + "--postconfirm", + default=POSTCONFIRM_PATH, + help=f"Full path to postconfirm executable (defaults to {POSTCONFIRM_PATH}", + ) + parser.add_argument( + "--vdomain", + default=VDOMAIN, + help=f"Virtual domain (defaults to {VDOMAIN}_", + ) + args = parser.parse_args() + data = json.load(sys.stdin) + generate_files( + data["aliases"], + adest=args.output_dir / f"{args.prefix}-aliases", + vdest=args.output_dir / f"{args.prefix}-virtual", + postconfirm=args.postconfirm, + vdomain=args.vdomain, + ) diff --git a/ietf/bin/expire-ids b/ietf/bin/expire-ids index 98ee8d75fe..bb0b94ee61 100755 --- a/ietf/bin/expire-ids +++ b/ietf/bin/expire-ids @@ -13,10 +13,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) sys.path = [ basedir ] + sys.path os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings" -virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py") -if os.path.exists(virtualenv_activation): - execfile(virtualenv_activation, dict(__file__=virtualenv_activation)) - syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER) import django diff --git a/ietf/bin/expire-last-calls b/ietf/bin/expire-last-calls deleted file mode 100755 index 83b565e192..0000000000 --- a/ietf/bin/expire-last-calls +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -# This script requires that the proper virtual python environment has been -# invoked before start - -import os -import sys -import syslog - -# boilerplate -basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) -sys.path = [ basedir ] + sys.path -os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings" - -virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py") -if os.path.exists(virtualenv_activation): - execfile(virtualenv_activation, dict(__file__=virtualenv_activation)) - -syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER) - -import django -django.setup() - -# 
---------------------------------------------------------------------- - -from ietf.doc.lastcall import get_expired_last_calls, expire_last_call - -drafts = get_expired_last_calls() -for doc in drafts: - try: - expire_last_call(doc) - syslog.syslog("Expired last call for %s (id=%s)" % (doc.file_tag(), doc.pk)) - except Exception as e: - syslog.syslog(syslog.LOG_ERR, "ERROR: Failed to expire last call for %s (id=%s)" % (doc.file_tag(), doc.pk)) diff --git a/ietf/bin/expire-submissions b/ietf/bin/expire-submissions index 22db38322d..113a53ddfa 100755 --- a/ietf/bin/expire-submissions +++ b/ietf/bin/expire-submissions @@ -8,10 +8,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) sys.path = [ basedir ] + sys.path os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings" -virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py") -if os.path.exists(virtualenv_activation): - execfile(virtualenv_activation, dict(__file__=virtualenv_activation)) - syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER) import django diff --git a/ietf/bin/iana-review-email b/ietf/bin/iana-review-email index 5c7a7183b9..27aee4015e 100755 --- a/ietf/bin/iana-review-email +++ b/ietf/bin/iana-review-email @@ -8,10 +8,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) sys.path = [ basedir ] + sys.path os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings" -virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py") -if os.path.exists(virtualenv_activation): - execfile(virtualenv_activation, dict(__file__=virtualenv_activation)) - syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER) import django diff --git a/ietf/bin/mailman_listinfo.py b/ietf/bin/mailman_listinfo.py deleted file mode 100755 index f7e4cfe4c1..0000000000 --- a/ietf/bin/mailman_listinfo.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/python2.7 -# Copyright The IETF Trust 2022, All Rights Reserved -# Note the shebang. This specifically targets deployment on IETFA and intends to use its system python2.7. - -# This is an adaptor to pull information out of Mailman2 using its python libraries (which are only available for python2). -# It is NOT django code, and does not have access to django.conf.settings. 
- -import json -import sys - -from collections import defaultdict - -def main(): - - sys.path.append('/usr/lib/mailman') - - have_mailman = False - try: - from Mailman import Utils - from Mailman import MailList - from Mailman import MemberAdaptor - have_mailman = True - except ImportError: - pass - - - if not have_mailman: - sys.stderr.write("Could not import mailman modules -- skipping import of mailman list info") - sys.exit() - - names = list(Utils.list_names()) - - # need to emit dict of names, each name has an mlist, and each mlist has description, advertised, and members (calculated as below) - result = defaultdict(dict) - for name in names: - mlist = MailList.MailList(name, lock=False) - result[name] = dict() - result[name]['internal_name'] = mlist.internal_name() - result[name]['real_name'] = mlist.real_name - result[name]['description'] = mlist.description # Not attempting to change encoding - result[name]['advertised'] = mlist.advertised - result[name]['members'] = list() - if mlist.advertised: - members = mlist.getRegularMemberKeys() + mlist.getDigestMemberKeys() - members = set([ m for m in members if mlist.getDeliveryStatus(m) == MemberAdaptor.ENABLED ]) - result[name]['members'] = list(members) - json.dump(result, sys.stdout) - -if __name__ == "__main__": - main() diff --git a/ietf/bin/merge-person-records b/ietf/bin/merge-person-records deleted file mode 100755 index 155e5755f6..0000000000 --- a/ietf/bin/merge-person-records +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -*- Python -*- -# -''' -This script merges two Person records into one. It determines which record is the target -based on most current User record (last_login) unless -f (force) option is used to -force SOURCE TARGET as specified on the command line. The order of operations is -important. We must complete all source.save() operations before moving the aliases to -the target, this is to avoid extra "Possible duplicate Person" emails going out, if the -Person is saved without an alias the Person.save() creates another one, which then -conflicts with the moved one. 
-''' - -# Set PYTHONPATH and load environment variables for standalone script ----------------- -import os, sys -basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) -sys.path = [ basedir ] + sys.path -os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings" - -virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py") -if os.path.exists(virtualenv_activation): - execfile(virtualenv_activation, dict(__file__=virtualenv_activation)) - -import django -django.setup() -# ------------------------------------------------------------------------------------- - -import argparse -from django.contrib import admin -from ietf.person.models import Person -from ietf.person.utils import (merge_persons, send_merge_notification, handle_users, - determine_merge_order) - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument("source_id",type=int) - parser.add_argument("target_id",type=int) - parser.add_argument('-f','--force', help='force merge order',action='store_true') - parser.add_argument('-v','--verbose', help='verbose output',action='store_true') - args = parser.parse_args() - - source = Person.objects.get(pk=args.source_id) - target = Person.objects.get(pk=args.target_id) - - # set merge order - if not args.force: - source,target = determine_merge_order(source,target) - - # confirm - print "Merging person {}({}) to {}({})".format(source.ascii,source.pk,target.ascii,target.pk) - print handle_users(source,target,check_only=True) - response = raw_input('Ok to continue y/n? ') - if response.lower() != 'y': - sys.exit() - - # perform merge - success, changes = merge_persons(source, target, verbose=args.verbose) - - # send email notification - send_merge_notification(target,changes) - -if __name__ == "__main__": - main() diff --git a/ietf/bin/notify-expirations b/ietf/bin/notify-expirations index 0270c13765..fc2fd86a31 100755 --- a/ietf/bin/notify-expirations +++ b/ietf/bin/notify-expirations @@ -7,10 +7,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) sys.path = [ basedir ] + sys.path os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings" -virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py") -if os.path.exists(virtualenv_activation): - execfile(virtualenv_activation, dict(__file__=virtualenv_activation)) - import django django.setup() diff --git a/ietf/bin/rfc-editor-index-updates b/ietf/bin/rfc-editor-index-updates deleted file mode 100755 index dc7abe26bb..0000000000 --- a/ietf/bin/rfc-editor-index-updates +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env python - -# This script requires that the proper virtual python environment has been -# invoked before start - -import datetime -import io -import os -import requests -import sys -import syslog -import traceback - -# boilerplate -basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) -sys.path = [ basedir ] + sys.path -os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings" - -# Before invoking django -syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER) - -import django -django.setup() - -from django.conf import settings -from optparse import OptionParser -from django.core.mail import mail_admins - -from ietf.doc.utils import rebuild_reference_relations -from ietf.utils.log import log -from ietf.utils.pipe import pipe -from ietf.utils.timezone import date_today - -import ietf.sync.rfceditor - - -parser = OptionParser() -parser.add_option("-d", dest="skip_date", - help="To speed up processing 
skip RFCs published before this date (default is one year ago)", metavar="YYYY-MM-DD") - -options, args = parser.parse_args() - -skip_date = date_today() - datetime.timedelta(days=365) -if options.skip_date: - skip_date = datetime.datetime.strptime(options.skip_date, "%Y-%m-%d").date() - -log("Updating document metadata from RFC index going back to %s, from %s" % (skip_date, settings.RFC_EDITOR_INDEX_URL)) - - -try: - response = requests.get( - settings.RFC_EDITOR_INDEX_URL, - timeout=30, # seconds - ) -except requests.Timeout as exc: - log(f'GET request timed out retrieving RFC editor index: {exc}') - sys.exit(1) - - -rfc_index_xml = response.text -index_data = ietf.sync.rfceditor.parse_index(io.StringIO(rfc_index_xml)) - -try: - response = requests.get( - settings.RFC_EDITOR_ERRATA_JSON_URL, - timeout=30, # seconds - ) -except requests.Timeout as exc: - log(f'GET request timed out retrieving RFC editor errata: {exc}') - sys.exit(1) -errata_data = response.json() - -if len(index_data) < ietf.sync.rfceditor.MIN_INDEX_RESULTS: - log("Not enough index entries, only %s" % len(index_data)) - sys.exit(1) - -if len(errata_data) < ietf.sync.rfceditor.MIN_ERRATA_RESULTS: - log("Not enough errata entries, only %s" % len(errata_data)) - sys.exit(1) - -new_rfcs = [] -for changes, doc, rfc_published in ietf.sync.rfceditor.update_docs_from_rfc_index(index_data, errata_data, skip_older_than_date=skip_date): - if rfc_published: - new_rfcs.append(doc) - - for c in changes: - log("RFC%s, %s: %s" % (doc.rfcnum, doc.name, c)) - -sys.exit(0) - -# This can be called while processing a notifying POST from the RFC Editor -# Spawn a child to sync the rfcs and calculate new reference relationships -# so that the POST - -newpid = os.fork() - -if newpid == 0: - try: - pipe("%s -a %s %s" % (settings.RSYNC_BINARY,settings.RFC_TEXT_RSYNC_SOURCE,settings.RFC_PATH)) - for rfc in new_rfcs: - rebuild_reference_relations(rfc) - log("Updated references for %s"%rfc.canonical_name()) - except: - subject = "Exception in updating references for new rfcs: %s : %s" % (sys.exc_info()[0],sys.exc_info()[1]) - msg = "%s\n%s\n----\n%s"%(sys.exc_info()[0],sys.exc_info()[1],traceback.format_tb(sys.exc_info()[2])) - mail_admins(subject,msg,fail_silently=True) - log(subject) - os._exit(0) -else: - sys.exit(0) diff --git a/ietf/bin/rfc-editor-queue-updates b/ietf/bin/rfc-editor-queue-updates deleted file mode 100755 index b441e50ebc..0000000000 --- a/ietf/bin/rfc-editor-queue-updates +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -import io -import os -import requests -import sys - -# boilerplate -basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) -sys.path = [ basedir ] + sys.path -os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings" - -import django -django.setup() - -from django.conf import settings - -from ietf.sync.rfceditor import parse_queue, MIN_QUEUE_RESULTS, update_drafts_from_queue -from ietf.utils.log import log - -log("Updating RFC Editor queue states from %s" % settings.RFC_EDITOR_QUEUE_URL) - -try: - response = requests.get( - settings.RFC_EDITOR_QUEUE_URL, - timeout=30, # seconds - ) -except requests.Timeout as exc: - log(f'GET request timed out retrieving RFC editor queue: {exc}') - sys.exit(1) -drafts, warnings = parse_queue(io.StringIO(response.text)) -for w in warnings: - log(u"Warning: %s" % w) - -if len(drafts) < MIN_QUEUE_RESULTS: - log("Not enough results, only %s" % len(drafts)) - sys.exit(1) - -changed, warnings = update_drafts_from_queue(drafts) -for w in warnings: - 
log(u"Warning: %s" % w) - -for c in changed: - log(u"Updated %s" % c) diff --git a/ietf/ietfauth/management/commands/__init__.py b/ietf/blobdb/__init__.py similarity index 100% rename from ietf/ietfauth/management/commands/__init__.py rename to ietf/blobdb/__init__.py diff --git a/ietf/blobdb/admin.py b/ietf/blobdb/admin.py new file mode 100644 index 0000000000..44a30d1d7f --- /dev/null +++ b/ietf/blobdb/admin.py @@ -0,0 +1,58 @@ +# Copyright The IETF Trust 2025-2026, All Rights Reserved +from django.contrib import admin +from django.db.models import QuerySet +from django.db.models.functions import Length +from rangefilter.filters import DateRangeQuickSelectListFilterBuilder + +from .apps import get_blobdb +from .models import Blob, ResolvedMaterial +from .utils import queue_for_replication + + +@admin.register(Blob) +class BlobAdmin(admin.ModelAdmin): + list_display = ["bucket", "name", "object_size", "modified", "mtime", "content_type"] + list_filter = [ + "bucket", + "content_type", + ("modified", DateRangeQuickSelectListFilterBuilder()), + ("mtime", DateRangeQuickSelectListFilterBuilder()), + ] + search_fields = ["name"] + list_display_links = ["name"] + actions = ["replicate_blob"] + + def get_queryset(self, request): + return ( + super().get_queryset(request) + .defer("content") # don't load this unless we want it + .annotate(object_size=Length("content")) # accessed via object_size() + ) + + @admin.display(ordering="object_size") + def object_size(self, instance): + """Get the size of the object""" + return instance.object_size # annotation added in get_queryset() + + @admin.action(description="Replicate blobs") + def replicate_blob(self, request, queryset: QuerySet[Blob]): + blob_count = 0 + for blob in queryset.all(): + if isinstance(blob, Blob): + queue_for_replication( + bucket=blob.bucket, name=blob.name, using=get_blobdb() + ) + blob_count += 1 + self.message_user( + request, + f"Queued replication of a total of {blob_count} Blob(s)", + ) + + +@admin.register(ResolvedMaterial) +class ResolvedMaterialAdmin(admin.ModelAdmin): + model = ResolvedMaterial + list_display = ["name", "meeting_number", "bucket", "blob"] + list_filter = ["meeting_number", "bucket"] + search_fields = ["name", "blob"] + ordering = ["name"] diff --git a/ietf/blobdb/apps.py b/ietf/blobdb/apps.py new file mode 100644 index 0000000000..c2513b6819 --- /dev/null +++ b/ietf/blobdb/apps.py @@ -0,0 +1,30 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +from django.apps import AppConfig + + +class BlobdbConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "ietf.blobdb" + + def ready(self): + """Initialize app once the registries / settings are populated""" + from django.conf import settings + + # Validate that the DB is set up + db = get_blobdb() # depends on settings.BLOBDB_DATABASE + if db is not None and db not in settings.DATABASES: + raise RuntimeError( + f"settings.BLOBDB_DATABASE is '{db}' but that is not present in settings.DATABASES" + ) + + # Validate replication settings + from .replication import validate_replication_settings + + validate_replication_settings() + + +def get_blobdb(): + """Retrieve the blobdb setting from Django's settings""" + from django.conf import settings + + return getattr(settings, "BLOBDB_DATABASE", None) diff --git a/ietf/blobdb/factories.py b/ietf/blobdb/factories.py new file mode 100644 index 0000000000..fdcebb2b4e --- /dev/null +++ b/ietf/blobdb/factories.py @@ -0,0 +1,13 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +import 
factory + +from .models import Blob + + +class BlobFactory(factory.django.DjangoModelFactory): + class Meta: + model = Blob + + name = factory.Faker("file_path") + bucket = factory.Faker("word") + content = factory.Faker("binary", length=32) # careful, default length is 1e6 diff --git a/ietf/blobdb/migrations/0001_initial.py b/ietf/blobdb/migrations/0001_initial.py new file mode 100644 index 0000000000..2fcb08e5ea --- /dev/null +++ b/ietf/blobdb/migrations/0001_initial.py @@ -0,0 +1,78 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models +import django.utils.timezone + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="Blob", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "name", + models.CharField(help_text="Name of the blob", max_length=1024), + ), + ( + "bucket", + models.CharField( + help_text="Name of the bucket containing this blob", + max_length=1024, + ), + ), + ( + "modified", + models.DateTimeField( + default=django.utils.timezone.now, + help_text="Last modification time of the blob", + ), + ), + ("content", models.BinaryField(help_text="Content of the blob")), + ( + "checksum", + models.CharField( + editable=False, + help_text="SHA-384 digest of the content", + max_length=96, + ), + ), + ( + "mtime", + models.DateTimeField( + blank=True, + default=None, + help_text="mtime associated with the blob as a filesystem object", + null=True, + ), + ), + ( + "content_type", + models.CharField( + blank=True, + help_text="content-type header value for the blob contents", + max_length=1024, + ), + ), + ], + ), + migrations.AddConstraint( + model_name="blob", + constraint=models.UniqueConstraint( + fields=("bucket", "name"), name="unique_name_per_bucket" + ), + ), + ] diff --git a/ietf/blobdb/migrations/0002_resolvedmaterial.py b/ietf/blobdb/migrations/0002_resolvedmaterial.py new file mode 100644 index 0000000000..e0ab405b11 --- /dev/null +++ b/ietf/blobdb/migrations/0002_resolvedmaterial.py @@ -0,0 +1,48 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("blobdb", "0001_initial"), + ] + + operations = [ + migrations.CreateModel( + name="ResolvedMaterial", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(help_text="Name to resolve", max_length=300)), + ( + "meeting_number", + models.CharField( + help_text="Meeting material is related to", max_length=64 + ), + ), + ( + "bucket", + models.CharField(help_text="Resolved bucket name", max_length=255), + ), + ( + "blob", + models.CharField(help_text="Resolved blob name", max_length=300), + ), + ], + ), + migrations.AddConstraint( + model_name="resolvedmaterial", + constraint=models.UniqueConstraint( + fields=("name", "meeting_number"), name="unique_name_per_meeting" + ), + ), + ] diff --git a/ietf/secr/sreq/__init__.py b/ietf/blobdb/migrations/__init__.py similarity index 100% rename from ietf/secr/sreq/__init__.py rename to ietf/blobdb/migrations/__init__.py diff --git a/ietf/blobdb/models.py b/ietf/blobdb/models.py new file mode 100644 index 0000000000..6dbb615fa0 --- /dev/null +++ b/ietf/blobdb/models.py @@ -0,0 +1,102 @@ +# Copyright The IETF Trust 2025-2026, All Rights Reserved +from 
hashlib import sha384 + +from django.db import models, transaction +from django.utils import timezone + +from .apps import get_blobdb +from .utils import queue_for_replication + + +class BlobQuerySet(models.QuerySet): + """QuerySet customized for Blob management + + Operations that bypass save() / delete() won't correctly notify watchers of changes + to the blob contents. Disallow them. + """ + + def delete(self): + raise NotImplementedError("Only deleting individual Blobs is supported") + + def bulk_create(self, *args, **kwargs): + raise NotImplementedError("Only creating individual Blobs is supported") + + def update(self, *args, **kwargs): + # n.b., update_or_create() _does_ call save() + raise NotImplementedError("Updating BlobQuerySets is not supported") + + def bulk_update(self, *args, **kwargs): + raise NotImplementedError("Updating Blobs in bulk is not supported") + + +class Blob(models.Model): + objects = BlobQuerySet.as_manager() + name = models.CharField(max_length=1024, help_text="Name of the blob") + bucket = models.CharField( + max_length=1024, help_text="Name of the bucket containing this blob" + ) + modified = models.DateTimeField( + default=timezone.now, help_text="Last modification time of the blob" + ) + content = models.BinaryField(help_text="Content of the blob") + checksum = models.CharField( + max_length=96, help_text="SHA-384 digest of the content", editable=False + ) + mtime = models.DateTimeField( + default=None, + blank=True, + null=True, + help_text="mtime associated with the blob as a filesystem object", + ) + content_type = models.CharField( + max_length=1024, + blank=True, + help_text="content-type header value for the blob contents", + ) + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["bucket", "name"], name="unique_name_per_bucket" + ), + ] + + def __str__(self): + return f"{self.bucket}:{self.name}" + + def save(self, **kwargs): + db = get_blobdb() + with transaction.atomic(using=db): + self.checksum = sha384(self.content, usedforsecurity=False).hexdigest() + super().save(**kwargs) + self._emit_blob_change_event(using=db) + + def delete(self, **kwargs): + db = get_blobdb() + with transaction.atomic(using=db): + retval = super().delete(**kwargs) + self._emit_blob_change_event(using=db) + return retval + + def _emit_blob_change_event(self, using: str | None=None): + queue_for_replication(self.bucket, self.name, using=using) + + +class ResolvedMaterial(models.Model): + # A Document name can be 255 characters; allow this name to be a bit longer + name = models.CharField(max_length=300, help_text="Name to resolve") + meeting_number = models.CharField( + max_length=64, help_text="Meeting material is related to" + ) + bucket = models.CharField(max_length=255, help_text="Resolved bucket name") + blob = models.CharField(max_length=300, help_text="Resolved blob name") + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["name", "meeting_number"], name="unique_name_per_meeting" + ) + ] + + def __str__(self): + return f"{self.name}@{self.meeting_number} -> {self.bucket}:{self.blob}" diff --git a/ietf/blobdb/replication.py b/ietf/blobdb/replication.py new file mode 100644 index 0000000000..d251d3b95c --- /dev/null +++ b/ietf/blobdb/replication.py @@ -0,0 +1,178 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +import datetime +from dataclasses import dataclass +from io import BytesIO +from typing import Optional + +from django.conf import settings +from django.core.files import File +from django.core.files.storage import 
storages, InvalidStorageError
+from django.db import connections
+
+from ietf.utils import log
+
+DEFAULT_SETTINGS = {
+    "ENABLED": False,
+    "DEST_STORAGE_PATTERN": "r2-{bucket}",
+    "INCLUDE_BUCKETS": (),  # empty means include all
+    "EXCLUDE_BUCKETS": (),  # empty means exclude none
+    "VERBOSE_LOGGING": False,
+}
+
+
+class SimpleMetadataFile(File):
+    def __init__(self, file, name=None):
+        super().__init__(file, name)
+        self.custom_metadata = {}
+        self.content_type = ""
+
+
+def get_replication_settings():
+    return DEFAULT_SETTINGS | getattr(settings, "BLOBDB_REPLICATION", {})
+
+
+def validate_replication_settings():
+    replicator_settings = get_replication_settings()
+    # No extra settings allowed
+    unknown_settings = set(replicator_settings.keys()) - set(DEFAULT_SETTINGS.keys())
+    if len(unknown_settings) > 0:
+        raise RuntimeError(
+            f"Unrecognized BLOBDB_REPLICATION settings: {', '.join(sorted(unknown_settings))}"
+        )
+    # destination storage pattern must be a string that includes {bucket}
+    pattern = replicator_settings["DEST_STORAGE_PATTERN"]
+    if not isinstance(pattern, str):
+        raise RuntimeError(
+            f"DEST_STORAGE_PATTERN must be a str, not {type(pattern).__name__}"
+        )
+    if "{bucket}" not in pattern:
+        raise RuntimeError(
+            f"DEST_STORAGE_PATTERN must contain the substring '{{bucket}}' (found '{pattern}')"
+        )
+    # include/exclude buckets must be list-like
+    include_buckets = replicator_settings["INCLUDE_BUCKETS"]
+    if not isinstance(include_buckets, (list, tuple, set)):
+        raise RuntimeError("INCLUDE_BUCKETS must be a list, tuple, or set")
+    exclude_buckets = replicator_settings["EXCLUDE_BUCKETS"]
+    if not isinstance(exclude_buckets, (list, tuple, set)):
+        raise RuntimeError("EXCLUDE_BUCKETS must be a list, tuple, or set")
+    # if we have explicit include_buckets, make sure the necessary storages exist
+    if len(include_buckets) > 0:
+        include_storages = {destination_storage_name_for(b) for b in include_buckets}
+        exclude_storages = {destination_storage_name_for(b) for b in exclude_buckets}
+        configured_storages = set(settings.STORAGES.keys())
+        missing_storages = include_storages - exclude_storages - configured_storages
+        if len(missing_storages) > 0:
+            raise RuntimeError(
+                f"Replication requires unknown storage(s): {', '.join(missing_storages)}"
+            )
+
+
+def destination_storage_name_for(bucket: str):
+    pattern = get_replication_settings()["DEST_STORAGE_PATTERN"]
+    return pattern.format(bucket=bucket)
+
+
+def destination_storage_for(bucket: str):
+    storage_name = destination_storage_name_for(bucket)
+    return storages[storage_name]
+
+
+def replication_enabled(bucket: str):
+    replication_settings = get_replication_settings()
+    if not replication_settings["ENABLED"]:
+        return False
+    # Default is all buckets are included
+    included = (
+        len(replication_settings["INCLUDE_BUCKETS"]) == 0
+        or bucket in replication_settings["INCLUDE_BUCKETS"]
+    )
+    # Default is no buckets are excluded
+    excluded = (
+        len(replication_settings["EXCLUDE_BUCKETS"]) > 0
+        and bucket in replication_settings["EXCLUDE_BUCKETS"]
+    )
+    return included and not excluded
+
+
+def verbose_logging_enabled():
+    return bool(get_replication_settings()["VERBOSE_LOGGING"])
+
+
+@dataclass
+class SqlBlob:
+    content: bytes
+    checksum: str
+    modified: datetime.datetime
+    mtime: Optional[datetime.datetime]
+    content_type: str
+
+
+def fetch_blob_via_sql(bucket: str, name: str) -> Optional[SqlBlob]:
+    blobdb_connection = connections["blobdb"]
+    cursor = blobdb_connection.cursor()
+    cursor.execute(
+        """
+        SELECT content, checksum,
modified, mtime, content_type FROM blobdb_blob + WHERE bucket=%s AND name=%s LIMIT 1 + """, + [bucket, name], + ) + row = cursor.fetchone() + col_names = [col[0] for col in cursor.description] + return None if row is None else SqlBlob(**{ + col_name: row_val + for col_name, row_val in zip(col_names, row) + }) + + +def replicate_blob(bucket, name): + """Replicate a Blobdb blob to a Storage""" + if not replication_enabled(bucket): + if verbose_logging_enabled(): + log.log( + f"Not replicating {bucket}:{name} because replication is not enabled for {bucket}" + ) + return + + try: + destination_storage = destination_storage_for(bucket) + except InvalidStorageError as e: + log.log( + f"Failed to replicate {bucket}:{name} because destination storage for {bucket} is not configured" + ) + raise ReplicationError from e + + blob = fetch_blob_via_sql(bucket, name) + if blob is None: + if verbose_logging_enabled(): + log.log(f"Deleting {bucket}:{name} from replica") + try: + destination_storage.delete(name) + except Exception as e: + log.log(f"Failed to delete {bucket}:{name} from replica: {e}") + raise ReplicationError from e + else: + # Add metadata expected by the MetadataS3Storage + file_with_metadata = SimpleMetadataFile(file=BytesIO(blob.content)) + file_with_metadata.content_type = blob.content_type + file_with_metadata.custom_metadata = { + "sha384": blob.checksum, + "mtime": (blob.mtime or blob.modified).isoformat(), + } + if verbose_logging_enabled(): + log.log( + f"Saving {bucket}:{name} to replica (" + f"sha384: '{file_with_metadata.custom_metadata['sha384'][:16]}...', " + f"content_type: '{file_with_metadata.content_type}', " + f"mtime: '{file_with_metadata.custom_metadata['mtime']})" + ) + try: + destination_storage.save(name, file_with_metadata) + except Exception as e: + log.log(f"Failed to save {bucket}:{name} to replica: {e}") + raise ReplicationError from e + + +class ReplicationError(Exception): + pass diff --git a/ietf/blobdb/routers.py b/ietf/blobdb/routers.py new file mode 100644 index 0000000000..319c0fbc71 --- /dev/null +++ b/ietf/blobdb/routers.py @@ -0,0 +1,58 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +from django.apps import apps + +from .apps import BlobdbConfig, get_blobdb + + +class BlobdbStorageRouter: + """Database router for the Blobdb""" + + _app_label = None + + @property + def app_label(self): + if self._app_label is None: + for app in apps.get_app_configs(): + if isinstance(app, BlobdbConfig): + self._app_label = app.label + break + if self._app_label is None: + raise RuntimeError( + f"{BlobdbConfig.name} is not present in the Django app registry" + ) + return self._app_label + + @property + def db(self): + return get_blobdb() + + def db_for_read(self, model, **hints): + """Suggest the database that should be used for read operations for objects of type model + + Returns None if there is no suggestion. + """ + if model._meta.app_label == self.app_label: + return self.db + return None # no suggestion + + def db_for_write(self, model, **hints): + """Suggest the database that should be used for write of objects of type model + + Returns None if there is no suggestion. + """ + if model._meta.app_label == self.app_label: + return self.db + return None # no suggestion + + def allow_migrate(self, db, app_label, model_name=None, **hints): + """Determine if the migration operation is allowed to run on the database with alias db + + Return True if the operation should run, False if it shouldn’t run, or + None if the router has no opinion. 
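+
+        In practice (see the logic below) this means, assuming the app label is
+        "blobdb" and settings.BLOBDB_DATABASE is "blobdb":
+
+            allow_migrate(db="blobdb", app_label="blobdb")   -> True
+            allow_migrate(db="default", app_label="blobdb")  -> False  (blobdb models stay off other databases)
+            allow_migrate(db="blobdb", app_label="doc")      -> False  (other apps stay off the blob database)
+            allow_migrate(db="default", app_label="doc")     -> None   (no opinion; left to other routers)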
+ """ + if self.db is None: + return None # no opinion, use the default db + is_our_app = app_label == self.app_label + is_our_db = db == self.db + if is_our_app or is_our_db: + return is_our_app and is_our_db diff --git a/ietf/blobdb/storage.py b/ietf/blobdb/storage.py new file mode 100644 index 0000000000..e304dabc5d --- /dev/null +++ b/ietf/blobdb/storage.py @@ -0,0 +1,106 @@ +# Copyright The IETF Trust 2025-2026, All Rights Reserved +from typing import Optional + +from django.core.exceptions import SuspiciousFileOperation +from django.core.files.base import ContentFile +from django.core.files.storage import Storage +from django.db.models.functions import Length +from django.utils.deconstruct import deconstructible +from django.utils import timezone + +from ietf.utils.storage import MetadataFile +from .models import Blob +from .utils import queue_for_replication + + +class BlobFile(MetadataFile): + + def __init__(self, content, name=None, mtime=None, content_type=""): + super().__init__( + file=ContentFile(content), + name=name, + mtime=mtime, + content_type=content_type, + ) + + +@deconstructible +class BlobdbStorage(Storage): + + def __init__(self, bucket_name: Optional[str]=None): + if bucket_name is None: + raise ValueError("BlobdbStorage bucket_name must be specified") + self.bucket_name = bucket_name + + def get_queryset(self): + return Blob.objects.filter(bucket=self.bucket_name) + + def delete(self, name): + blob = self.get_queryset().filter(name=name).first() + if blob is not None: + blob.delete() + + def exists(self, name): + return self.get_queryset().filter(name=name).exists() + + def size(self, name): + sizes = ( + self.get_queryset() + .filter(name=name) + .annotate(object_size=Length("content")) + .values_list("object_size", flat=True) + ) + if len(sizes) == 0: + raise FileNotFoundError( + f"No object '{name}' exists in bucket '{self.bucket_name}'" + ) + return sizes[0] # unique constraint guarantees 0 or 1 entry + + def _open(self, name, mode="rb"): + try: + blob = self.get_queryset().get(name=name) + except Blob.DoesNotExist: + raise FileNotFoundError( + f"No object '{name}' exists in bucket '{self.bucket_name}'" + ) + return BlobFile( + content=blob.content, + name=blob.name, + mtime=blob.mtime or blob.modified, # fall back to modified time + content_type=blob.content_type, + ) + + def _save(self, name, content): + """Perform the save operation + + The storage API allows _save() to save to a different name than was requested. This method will + never do that, instead overwriting the existing blob. + """ + Blob.objects.update_or_create( + name=name, + bucket=self.bucket_name, + defaults={ + "content": content.read(), + "modified": timezone.now(), + "mtime": getattr(content, "mtime", None), + "content_type": getattr(content, "content_type", ""), + }, + ) + return name + + def get_available_name(self, name, max_length=None): + if max_length is not None and len(name) > max_length: + raise SuspiciousFileOperation( + f"BlobdbStorage only allows names up to {max_length}, but was" + f"asked to store the name '{name[:5]}...{name[-5:]} of length {len(name)}" + ) + return name # overwrite is permitted + + def force_replication(self, name: str): + """Force replication of a blob by name + + Be careful with this - replication includes replicating deletion of blobs, so + if you call it with a name that does not exist in blobdb, it will be removed + from R2 if it exists there! 
+ """ + queue_for_replication(bucket=self.bucket_name, name=name) diff --git a/ietf/blobdb/tasks.py b/ietf/blobdb/tasks.py new file mode 100644 index 0000000000..538d415830 --- /dev/null +++ b/ietf/blobdb/tasks.py @@ -0,0 +1,17 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +import json + +from celery import shared_task + +from .replication import replicate_blob, ReplicationError + + +@shared_task( + autoretry_for=(ReplicationError,), retry_backoff=10, retry_kwargs={"max_retries": 5} +) +def pybob_the_blob_replicator_task(body: str): + request = json.loads(body) + bucket = request["bucket"] + name = request["name"] + replicate_blob(bucket, name) diff --git a/ietf/blobdb/tests.py b/ietf/blobdb/tests.py new file mode 100644 index 0000000000..0eadad0a1f --- /dev/null +++ b/ietf/blobdb/tests.py @@ -0,0 +1,80 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +import datetime + +from django.core.files.base import ContentFile + +from ietf.utils.test_utils import TestCase +from .factories import BlobFactory +from .models import Blob +from .storage import BlobFile, BlobdbStorage + + +class StorageTests(TestCase): + def test_save(self): + storage = BlobdbStorage(bucket_name="my-bucket") + timestamp = datetime.datetime( + 2025, + 3, + 17, + 1, + 2, + 3, + tzinfo=datetime.timezone.utc, + ) + # Create file to save + my_file = BlobFile( + content=b"These are my bytes.", + mtime=timestamp, + content_type="application/x-my-content-type", + ) + # save the file + saved_name = storage.save("myfile.txt", my_file) + # validate the outcome + self.assertEqual(saved_name, "myfile.txt") + blob = Blob.objects.filter(bucket="my-bucket", name="myfile.txt").first() + self.assertIsNotNone(blob) # validates bucket and name + self.assertEqual(bytes(blob.content), b"These are my bytes.") + self.assertEqual(blob.mtime, timestamp) + self.assertEqual(blob.content_type, "application/x-my-content-type") + + def test_save_naive_file(self): + storage = BlobdbStorage(bucket_name="my-bucket") + my_naive_file = ContentFile(content=b"These are my naive bytes.") + # save the file + saved_name = storage.save("myfile.txt", my_naive_file) + # validate the outcome + self.assertEqual(saved_name, "myfile.txt") + blob = Blob.objects.filter(bucket="my-bucket", name="myfile.txt").first() + self.assertIsNotNone(blob) # validates bucket and name + self.assertEqual(bytes(blob.content), b"These are my naive bytes.") + self.assertIsNone(blob.mtime) + self.assertEqual(blob.content_type, "") + + def test_open(self): + """BlobdbStorage open yields a BlobFile with specific mtime and content_type""" + mtime = datetime.datetime(2021, 1, 2, 3, 45, tzinfo=datetime.timezone.utc) + blob = BlobFactory(mtime=mtime, content_type="application/x-oh-no-you-didnt") + storage = BlobdbStorage(bucket_name=blob.bucket) + with storage.open(blob.name, "rb") as f: + self.assertTrue(isinstance(f, BlobFile)) + assert isinstance(f, BlobFile) # redundant, narrows type for linter + self.assertEqual(f.read(), bytes(blob.content)) + self.assertEqual(f.mtime, mtime) + self.assertEqual(f.content_type, "application/x-oh-no-you-didnt") + + def test_open_null_mtime(self): + """BlobdbStorage open yields a BlobFile with default mtime and content_type""" + blob = BlobFactory(content_type="application/x-oh-no-you-didnt") # does not set mtime + storage = BlobdbStorage(bucket_name=blob.bucket) + with storage.open(blob.name, "rb") as f: + self.assertTrue(isinstance(f, BlobFile)) + assert isinstance(f, BlobFile) # redundant, narrows type for linter + 
self.assertEqual(f.read(), bytes(blob.content)) + self.assertIsNotNone(f.mtime) + self.assertEqual(f.mtime, blob.modified) + self.assertEqual(f.content_type, "application/x-oh-no-you-didnt") + + def test_open_file_not_found(self): + storage = BlobdbStorage(bucket_name="not-a-bucket") + with self.assertRaises(FileNotFoundError): + storage.open("definitely/not-a-file.txt") diff --git a/ietf/blobdb/utils.py b/ietf/blobdb/utils.py new file mode 100644 index 0000000000..93f8f2f521 --- /dev/null +++ b/ietf/blobdb/utils.py @@ -0,0 +1,32 @@ +# Copyright The IETF Trust 2026, All Rights Reserved +import json +from functools import partial + +from django.db import transaction + +from ietf.blobdb.replication import replication_enabled +from ietf.blobdb.tasks import pybob_the_blob_replicator_task + + +def queue_for_replication(bucket: str, name: str, using: str | None=None): + """Queue a blob for replication + + This is private to the blobdb app. Do not call it directly from other apps. + """ + if not replication_enabled(bucket): + return + + # For now, fire a celery task we've arranged to guarantee in-order processing. + # Later becomes pushing an event onto a queue to a dedicated worker. + transaction.on_commit( + partial( + pybob_the_blob_replicator_task.delay, + json.dumps( + { + "name": name, + "bucket": bucket, + } + ) + ), + using=using, + ) diff --git a/ietf/celeryapp.py b/ietf/celeryapp.py index b36f134636..fda89c30be 100644 --- a/ietf/celeryapp.py +++ b/ietf/celeryapp.py @@ -1,14 +1,20 @@ import os import scout_apm.celery -from celery import Celery +import celery from scout_apm.api import Config +# Disable celery's internal logging configuration, we set it up via Django +@celery.signals.setup_logging.connect +def on_setup_logging(**kwargs): + pass + + # Set the default Django settings module for the 'celery' program os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ietf.settings') -app = Celery('ietf') +app = celery.Celery('ietf') # Using a string here means the worker doesn't have to serialize # the configuration object to child processes. @@ -17,10 +23,13 @@ app.config_from_object('django.conf:settings', namespace='CELERY') # Turn on Scout APM celery instrumentation if configured in the environment -scout_key = os.environ.get("SCOUT_KEY", "") -scout_name = os.environ.get("SCOUT_NAME", "") -scout_core_agent_socket_path = os.environ.get("SCOUT_CORE_AGENT_SOCKET_PATH", "tcp://scoutapm:6590") -if scout_key and scout_name: +scout_key = os.environ.get("DATATRACKER_SCOUT_KEY", None) +if scout_key is not None: + scout_name = os.environ.get("DATATRACKER_SCOUT_NAME", "Datatracker") + scout_core_agent_socket_path = "tcp://{host}:{port}".format( + host=os.environ.get("DATATRACKER_SCOUT_CORE_AGENT_HOST", "localhost"), + port=os.environ.get("DATATRACKER_SCOUT_CORE_AGENT_PORT", "6590"), + ) Config.set( key=scout_key, name=scout_name, diff --git a/ietf/checks.py b/ietf/checks.py index c823abf118..f911d081f0 100644 --- a/ietf/checks.py +++ b/ietf/checks.py @@ -28,81 +28,6 @@ def already_ran(): checks_run.append(name) return False -@checks.register('directories') -def check_cdn_directory_exists(app_configs, **kwargs): - """This checks that the path from which the CDN will serve static files for - this version of the datatracker actually exists. In development and test - mode STATIC_ROOT will normally be just static/, but in production it will be - set to a different part of the file system which is served via CDN, and the - path will contain the datatracker release version. 
- """ - if already_ran(): - return [] - # - errors = [] - if settings.SERVER_MODE == 'production' and not os.path.exists(settings.STATIC_ROOT): - errors.append(checks.Error( - "The static files directory has not been set up.", - hint="Please run 'ietf/manage.py collectstatic'.", - obj=None, - id='datatracker.E001', - )) - return errors - -@checks.register('files') -def check_group_email_aliases_exists(app_configs, **kwargs): - from ietf.group.views import check_group_email_aliases - # - if already_ran(): - return [] - # - errors = [] - try: - ok = check_group_email_aliases() - if not ok: - errors.append(checks.Error( - "Found no aliases in the group email aliases file\n'%s'."%settings.GROUP_ALIASES_PATH, - hint="Please run the generate_group_aliases management command to generate them.", - obj=None, - id="datatracker.E0002", - )) - except IOError as e: - errors.append(checks.Error( - "Could not read group email aliases:\n %s" % e, - hint="Please run the generate_group_aliases management command to generate them.", - obj=None, - id="datatracker.E0003", - )) - - return errors - -@checks.register('files') -def check_doc_email_aliases_exists(app_configs, **kwargs): - from ietf.doc.views_doc import check_doc_email_aliases - # - if already_ran(): - return [] - # - errors = [] - try: - ok = check_doc_email_aliases() - if not ok: - errors.append(checks.Error( - "Found no aliases in the document email aliases file\n'%s'."%settings.DRAFT_VIRTUAL_PATH, - hint="Please run the generate_draft_aliases management command to generate them.", - obj=None, - id="datatracker.E0004", - )) - except IOError as e: - errors.append(checks.Error( - "Could not read document email aliases:\n %s" % e, - hint="Please run the generate_draft_aliases management command to generate them.", - obj=None, - id="datatracker.E0005", - )) - - return errors - @checks.register('directories') def check_id_submission_directories(app_configs, **kwargs): # diff --git a/ietf/community/admin.py b/ietf/community/admin.py index 890819d9d9..4c947ad3f7 100644 --- a/ietf/community/admin.py +++ b/ietf/community/admin.py @@ -7,8 +7,8 @@ from ietf.community.models import CommunityList, SearchRule, EmailSubscription class CommunityListAdmin(admin.ModelAdmin): - list_display = ['id', 'user', 'group'] - raw_id_fields = ['user', 'group', 'added_docs'] + list_display = ['id', 'person', 'group'] + raw_id_fields = ['person', 'group', 'added_docs'] admin.site.register(CommunityList, CommunityListAdmin) class SearchRuleAdmin(admin.ModelAdmin): diff --git a/ietf/community/apps.py b/ietf/community/apps.py new file mode 100644 index 0000000000..ab0a6d6054 --- /dev/null +++ b/ietf/community/apps.py @@ -0,0 +1,12 @@ +# Copyright The IETF Trust 2024, All Rights Reserved + +from django.apps import AppConfig + + +class CommunityConfig(AppConfig): + name = "ietf.community" + + def ready(self): + """Initialize the app after the registry is populated""" + # implicitly connects @receiver-decorated signals + from . 
import signals # pyflakes: ignore diff --git a/ietf/community/forms.py b/ietf/community/forms.py index 8d72ce0d70..d3fa01dd19 100644 --- a/ietf/community/forms.py +++ b/ietf/community/forms.py @@ -30,6 +30,8 @@ def __init__(self, clist, rule_type, *args, **kwargs): super(SearchRuleForm, self).__init__(*args, **kwargs) def restrict_state(state_type, slug=None): + if "state" not in self.fields: + raise RuntimeError(f"Rule type {rule_type} cannot include state filtering") f = self.fields['state'] f.queryset = f.queryset.filter(used=True).filter(type=state_type) if slug: @@ -38,11 +40,15 @@ def restrict_state(state_type, slug=None): f.initial = f.queryset[0].pk f.widget = forms.HiddenInput() + if rule_type.endswith("_rfc"): + del self.fields["state"] # rfc rules must not look at document states + if rule_type in ["group", "group_rfc", "area", "area_rfc", "group_exp"]: if rule_type == "group_exp": restrict_state("draft", "expired") else: - restrict_state("draft", "rfc" if rule_type.endswith("rfc") else "active") + if not rule_type.endswith("_rfc"): + restrict_state("draft", "active") if rule_type.startswith("area"): self.fields["group"].label = "Area" @@ -70,7 +76,8 @@ def restrict_state(state_type, slug=None): del self.fields["text"] elif rule_type in ["author", "author_rfc", "shepherd", "ad"]: - restrict_state("draft", "rfc" if rule_type.endswith("rfc") else "active") + if not rule_type.endswith("_rfc"): + restrict_state("draft", "active") if rule_type.startswith("author"): self.fields["person"].label = "Author" @@ -84,7 +91,8 @@ def restrict_state(state_type, slug=None): del self.fields["text"] elif rule_type == "name_contains": - restrict_state("draft", "rfc" if rule_type.endswith("rfc") else "active") + if not rule_type.endswith("_rfc"): + restrict_state("draft", "active") del self.fields["person"] del self.fields["group"] @@ -106,14 +114,13 @@ def clean_text(self): class SubscriptionForm(forms.ModelForm): - def __init__(self, user, clist, *args, **kwargs): + def __init__(self, person, clist, *args, **kwargs): self.clist = clist - self.user = user super(SubscriptionForm, self).__init__(*args, **kwargs) self.fields["notify_on"].widget = forms.RadioSelect(choices=self.fields["notify_on"].choices) - self.fields["email"].queryset = self.fields["email"].queryset.filter(person__user=user, active=True).order_by("-primary") + self.fields["email"].queryset = self.fields["email"].queryset.filter(person=person, active=True).order_by("-primary") self.fields["email"].widget = forms.RadioSelect(choices=[t for t in self.fields["email"].choices if t[0]]) if self.fields["email"].queryset: diff --git a/ietf/community/migrations/0003_track_rfcs.py b/ietf/community/migrations/0003_track_rfcs.py new file mode 100644 index 0000000000..3c2d04097d --- /dev/null +++ b/ietf/community/migrations/0003_track_rfcs.py @@ -0,0 +1,50 @@ +# Generated by Django 4.2.3 on 2023-07-07 18:33 + +from django.db import migrations + + +def forward(apps, schema_editor): + """Track any RFCs that were created from tracked drafts""" + CommunityList = apps.get_model("community", "CommunityList") + RelatedDocument = apps.get_model("doc", "RelatedDocument") + + # Handle individually tracked documents + for cl in CommunityList.objects.all(): + for rfc in set( + RelatedDocument.objects.filter( + source__in=cl.added_docs.all(), + relationship__slug="became_rfc", + ).values_list("target__docs", flat=True) + ): + cl.added_docs.add(rfc) + + # Handle rules - rules ending with _rfc should no longer filter by state. 
+ # There are 9 CommunityLists with invalid author_rfc rules that are filtering + # by (draft, active) instead of (draft, rfc) state before migration. All but one + # also includes an author rule for (draft, active), so these will start following + # RFCs as well. The one exception will start tracking RFCs instead of I-Ds, which + # is probably what was intended, but will be a change in their user experience. + SearchRule = apps.get_model("community", "SearchRule") + rfc_rules = SearchRule.objects.filter(rule_type__endswith="_rfc") + rfc_rules.update(state=None) + +def reverse(apps, schema_editor): + Document = apps.get_model("doc", "Document") + for rfc in Document.objects.filter(type__slug="rfc"): + rfc.communitylist_set.clear() + + # See the comment above regarding author_rfc + SearchRule = apps.get_model("community", "SearchRule") + State = apps.get_model("doc", "State") + SearchRule.objects.filter(rule_type__endswith="_rfc").update( + state=State.objects.get(type_id="draft", slug="rfc") + ) + + +class Migration(migrations.Migration): + dependencies = [ + ("community", "0002_auto_20230320_1222"), + ("doc", "0014_move_rfc_docaliases"), + ] + + operations = [migrations.RunPython(forward, reverse)] diff --git a/ietf/community/migrations/0004_delete_useless_community_lists.py b/ietf/community/migrations/0004_delete_useless_community_lists.py new file mode 100644 index 0000000000..9f657a3c34 --- /dev/null +++ b/ietf/community/migrations/0004_delete_useless_community_lists.py @@ -0,0 +1,26 @@ +# Generated by Django 4.2.9 on 2024-01-05 21:28 + +from django.db import migrations + + +def forward(apps, schema_editor): + CommunityList = apps.get_model("community", "CommunityList") + # As of 2024-01-05, there are 570 personal CommunityLists with a user + # who has no associated Person. None of these has an EmailSubscription, + # so the lists are doing nothing and can be safely deleted. 
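+    # i.e. lists whose user FK is set but whose user has no Person; the assert
+    # below re-checks the "no EmailSubscription" assumption at migration time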
+ personal_lists_no_person = CommunityList.objects.exclude( + user__isnull=True + ).filter( + user__person__isnull=True + ) + # Confirm the assumption that none of the lists to be deleted has an EmailSubscription + assert not personal_lists_no_person.filter(emailsubscription__isnull=False).exists() + personal_lists_no_person.delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("community", "0003_track_rfcs"), + ] + + operations = [migrations.RunPython(forward)] diff --git a/ietf/community/migrations/0005_user_to_person.py b/ietf/community/migrations/0005_user_to_person.py new file mode 100644 index 0000000000..01d8950edb --- /dev/null +++ b/ietf/community/migrations/0005_user_to_person.py @@ -0,0 +1,54 @@ +# Generated by Django 4.2.2 on 2023-06-12 19:35 + +from django.conf import settings +from django.db import migrations +import django.db.models.deletion +import ietf.utils.models + + +def forward(apps, schema_editor): + CommunityList = apps.get_model('community', 'CommunityList') + for clist in CommunityList.objects.all(): + try: + clist.person = clist.user.person + except: + clist.person = None + clist.save() + +def reverse(apps, schema_editor): + CommunityList = apps.get_model('community', 'CommunityList') + for clist in CommunityList.objects.all(): + try: + clist.user = clist.person.user + except: + clist.user = None + clist.save() + +class Migration(migrations.Migration): + dependencies = [ + ("community", "0004_delete_useless_community_lists"), + ("person", "0001_initial"), + ] + + operations = [ + migrations.AddField( + model_name="communitylist", + name="person", + field=ietf.utils.models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="person.Person", + ), + ), + migrations.RunPython(forward, reverse), + migrations.RemoveField( + model_name="communitylist", + name="user", + field=ietf.utils.models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL), + ), + ] diff --git a/ietf/community/models.py b/ietf/community/models.py index 9b2383f211..6945918f9a 100644 --- a/ietf/community/models.py +++ b/ietf/community/models.py @@ -1,25 +1,23 @@ # Copyright The IETF Trust 2012-2020, All Rights Reserved # -*- coding: utf-8 -*- - -from django.contrib.auth.models import User from django.db import models -from django.db.models import signals from django.urls import reverse as urlreverse -from ietf.doc.models import Document, DocEvent, State +from ietf.doc.models import Document, State from ietf.group.models import Group from ietf.person.models import Person, Email from ietf.utils.models import ForeignKey + class CommunityList(models.Model): - user = ForeignKey(User, blank=True, null=True) + person = ForeignKey(Person, blank=True, null=True) group = ForeignKey(Group, blank=True, null=True) added_docs = models.ManyToManyField(Document) def long_name(self): - if self.user: - return 'Personal I-D list of %s' % self.user.username + if self.person: + return 'Personal I-D list of %s' % self.person.plain_name() elif self.group: return 'I-D list for %s' % self.group.name else: @@ -30,8 +28,8 @@ def __str__(self): def get_absolute_url(self): import ietf.community.views - if self.user: - return urlreverse(ietf.community.views.view_list, kwargs={ 'username': self.user.username }) + if self.person: + return urlreverse(ietf.community.views.view_list, kwargs={ 'email_or_name': self.person.email() }) elif self.group: return urlreverse("ietf.group.views.group_documents", kwargs={ 
'acronym': self.group.acronym }) return "" @@ -95,20 +93,3 @@ class EmailSubscription(models.Model): def __str__(self): return "%s to %s (%s changes)" % (self.email, self.community_list, self.notify_on) - - -def notify_events(sender, instance, **kwargs): - if not isinstance(instance, DocEvent): - return - - if instance.doc.type_id != 'draft': - return - - if getattr(instance, "skip_community_list_notification", False): - return - - from ietf.community.utils import notify_event_to_subscribers - notify_event_to_subscribers(instance) - - -signals.post_save.connect(notify_events) diff --git a/ietf/community/signals.py b/ietf/community/signals.py new file mode 100644 index 0000000000..20ee761129 --- /dev/null +++ b/ietf/community/signals.py @@ -0,0 +1,44 @@ +# Copyright The IETF Trust 2024, All Rights Reserved + +from django.conf import settings +from django.db import transaction +from django.db.models.signals import post_save +from django.dispatch import receiver + +from ietf.doc.models import DocEvent +from .tasks import notify_event_to_subscribers_task + + +def notify_of_event(event: DocEvent): + """Send subscriber notification emails for a 'draft'-related DocEvent + + If the event is attached to a draft of type 'doc', queues a task to send notification emails to + community list subscribers. No emails will be sent when SERVER_MODE is 'test'. + """ + if event.doc.type_id != "draft": + return + + if getattr(event, "skip_community_list_notification", False): + return + + # kludge alert: queuing a celery task in response to a signal can cause unexpected attempts to + # start a Celery task during tests. To prevent this, don't queue a celery task if we're running + # tests. + if settings.SERVER_MODE != "test": + # Wrap in on_commit in case a transaction is open + transaction.on_commit( + lambda: notify_event_to_subscribers_task.delay(event_id=event.pk) + ) + + +# dispatch_uid ensures only a single signal receiver binding is made +@receiver(post_save, dispatch_uid="notify_of_events_receiver_uid") +def notify_of_events_receiver(sender, instance, **kwargs): + """Call notify_of_event after saving a new DocEvent""" + if not isinstance(instance, DocEvent): + return + + if not kwargs.get("created", False): + return # only notify on creation + + notify_of_event(instance) diff --git a/ietf/community/tasks.py b/ietf/community/tasks.py new file mode 100644 index 0000000000..763a596495 --- /dev/null +++ b/ietf/community/tasks.py @@ -0,0 +1,15 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +from celery import shared_task + +from ietf.doc.models import DocEvent +from ietf.utils.log import log + + +@shared_task +def notify_event_to_subscribers_task(event_id): + from .utils import notify_event_to_subscribers + event = DocEvent.objects.filter(pk=event_id).first() + if event is None: + log(f"Unable to send subscriber notifications because DocEvent {event_id} was not found") + else: + notify_event_to_subscribers(event) diff --git a/ietf/community/tests.py b/ietf/community/tests.py index 3dd86f70e3..04f1433d61 100644 --- a/ietf/community/tests.py +++ b/ietf/community/tests.py @@ -1,60 +1,112 @@ -# Copyright The IETF Trust 2016-2020, All Rights Reserved +# Copyright The IETF Trust 2016-2023, All Rights Reserved # -*- coding: utf-8 -*- - +from unittest import mock from pyquery import PyQuery +from django.test.utils import override_settings from django.urls import reverse as urlreverse -from django.contrib.auth.models import User - -from django_webtest import WebTest +from lxml import etree -import debug # 
pyflakes:ignore +import debug # pyflakes:ignore from ietf.community.models import CommunityList, SearchRule, EmailSubscription -from ietf.community.utils import docs_matching_community_list_rule, community_list_rules_matching_doc -from ietf.community.utils import reset_name_contains_index_for_rule +from ietf.community.signals import notify_of_event +from ietf.community.utils import ( + docs_matching_community_list_rule, + community_list_rules_matching_doc, +) +from ietf.community.utils import ( + reset_name_contains_index_for_rule, + notify_event_to_subscribers, +) +from ietf.community.tasks import notify_event_to_subscribers_task import ietf.community.views from ietf.group.models import Group from ietf.group.utils import setup_default_community_list_for_group +from ietf.doc.factories import DocumentFactory from ietf.doc.models import State from ietf.doc.utils import add_state_change_event -from ietf.person.models import Person, Email -from ietf.utils.test_utils import login_testing_unauthorized -from ietf.utils.mail import outbox -from ietf.doc.factories import WgDraftFactory +from ietf.person.models import Person, Email, Alias +from ietf.utils.test_utils import TestCase, login_testing_unauthorized +from ietf.doc.factories import DocEventFactory, WgDraftFactory from ietf.group.factories import GroupFactory, RoleFactory -from ietf.person.factories import PersonFactory +from ietf.person.factories import PersonFactory, EmailFactory, AliasFactory + -class CommunityListTests(WebTest): +class CommunityListTests(TestCase): def test_rule_matching(self): - plain = PersonFactory(user__username='plain') - ad = Person.objects.get(user__username='ad') + plain = PersonFactory(user__username="plain") + ad = Person.objects.get(user__username="ad") draft = WgDraftFactory( - group__parent=Group.objects.get(acronym='farfut' ), + group__parent=Group.objects.get(acronym="farfut"), authors=[ad], ad=ad, shepherd=plain.email(), - states=[('draft-iesg','lc'),('draft','active')], + states=[("draft-iesg", "lc"), ("draft", "active")], ) - clist = CommunityList.objects.create(user=User.objects.get(username="plain")) + clist = CommunityList.objects.create(person=plain) - rule_group = SearchRule.objects.create(rule_type="group", group=draft.group, state=State.objects.get(type="draft", slug="active"), community_list=clist) - rule_group_rfc = SearchRule.objects.create(rule_type="group_rfc", group=draft.group, state=State.objects.get(type="draft", slug="rfc"), community_list=clist) - rule_area = SearchRule.objects.create(rule_type="area", group=draft.group.parent, state=State.objects.get(type="draft", slug="active"), community_list=clist) + rule_group = SearchRule.objects.create( + rule_type="group", + group=draft.group, + state=State.objects.get(type="draft", slug="active"), + community_list=clist, + ) + rule_group_rfc = SearchRule.objects.create( + rule_type="group_rfc", + group=draft.group, + state=State.objects.get(type="rfc", slug="published"), + community_list=clist, + ) + rule_area = SearchRule.objects.create( + rule_type="area", + group=draft.group.parent, + state=State.objects.get(type="draft", slug="active"), + community_list=clist, + ) - rule_state_iesg = SearchRule.objects.create(rule_type="state_iesg", state=State.objects.get(type="draft-iesg", slug="lc"), community_list=clist) + rule_state_iesg = SearchRule.objects.create( + rule_type="state_iesg", + state=State.objects.get(type="draft-iesg", slug="lc"), + community_list=clist, + ) - rule_author = SearchRule.objects.create(rule_type="author", 
state=State.objects.get(type="draft", slug="active"), person=Person.objects.filter(documentauthor__document=draft).first(), community_list=clist) + rule_author = SearchRule.objects.create( + rule_type="author", + state=State.objects.get(type="draft", slug="active"), + person=Person.objects.filter(documentauthor__document=draft).first(), + community_list=clist, + ) - rule_ad = SearchRule.objects.create(rule_type="ad", state=State.objects.get(type="draft", slug="active"), person=draft.ad, community_list=clist) + rule_ad = SearchRule.objects.create( + rule_type="ad", + state=State.objects.get(type="draft", slug="active"), + person=draft.ad, + community_list=clist, + ) - rule_shepherd = SearchRule.objects.create(rule_type="shepherd", state=State.objects.get(type="draft", slug="active"), person=draft.shepherd.person, community_list=clist) + rule_shepherd = SearchRule.objects.create( + rule_type="shepherd", + state=State.objects.get(type="draft", slug="active"), + person=draft.shepherd.person, + community_list=clist, + ) - rule_group_exp = SearchRule.objects.create(rule_type="group_exp", group=draft.group, state=State.objects.get(type="draft", slug="expired"), community_list=clist) + rule_group_exp = SearchRule.objects.create( + rule_type="group_exp", + group=draft.group, + state=State.objects.get(type="draft", slug="expired"), + community_list=clist, + ) - rule_name_contains = SearchRule.objects.create(rule_type="name_contains", state=State.objects.get(type="draft", slug="active"), text="draft-.*" + "-".join(draft.name.split("-")[2:]), community_list=clist) + rule_name_contains = SearchRule.objects.create( + rule_type="name_contains", + state=State.objects.get(type="draft", slug="active"), + text="draft-.*" + "-".join(draft.name.split("-")[2:]), + community_list=clist, + ) reset_name_contains_index_for_rule(rule_name_contains) # doc -> rules @@ -71,37 +123,71 @@ def test_rule_matching(self): # rule -> docs self.assertTrue(draft in list(docs_matching_community_list_rule(rule_group))) - self.assertTrue(draft not in list(docs_matching_community_list_rule(rule_group_rfc))) + self.assertTrue( + draft not in list(docs_matching_community_list_rule(rule_group_rfc)) + ) self.assertTrue(draft in list(docs_matching_community_list_rule(rule_area))) - self.assertTrue(draft in list(docs_matching_community_list_rule(rule_state_iesg))) + self.assertTrue( + draft in list(docs_matching_community_list_rule(rule_state_iesg)) + ) self.assertTrue(draft in list(docs_matching_community_list_rule(rule_author))) self.assertTrue(draft in list(docs_matching_community_list_rule(rule_ad))) self.assertTrue(draft in list(docs_matching_community_list_rule(rule_shepherd))) - self.assertTrue(draft in list(docs_matching_community_list_rule(rule_name_contains))) - self.assertTrue(draft not in list(docs_matching_community_list_rule(rule_group_exp))) + self.assertTrue( + draft in list(docs_matching_community_list_rule(rule_name_contains)) + ) + self.assertTrue( + draft not in list(docs_matching_community_list_rule(rule_group_exp)) + ) - draft.set_state(State.objects.get(type='draft', slug='expired')) + draft.set_state(State.objects.get(type="draft", slug="expired")) # doc -> rules matching_rules = list(community_list_rules_matching_doc(draft)) self.assertTrue(rule_group_exp in matching_rules) # rule -> docs - self.assertTrue(draft in list(docs_matching_community_list_rule(rule_group_exp))) + self.assertTrue( + draft in list(docs_matching_community_list_rule(rule_group_exp)) + ) - def test_view_list(self): - 
PersonFactory(user__username='plain') - draft = WgDraftFactory() + def test_view_list_duplicates(self): + person = PersonFactory( + name="John Q. Public", user__username="bazquux@example.com" + ) + PersonFactory(name="John Q. Public", user__username="foobar@example.com") - url = urlreverse(ietf.community.views.view_list, kwargs={ "username": "plain" }) + url = urlreverse( + ietf.community.views.view_list, + kwargs={"email_or_name": person.plain_name()}, + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + def complex_person(self, *args, **kwargs): + person = PersonFactory(*args, **kwargs) + EmailFactory(person=person) + AliasFactory(person=person) + return person + + def email_or_name_set(self, person): + return [e for e in Email.objects.filter(person=person)] + [ + a for a in Alias.objects.filter(person=person) + ] + + def do_view_list_test(self, person): + draft = WgDraftFactory() # without list - r = self.client.get(url) - self.assertEqual(r.status_code, 200) + for id in self.email_or_name_set(person): + url = urlreverse( + ietf.community.views.view_list, kwargs={"email_or_name": id} + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'") # with list - clist = CommunityList.objects.create(user=User.objects.get(username="plain")) - if not draft in clist.added_docs.all(): + clist = CommunityList.objects.create(person=person) + if draft not in clist.added_docs.all(): clist.added_docs.add(draft) SearchRule.objects.create( community_list=clist, @@ -109,86 +195,135 @@ def test_view_list(self): state=State.objects.get(type="draft", slug="active"), text="test", ) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - self.assertContains(r, draft.name) + for id in self.email_or_name_set(person): + url = urlreverse( + ietf.community.views.view_list, kwargs={"email_or_name": id} + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'") + self.assertContains(r, draft.name) - def test_manage_personal_list(self): + def test_view_list(self): + person = self.complex_person(user__username="plain") + self.do_view_list_test(person) + + def test_view_list_without_active_email(self): + person = self.complex_person(user__username="plain") + person.email_set.update(active=False) + self.do_view_list_test(person) - PersonFactory(user__username='plain') - ad = Person.objects.get(user__username='ad') + def test_manage_personal_list(self): + person = self.complex_person(user__username="plain") + ad = Person.objects.get(user__username="ad") draft = WgDraftFactory(authors=[ad]) - url = urlreverse(ietf.community.views.manage_list, kwargs={ "username": "plain" }) + url = urlreverse( + ietf.community.views.manage_list, kwargs={"email_or_name": person.email()} + ) login_testing_unauthorized(self, "plain", url) - page = self.app.get(url, user='plain') - self.assertEqual(page.status_int, 200) - - # add document - self.assertIn('add_document', page.forms) - form = page.forms['add_document'] - form['documents'].options=[(draft.pk, True, draft.name)] - page = form.submit('action',value='add_documents') - self.assertEqual(page.status_int, 302) - clist = CommunityList.objects.get(user__username="plain") - self.assertTrue(clist.added_docs.filter(pk=draft.pk)) - page = page.follow() - - self.assertContains(page, draft.name) - - # remove document - self.assertIn('remove_document_%s' % draft.pk, page.forms) - form = page.forms['remove_document_%s' % draft.pk] - page = form.submit('action',value='remove_document') 
- self.assertEqual(page.status_int, 302) - clist = CommunityList.objects.get(user__username="plain") - self.assertTrue(not clist.added_docs.filter(pk=draft.pk)) - page = page.follow() - - # add rule - r = self.client.post(url, { - "action": "add_rule", - "rule_type": "author_rfc", - "author_rfc-person": Person.objects.filter(documentauthor__document=draft).first().pk, - "author_rfc-state": State.objects.get(type="draft", slug="rfc").pk, - }) - self.assertEqual(r.status_code, 302) - clist = CommunityList.objects.get(user__username="plain") - self.assertTrue(clist.searchrule_set.filter(rule_type="author_rfc")) - - # add name_contains rule - r = self.client.post(url, { - "action": "add_rule", - "rule_type": "name_contains", - "name_contains-text": "draft.*mars", - "name_contains-state": State.objects.get(type="draft", slug="active").pk, - }) - self.assertEqual(r.status_code, 302) - clist = CommunityList.objects.get(user__username="plain") - self.assertTrue(clist.searchrule_set.filter(rule_type="name_contains")) - - # rule shows up on GET - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - rule = clist.searchrule_set.filter(rule_type="author_rfc").first() - q = PyQuery(r.content) - self.assertEqual(len(q('#r%s' % rule.pk)), 1) - - # remove rule - r = self.client.post(url, { - "action": "remove_rule", - "rule": rule.pk, - }) - - clist = CommunityList.objects.get(user__username="plain") - self.assertTrue(not clist.searchrule_set.filter(rule_type="author_rfc")) + for id in self.email_or_name_set(person): + url = urlreverse( + ietf.community.views.manage_list, kwargs={"email_or_name": id} + ) + r = self.client.get(url, user="plain") + self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'") + + # We can't call post() with follow=True because that 404's if + # the url contains unicode, because the django test client + # apparently re-encodes the already-encoded url. 
+ def follow(r): + redirect_url = r.url or url + return self.client.get(redirect_url, user="plain") + + # add document + self.assertContains(r, "add_document") + r = self.client.post( + url, {"action": "add_documents", "documents": draft.pk} + ) + self.assertEqual(r.status_code, 302, msg=f"id='{id}', url='{url}'") + clist = CommunityList.objects.get(person__user__username="plain") + self.assertTrue(clist.added_docs.filter(pk=draft.pk)) + r = follow(r) + self.assertContains(r, draft.name, status_code=200) + + # remove document + self.assertContains(r, "remove_document_%s" % draft.pk) + r = self.client.post( + url, {"action": "remove_document", "document": draft.pk} + ) + self.assertEqual(r.status_code, 302, msg=f"id='{id}', url='{url}'") + clist = CommunityList.objects.get(person__user__username="plain") + self.assertTrue(not clist.added_docs.filter(pk=draft.pk)) + r = follow(r) + self.assertNotContains(r, draft.name, status_code=200) + + # add rule + r = self.client.post( + url, + { + "action": "add_rule", + "rule_type": "author_rfc", + "author_rfc-person": Person.objects.filter( + documentauthor__document=draft + ) + .first() + .pk, + "author_rfc-state": State.objects.get( + type="rfc", slug="published" + ).pk, + }, + ) + self.assertEqual(r.status_code, 302, msg=f"id='{id}', url='{url}'") + clist = CommunityList.objects.get(person__user__username="plain") + self.assertTrue(clist.searchrule_set.filter(rule_type="author_rfc")) + + # add name_contains rule + r = self.client.post( + url, + { + "action": "add_rule", + "rule_type": "name_contains", + "name_contains-text": "draft.*mars", + "name_contains-state": State.objects.get( + type="draft", slug="active" + ).pk, + }, + ) + self.assertEqual(r.status_code, 302, msg=f"id='{id}', url='{url}'") + clist = CommunityList.objects.get(person__user__username="plain") + self.assertTrue(clist.searchrule_set.filter(rule_type="name_contains")) + + # rule shows up on GET + r = self.client.get(url) + self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'") + rule = clist.searchrule_set.filter(rule_type="author_rfc").first() + q = PyQuery(r.content) + self.assertEqual(len(q("#r%s" % rule.pk)), 1) + + # remove rule + r = self.client.post( + url, + { + "action": "remove_rule", + "rule": rule.pk, + }, + ) + + clist = CommunityList.objects.get(person__user__username="plain") + self.assertTrue(not clist.searchrule_set.filter(rule_type="author_rfc")) def test_manage_group_list(self): - draft = WgDraftFactory(group__acronym='mars') - RoleFactory(group__acronym='mars',name_id='chair',person=PersonFactory(user__username='marschairman')) + draft = WgDraftFactory(group__acronym="mars") + RoleFactory( + group__acronym="mars", + name_id="chair", + person=PersonFactory(user__username="marschairman"), + ) - url = urlreverse(ietf.community.views.manage_list, kwargs={ "acronym": draft.group.acronym }) + url = urlreverse( + ietf.community.views.manage_list, kwargs={"acronym": draft.group.acronym} + ) setup_default_community_list_for_group(draft.group) login_testing_unauthorized(self, "marschairman", url) @@ -197,95 +332,132 @@ def test_manage_group_list(self): self.assertEqual(r.status_code, 200) # Verify GET also works with non-WG and RG groups - for gtype in ['area','program']: + for gtype in ["area", "program"]: g = GroupFactory.create(type_id=gtype) # make sure the group's features have been initialized to improve coverage - _ = g.features # pyflakes:ignore + _ = g.features # pyflakes:ignore p = PersonFactory() - 
g.role_set.create(name_id={'area':'ad','program':'lead'}[gtype],person=p, email=p.email()) - url = urlreverse(ietf.community.views.manage_list, kwargs={ "acronym": g.acronym }) + g.role_set.create( + name_id={"area": "ad", "program": "lead"}[gtype], + person=p, + email=p.email(), + ) + url = urlreverse( + ietf.community.views.manage_list, kwargs={"acronym": g.acronym} + ) setup_default_community_list_for_group(g) - self.client.login(username=p.user.username,password=p.user.username+"+password") + self.client.login( + username=p.user.username, password=p.user.username + "+password" + ) r = self.client.get(url) self.assertEqual(r.status_code, 200) def test_track_untrack_document(self): - PersonFactory(user__username='plain') + person = self.complex_person(user__username="plain") draft = WgDraftFactory() - url = urlreverse(ietf.community.views.track_document, kwargs={ "username": "plain", "name": draft.name }) + url = urlreverse( + ietf.community.views.track_document, + kwargs={"email_or_name": person.email(), "name": draft.name}, + ) login_testing_unauthorized(self, "plain", url) - # track - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - - r = self.client.post(url) - self.assertEqual(r.status_code, 302) - clist = CommunityList.objects.get(user__username="plain") - self.assertEqual(list(clist.added_docs.all()), [draft]) + for id in self.email_or_name_set(person): + url = urlreverse( + ietf.community.views.track_document, + kwargs={"email_or_name": id, "name": draft.name}, + ) - # untrack - url = urlreverse(ietf.community.views.untrack_document, kwargs={ "username": "plain", "name": draft.name }) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) + # track + r = self.client.get(url) + self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'") + + r = self.client.post(url) + self.assertEqual(r.status_code, 302, msg=f"id='{id}', url='{url}'") + clist = CommunityList.objects.get(person__user__username="plain") + self.assertEqual(list(clist.added_docs.all()), [draft]) + + # untrack + url = urlreverse( + ietf.community.views.untrack_document, + kwargs={"email_or_name": id, "name": draft.name}, + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'") - r = self.client.post(url) - self.assertEqual(r.status_code, 302) - clist = CommunityList.objects.get(user__username="plain") - self.assertEqual(list(clist.added_docs.all()), []) + r = self.client.post(url) + self.assertEqual(r.status_code, 302, msg=f"id='{id}', url='{url}'") + clist = CommunityList.objects.get(person__user__username="plain") + self.assertEqual(list(clist.added_docs.all()), []) def test_track_untrack_document_through_ajax(self): - PersonFactory(user__username='plain') + person = self.complex_person(user__username="plain") draft = WgDraftFactory() - url = urlreverse(ietf.community.views.track_document, kwargs={ "username": "plain", "name": draft.name }) + url = urlreverse( + ietf.community.views.track_document, + kwargs={"email_or_name": person.email(), "name": draft.name}, + ) login_testing_unauthorized(self, "plain", url) - # track - r = self.client.post(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.json()["success"], True) - clist = CommunityList.objects.get(user__username="plain") - self.assertEqual(list(clist.added_docs.all()), [draft]) - - # untrack - url = urlreverse(ietf.community.views.untrack_document, kwargs={ "username": "plain", "name": draft.name }) - r = self.client.post(url, 
HTTP_X_REQUESTED_WITH='XMLHttpRequest') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.json()["success"], True) - clist = CommunityList.objects.get(user__username="plain") - self.assertEqual(list(clist.added_docs.all()), []) + for id in self.email_or_name_set(person): + url = urlreverse( + ietf.community.views.track_document, + kwargs={"email_or_name": id, "name": draft.name}, + ) + + # track + r = self.client.post(url, HTTP_X_REQUESTED_WITH="XMLHttpRequest") + self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'") + self.assertEqual(r.json()["success"], True) + clist = CommunityList.objects.get(person__user__username="plain") + self.assertEqual(list(clist.added_docs.all()), [draft]) + + # untrack + url = urlreverse( + ietf.community.views.untrack_document, + kwargs={"email_or_name": id, "name": draft.name}, + ) + r = self.client.post(url, HTTP_X_REQUESTED_WITH="XMLHttpRequest") + self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'") + self.assertEqual(r.json()["success"], True) + clist = CommunityList.objects.get(person__user__username="plain") + self.assertEqual(list(clist.added_docs.all()), []) def test_csv(self): - PersonFactory(user__username='plain') + person = self.complex_person(user__username="plain") draft = WgDraftFactory() - url = urlreverse(ietf.community.views.export_to_csv, kwargs={ "username": "plain" }) + for id in self.email_or_name_set(person): + url = urlreverse( + ietf.community.views.export_to_csv, kwargs={"email_or_name": id} + ) - # without list - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - - # with list - clist = CommunityList.objects.create(user=User.objects.get(username="plain")) - if not draft in clist.added_docs.all(): - clist.added_docs.add(draft) - SearchRule.objects.create( - community_list=clist, - rule_type="name_contains", - state=State.objects.get(type="draft", slug="active"), - text="test", - ) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - # this is a simple-minded test, we don't actually check the fields - self.assertContains(r, draft.name) + # without list + r = self.client.get(url) + self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'") + + # with list + clist = CommunityList.objects.create(person=person) + if draft not in clist.added_docs.all(): + clist.added_docs.add(draft) + SearchRule.objects.create( + community_list=clist, + rule_type="name_contains", + state=State.objects.get(type="draft", slug="active"), + text="test", + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'") + # this is a simple-minded test, we don't actually check the fields + self.assertContains(r, draft.name) def test_csv_for_group(self): draft = WgDraftFactory() - url = urlreverse(ietf.community.views.export_to_csv, kwargs={ "acronym": draft.group.acronym }) + url = urlreverse( + ietf.community.views.export_to_csv, kwargs={"acronym": draft.group.acronym} + ) setup_default_community_list_for_group(draft.group) @@ -294,60 +466,85 @@ def test_csv_for_group(self): self.assertEqual(r.status_code, 200) def test_feed(self): - PersonFactory(user__username='plain') + person = self.complex_person(user__username="plain") draft = WgDraftFactory() - url = urlreverse(ietf.community.views.feed, kwargs={ "username": "plain" }) - - # without list - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - - # with list - clist = CommunityList.objects.create(user=User.objects.get(username="plain")) - if not draft in clist.added_docs.all(): - 
clist.added_docs.add(draft) - SearchRule.objects.create( - community_list=clist, - rule_type="name_contains", - state=State.objects.get(type="draft", slug="active"), - text="test", - ) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - self.assertContains(r, draft.name) + for id in self.email_or_name_set(person): + url = urlreverse(ietf.community.views.feed, kwargs={"email_or_name": id}) - # only significant - r = self.client.get(url + "?significant=1") - self.assertEqual(r.status_code, 200) - self.assertNotContains(r, '<entry>') + # without list + r = self.client.get(url) + self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'") + + # with list + clist = CommunityList.objects.create(person=person) + if draft not in clist.added_docs.all(): + clist.added_docs.add(draft) + SearchRule.objects.create( + community_list=clist, + rule_type="name_contains", + state=State.objects.get(type="draft", slug="active"), + text="test", + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'") + self.assertContains(r, draft.name) + + # test atom xml + xml = etree.fromstring(r.content) + ns = {"atom": "http://www.w3.org/2005/Atom"} + updated = xml.xpath("/atom:feed/atom:updated", namespaces=ns)[0].text + entries = xml.xpath("/atom:feed/atom:entry", namespaces=ns) + self.assertIn("+00:00", updated) # RFC 3339 compatible UTC TZ + for entry in entries: + updated = entry.xpath("atom:updated", namespaces=ns)[0].text + published = entry.xpath("atom:published", namespaces=ns)[0].text + entry_id = entry.xpath("atom:id", namespaces=ns)[0].text + self.assertIn("+00:00", updated) + self.assertIn("+00:00", published) + self.assertIn( + "urn:datatracker-ietf-org:event:", entry_id + ) # atom:entry:id must be a valid URN + + # only significant + r = self.client.get(url + "?significant=1") + self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'") + self.assertNotContains(r, "<entry>") def test_feed_for_group(self): draft = WgDraftFactory() - url = urlreverse(ietf.community.views.feed, kwargs={ "acronym": draft.group.acronym }) + url = urlreverse( + ietf.community.views.feed, kwargs={"acronym": draft.group.acronym} + ) setup_default_community_list_for_group(draft.group) # test GET, rest is tested with personal list r = self.client.get(url) self.assertEqual(r.status_code, 200) - + def test_subscription(self): - PersonFactory(user__username='plain') + person = self.complex_person(user__username="plain") draft = WgDraftFactory() - url = urlreverse(ietf.community.views.subscription, kwargs={ "username": "plain" }) - + url = urlreverse( + ietf.community.views.subscription, kwargs={"email_or_name": person.email()} + ) login_testing_unauthorized(self, "plain", url) - # subscription without list - r = self.client.get(url) - self.assertEqual(r.status_code, 404) + for id in self.email_or_name_set(person): + url = urlreverse( + ietf.community.views.subscription, kwargs={"email_or_name": id} + ) + + # subscription without list + r = self.client.get(url) + self.assertEqual(r.status_code, 404, msg=f"id='{id}', url='{url}'") # subscription with list - clist = CommunityList.objects.create(user=User.objects.get(username="plain")) - if not draft in clist.added_docs.all(): + clist = CommunityList.objects.create(person=person) + if draft not in clist.added_docs.all(): clist.added_docs.add(draft) SearchRule.objects.create( community_list=clist, @@ -355,28 +552,51 @@ def test_subscription(self): state=State.objects.get(type="draft", slug="active"), text="test", ) - r = 
self.client.get(url) - self.assertEqual(r.status_code, 200) - # subscribe - email = Email.objects.filter(person__user__username="plain").first() - r = self.client.post(url, { "email": email.pk, "notify_on": "significant", "action": "subscribe" }) - self.assertEqual(r.status_code, 302) + for email in Email.objects.filter(person=person): + url = urlreverse( + ietf.community.views.subscription, kwargs={"email_or_name": email} + ) - subscription = EmailSubscription.objects.filter(community_list=clist, email=email, notify_on="significant").first() - - self.assertTrue(subscription) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) - # delete subscription - r = self.client.post(url, { "subscription_id": subscription.pk, "action": "unsubscribe" }) - self.assertEqual(r.status_code, 302) - self.assertEqual(EmailSubscription.objects.filter(community_list=clist, email=email, notify_on="significant").count(), 0) + # subscribe + r = self.client.post( + url, + {"email": email.pk, "notify_on": "significant", "action": "subscribe"}, + ) + self.assertEqual(r.status_code, 302) + + subscription = EmailSubscription.objects.filter( + community_list=clist, email=email, notify_on="significant" + ).first() + + self.assertTrue(subscription) + + # delete subscription + r = self.client.post( + url, {"subscription_id": subscription.pk, "action": "unsubscribe"} + ) + self.assertEqual(r.status_code, 302) + self.assertEqual( + EmailSubscription.objects.filter( + community_list=clist, email=email, notify_on="significant" + ).count(), + 0, + ) def test_subscription_for_group(self): - draft = WgDraftFactory(group__acronym='mars') - RoleFactory(group__acronym='mars',name_id='chair',person=PersonFactory(user__username='marschairman')) + draft = WgDraftFactory(group__acronym="mars") + RoleFactory( + group__acronym="mars", + name_id="chair", + person=PersonFactory(user__username="marschairman"), + ) - url = urlreverse(ietf.community.views.subscription, kwargs={ "acronym": draft.group.acronym }) + url = urlreverse( + ietf.community.views.subscription, kwargs={"acronym": draft.group.acronym} + ) setup_default_community_list_for_group(draft.group) @@ -385,27 +605,136 @@ def test_subscription_for_group(self): # test GET, rest is tested with personal list r = self.client.get(url) self.assertEqual(r.status_code, 200) - - def test_notification(self): - PersonFactory(user__username='plain') + + @mock.patch("ietf.community.signals.notify_of_event") + def test_notification_signal_receiver(self, mock_notify_of_event): + """Saving a newly created DocEvent should notify subscribers + + This implicitly tests that notify_of_event_receiver is hooked up to the post_save signal. + """ + # Arbitrary model that's not a DocEvent + person = PersonFactory.build() # builds but does not save... + mock_notify_of_event.reset_mock() # clear any calls that resulted from the factories + person.save() + self.assertFalse(mock_notify_of_event.called) + + # build a DocEvent that is not yet persisted + doc = DocumentFactory() + event = DocEventFactory.build(by=person, doc=doc) # builds but does not save... 
+ mock_notify_of_event.reset_mock() # clear any calls that resulted from the factories + event.save() + self.assertEqual( + mock_notify_of_event.call_count, + 1, + "notify_of_event should be called on creation of a DocEvent", + ) + self.assertEqual(mock_notify_of_event.call_args, mock.call(event)) + + # save the existing DocEvent and see that no notification is sent + mock_notify_of_event.reset_mock() + event.save() + self.assertFalse( + mock_notify_of_event.called, + "notify_of_event should not be called on save of an existing DocEvent", + ) + + # Mock out the on_commit call so we can tell whether the task was actually queued + @mock.patch("ietf.community.signals.transaction.on_commit", side_effect=lambda x: x()) + @mock.patch("ietf.community.signals.notify_event_to_subscribers_task") + def test_notify_of_event(self, mock_notify_task, mock_on_commit): + """The community notification task should be called as intended""" + person = PersonFactory() + doc = DocumentFactory() + event = DocEventFactory(by=person, doc=doc) + # be careful overriding SERVER_MODE - we do it here because the method + # under test does not make this call when in "test" mode + with override_settings(SERVER_MODE="not-test"): + notify_of_event(event) + self.assertTrue( + mock_notify_task.delay.called, + "notify_task should run for a DocEvent on a draft", + ) + mock_notify_task.reset_mock() + + event.skip_community_list_notification = True + # be careful overriding SERVER_MODE - we do it here because the method + # under test does not make this call when in "test" mode + with override_settings(SERVER_MODE="not-test"): + notify_of_event(event) + self.assertFalse( + mock_notify_task.delay.called, + "notify_task should not run when skip_community_list_notification is set", + ) + + event = DocEventFactory.build(by=person, doc=DocumentFactory(type_id="rfc")) + # be careful overriding SERVER_MODE - we do it here because the method + # under test does not make this call when in "test" mode + with override_settings(SERVER_MODE="not-test"): + notify_of_event(event) + self.assertFalse( + mock_notify_task.delay.called, + "notify_task should not run on a document with type 'rfc'", + ) + + @mock.patch("ietf.utils.mail.send_mail_text") + def test_notify_event_to_subscribers(self, mock_send_mail_text): person = PersonFactory(user__username="plain") draft = WgDraftFactory() - clist = CommunityList.objects.create(user=User.objects.get(username="plain")) - if not draft in clist.added_docs.all(): + clist = CommunityList.objects.create(person=person) + if draft not in clist.added_docs.all(): clist.added_docs.add(draft) - EmailSubscription.objects.create(community_list=clist, email=Email.objects.filter(person__user__username="plain").first(), notify_on="significant") + sub_to_significant = EmailSubscription.objects.create( + community_list=clist, + email=Email.objects.filter(person__user__username="plain").first(), + notify_on="significant", + ) + sub_to_all = EmailSubscription.objects.create( + community_list=clist, + email=Email.objects.filter(person__user__username="plain").first(), + notify_on="all", + ) - mailbox_before = len(outbox) active_state = State.objects.get(type="draft", slug="active") system = Person.objects.get(name="(System)") - add_state_change_event(draft, system, None, active_state) - self.assertEqual(len(outbox), mailbox_before) + event = add_state_change_event(draft, system, None, active_state) + notify_event_to_subscribers(event) + self.assertEqual(mock_send_mail_text.call_count, 1) + address = 
mock_send_mail_text.call_args[0][1] + subject = mock_send_mail_text.call_args[0][3] + content = mock_send_mail_text.call_args[0][4] + self.assertEqual(address, sub_to_all.email.address) + self.assertIn(draft.name, subject) + self.assertIn(clist.long_name(), content) - mailbox_before = len(outbox) rfc_state = State.objects.get(type="draft", slug="rfc") - add_state_change_event(draft, system, active_state, rfc_state) - self.assertEqual(len(outbox), mailbox_before + 1) - self.assertTrue(draft.name in outbox[-1]["Subject"]) - - \ No newline at end of file + event = add_state_change_event(draft, system, active_state, rfc_state) + mock_send_mail_text.reset_mock() + notify_event_to_subscribers(event) + self.assertEqual(mock_send_mail_text.call_count, 2) + addresses = [ + call_args[0][1] for call_args in mock_send_mail_text.call_args_list + ] + subjects = {call_args[0][3] for call_args in mock_send_mail_text.call_args_list} + contents = {call_args[0][4] for call_args in mock_send_mail_text.call_args_list} + self.assertCountEqual( + addresses, + [sub_to_significant.email.address, sub_to_all.email.address], + ) + self.assertEqual(len(subjects), 1) + self.assertIn(draft.name, subjects.pop()) + self.assertEqual(len(contents), 1) + self.assertIn(clist.long_name(), contents.pop()) + + @mock.patch("ietf.community.utils.notify_event_to_subscribers") + def test_notify_event_to_subscribers_task(self, mock_notify): + d = DocEventFactory() + notify_event_to_subscribers_task(event_id=d.pk) + self.assertEqual(mock_notify.call_count, 1) + self.assertEqual(mock_notify.call_args, mock.call(d)) + mock_notify.reset_mock() + + d.delete() + notify_event_to_subscribers_task(event_id=d.pk) + self.assertFalse(mock_notify.called) diff --git a/ietf/community/urls.py b/ietf/community/urls.py index f80547ffad..3ab132f2dc 100644 --- a/ietf/community/urls.py +++ b/ietf/community/urls.py @@ -4,11 +4,11 @@ from ietf.utils.urls import url urlpatterns = [ - url(r'^personal/(?P<username>[^/]+)/$', views.view_list), - url(r'^personal/(?P<username>[^/]+)/manage/$', views.manage_list), - url(r'^personal/(?P<username>[^/]+)/trackdocument/(?P<name>[^/]+)/$', views.track_document), - url(r'^personal/(?P<username>[^/]+)/untrackdocument/(?P<name>[^/]+)/$', views.untrack_document), - url(r'^personal/(?P<username>[^/]+)/csv/$', views.export_to_csv), - url(r'^personal/(?P<username>[^/]+)/feed/$', views.feed), - url(r'^personal/(?P<username>[^/]+)/subscription/$', views.subscription), + url(r'^personal/(?P<email_or_name>[^/]+)/$', views.view_list), + url(r'^personal/(?P<email_or_name>[^/]+)/manage/$', views.manage_list), + url(r'^personal/(?P<email_or_name>[^/]+)/trackdocument/(?P<name>[^/]+)/$', views.track_document), + url(r'^personal/(?P<email_or_name>[^/]+)/untrackdocument/(?P<name>[^/]+)/$', views.untrack_document), + url(r'^personal/(?P<email_or_name>[^/]+)/csv/$', views.export_to_csv), + url(r'^personal/(?P<email_or_name>[^/]+)/feed/$', views.feed), + url(r'^personal/(?P<email_or_name>[^/]+)/subscription/$', views.subscription), ] diff --git a/ietf/community/utils.py b/ietf/community/utils.py index 8130954b92..b6137095ef 100644 --- a/ietf/community/utils.py +++ b/ietf/community/utils.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2016-2020, All Rights Reserved +# Copyright The IETF Trust 2016-2023, All Rights Reserved # -*- coding: utf-8 -*- @@ -11,11 +11,9 @@ from ietf.community.models import CommunityList, EmailSubscription, SearchRule from ietf.doc.models import Document, State -from ietf.group.models import Role, Group +from ietf.group.models import Role from ietf.person.models import Person from ietf.ietfauth.utils import has_role -from django.contrib.auth.models import User -from django.shortcuts import get_object_or_404 from 
ietf.utils.mail import send_mail @@ -29,24 +27,12 @@ def states_of_significant_change(): Q(type="draft", slug__in=['rfc', 'dead']) ) -def lookup_community_list(username=None, acronym=None): - assert username or acronym - - if acronym: - group = get_object_or_404(Group, acronym=acronym) - clist = CommunityList.objects.filter(group=group).first() or CommunityList(group=group) - else: - user = get_object_or_404(User, username__iexact=username) - clist = CommunityList.objects.filter(user=user).first() or CommunityList(user=user) - - return clist - def can_manage_community_list(user, clist): if not user or not user.is_authenticated: return False - if clist.user: - return user == clist.user + if clist.person: + return user == clist.person.user elif clist.group: if has_role(user, 'Secretariat'): return True @@ -60,7 +46,7 @@ def reset_name_contains_index_for_rule(rule): if not rule.rule_type == "name_contains": return - rule.name_contains_index.set(Document.objects.filter(docalias__name__regex=rule.text)) + rule.name_contains_index.set(Document.objects.filter(name__regex=rule.text)) def update_name_contains_indexes_with_new_doc(doc): for r in SearchRule.objects.filter(rule_type="name_contains"): @@ -71,71 +57,113 @@ def update_name_contains_indexes_with_new_doc(doc): if re.search(r.text, doc.name) and not doc in r.name_contains_index.all(): r.name_contains_index.add(doc) + def docs_matching_community_list_rule(rule): docs = Document.objects.all() + + if rule.rule_type.endswith("_rfc"): + docs = docs.filter(type_id="rfc") # rule.state is ignored for RFCs + else: + docs = docs.filter(type_id="draft", states=rule.state) + if rule.rule_type in ['group', 'area', 'group_rfc', 'area_rfc']: - return docs.filter(Q(group=rule.group_id) | Q(group__parent=rule.group_id), states=rule.state) + return docs.filter(Q(group=rule.group_id) | Q(group__parent=rule.group_id)) elif rule.rule_type in ['group_exp']: - return docs.filter(group=rule.group_id, states=rule.state) + return docs.filter(group=rule.group_id) elif rule.rule_type.startswith("state_"): - return docs.filter(states=rule.state) - elif rule.rule_type in ["author", "author_rfc"]: - return docs.filter(states=rule.state, documentauthor__person=rule.person) + return docs + elif rule.rule_type == "author": + return docs.filter(documentauthor__person=rule.person) + elif rule.rule_type == "author_rfc": + return docs.filter(Q(rfcauthor__person=rule.person)|Q(rfcauthor__isnull=True,documentauthor__person=rule.person)) elif rule.rule_type == "ad": - return docs.filter(states=rule.state, ad=rule.person) + return docs.filter(ad=rule.person) elif rule.rule_type == "shepherd": - return docs.filter(states=rule.state, shepherd__person=rule.person) + return docs.filter(shepherd__person=rule.person) elif rule.rule_type == "name_contains": - return docs.filter(states=rule.state, searchrule=rule) + return docs.filter(searchrule=rule) raise NotImplementedError -def community_list_rules_matching_doc(doc): - states = list(doc.states.values_list("pk", flat=True)) +def community_list_rules_matching_doc(doc): rules = SearchRule.objects.none() + if doc.type_id not in ["draft", "rfc"]: + return rules # none + states = list(doc.states.values_list("pk", flat=True)) + # group and area rules if doc.group_id: groups = [doc.group_id] if doc.group.parent_id: groups.append(doc.group.parent_id) + rules_to_add = SearchRule.objects.filter(group__in=groups) + if doc.type_id == "rfc": + rules_to_add = rules_to_add.filter(rule_type__in=["group_rfc", "area_rfc"]) + else: + rules_to_add = 
rules_to_add.filter( + rule_type__in=["group", "area", "group_exp"], + state__in=states, + ) + rules |= rules_to_add + + # state rules (only relevant for I-Ds) + if doc.type_id == "draft": rules |= SearchRule.objects.filter( - rule_type__in=['group', 'area', 'group_rfc', 'area_rfc', 'group_exp'], + rule_type__in=[ + "state_iab", + "state_iana", + "state_iesg", + "state_irtf", + "state_ise", + "state_rfceditor", + "state_ietf", + ], state__in=states, - group__in=groups ) - rules |= SearchRule.objects.filter( - rule_type__in=['state_iab', 'state_iana', 'state_iesg', 'state_irtf', 'state_ise', 'state_rfceditor', 'state_ietf'], - state__in=states, - ) - - rules |= SearchRule.objects.filter( - rule_type__in=["author", "author_rfc"], - state__in=states, - person__in=list(Person.objects.filter(documentauthor__document=doc)), - ) - - if doc.ad_id: + # author rules + if doc.type_id == "rfc": + has_rfcauthors = doc.rfcauthor_set.exists() + rules |= SearchRule.objects.filter( + rule_type="author_rfc", + person__in=list( + Person.objects.filter( + Q(rfcauthor__document=doc) + if has_rfcauthors + else Q(documentauthor__document=doc) + ) + ), + ) + else: rules |= SearchRule.objects.filter( - rule_type="ad", + rule_type="author", state__in=states, - person=doc.ad_id, + person__in=list(Person.objects.filter(documentauthor__document=doc)), ) - if doc.shepherd_id: + # Other draft-only rules + if doc.type_id == "draft": + if doc.ad_id: + rules |= SearchRule.objects.filter( + rule_type="ad", + state__in=states, + person=doc.ad_id, + ) + + if doc.shepherd_id: + rules |= SearchRule.objects.filter( + rule_type="shepherd", + state__in=states, + person__email=doc.shepherd_id, + ) + rules |= SearchRule.objects.filter( - rule_type="shepherd", + rule_type="name_contains", state__in=states, - person__email=doc.shepherd_id, + name_contains_index=doc, # search our materialized index to avoid full scan ) - rules |= SearchRule.objects.filter( - rule_type="name_contains", - state__in=states, - name_contains_index=doc, # search our materialized index to avoid full scan - ) - return rules @@ -146,7 +174,11 @@ def docs_tracked_by_community_list(clist): # in theory, we could use an OR query, but databases seem to have # trouble with OR queries and complicated joins so do the OR'ing # manually - doc_ids = set(clist.added_docs.values_list("pk", flat=True)) + doc_ids = set() + for doc in clist.added_docs.all(): + doc_ids.add(doc.pk) + doc_ids.update(rfc.pk for rfc in doc.related_that_doc("became_rfc")) + for rule in clist.searchrule_set.all(): doc_ids = doc_ids | set(docs_matching_community_list_rule(rule).values_list("pk", flat=True)) diff --git a/ietf/community/views.py b/ietf/community/views.py index 1dbbfcaf0e..08b1c24fe5 100644 --- a/ietf/community/views.py +++ b/ietf/community/views.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2012-2020, All Rights Reserved +# Copyright The IETF Trust 2012-2023, All Rights Reserved # -*- coding: utf-8 -*- @@ -13,61 +13,119 @@ from django.utils import timezone from django.utils.html import strip_tags -import debug # pyflakes:ignore - -from ietf.community.models import SearchRule, EmailSubscription -from ietf.community.forms import SearchRuleTypeForm, SearchRuleForm, AddDocumentsForm, SubscriptionForm -from ietf.community.utils import lookup_community_list, can_manage_community_list -from ietf.community.utils import docs_tracked_by_community_list, docs_matching_community_list_rule -from ietf.community.utils import states_of_significant_change, reset_name_contains_index_for_rule 
+import debug # pyflakes:ignore + +from ietf.community.models import CommunityList, EmailSubscription, SearchRule +from ietf.community.forms import ( + SearchRuleTypeForm, + SearchRuleForm, + AddDocumentsForm, + SubscriptionForm, +) +from ietf.community.utils import can_manage_community_list +from ietf.community.utils import ( + docs_tracked_by_community_list, + docs_matching_community_list_rule, +) +from ietf.community.utils import ( + states_of_significant_change, + reset_name_contains_index_for_rule, +) +from ietf.group.models import Group from ietf.doc.models import DocEvent, Document from ietf.doc.utils_search import prepare_document_table +from ietf.person.utils import lookup_persons +from ietf.utils.decorators import ignore_view_kwargs from ietf.utils.http import is_ajax from ietf.utils.response import permission_denied -def view_list(request, username=None): - clist = lookup_community_list(username) +def lookup_community_list(request, email_or_name=None, acronym=None): + """Finds a CommunityList for a person or group + + Instantiates an unsaved CommunityList if one is not found. + + If the person or group cannot be found and uniquely identified, raises an Http404 exception + """ + assert email_or_name or acronym + + if acronym: + group = get_object_or_404(Group, acronym=acronym) + clist = CommunityList.objects.filter(group=group).first() or CommunityList( + group=group + ) + else: + persons = lookup_persons(email_or_name) + if len(persons) > 1: + if hasattr(request.user, "person") and request.user.person in persons: + person = request.user.person + else: + raise Http404( + f"Unable to identify the CommunityList for {email_or_name}" + ) + else: + person = persons[0] + clist = CommunityList.objects.filter(person=person).first() or CommunityList( + person=person + ) + return clist + + +def view_list(request, email_or_name=None): + clist = lookup_community_list(request, email_or_name) # may raise Http404 docs = docs_tracked_by_community_list(clist) docs, meta = prepare_document_table(request, docs, request.GET) - subscribed = request.user.is_authenticated and (EmailSubscription.objects.none() if clist.pk is None else EmailSubscription.objects.filter(community_list=clist, email__person__user=request.user)) + subscribed = request.user.is_authenticated and ( + EmailSubscription.objects.none() + if clist.pk is None + else EmailSubscription.objects.filter( + community_list=clist, email__person__user=request.user + ) + ) + + return render( + request, + "community/view_list.html", + { + "clist": clist, + "docs": docs, + "meta": meta, + "can_manage_list": can_manage_community_list(request.user, clist), + "subscribed": subscribed, + "email_or_name": email_or_name, + }, + ) - return render(request, 'community/view_list.html', { - 'clist': clist, - 'docs': docs, - 'meta': meta, - 'can_manage_list': can_manage_community_list(request.user, clist), - 'subscribed': subscribed, - }) @login_required -def manage_list(request, username=None, acronym=None, group_type=None): +@ignore_view_kwargs("group_type") +def manage_list(request, email_or_name=None, acronym=None): # we need to be a bit careful because clist may not exist in the # database so we can't call related stuff on it yet - clist = lookup_community_list(username, acronym) + clist = lookup_community_list(request, email_or_name, acronym) # may raise Http404 if not can_manage_community_list(request.user, clist): permission_denied(request, "You do not have permission to access this view") - action = request.POST.get('action') + action = 
request.POST.get("action") - if request.method == 'POST' and action == 'add_documents': + if request.method == "POST" and action == "add_documents": add_doc_form = AddDocumentsForm(request.POST) if add_doc_form.is_valid(): if clist.pk is None: clist.save() - for d in add_doc_form.cleaned_data['documents']: - if not d in clist.added_docs.all(): + for d in add_doc_form.cleaned_data["documents"]: + if d not in clist.added_docs.all(): clist.added_docs.add(d) return HttpResponseRedirect("") else: add_doc_form = AddDocumentsForm() - if request.method == 'POST' and action == 'remove_document': - document_id = request.POST.get('document') + if request.method == "POST" and action == "remove_document": + document_id = request.POST.get("document") if clist.pk is not None and document_id: document = get_object_or_404(clist.added_docs, id=document_id) clist.added_docs.remove(document) @@ -75,30 +133,29 @@ def manage_list(request, username=None, acronym=None, group_type=None): return HttpResponseRedirect("") rule_form = None - if request.method == 'POST' and action == 'add_rule': + if request.method == "POST" and action == "add_rule": rule_type_form = SearchRuleTypeForm(request.POST) if rule_type_form.is_valid(): - rule_type = rule_type_form.cleaned_data['rule_type'] - - if rule_type: - rule_form = SearchRuleForm(clist, rule_type, request.POST) - if rule_form.is_valid(): - if clist.pk is None: - clist.save() - - rule = rule_form.save(commit=False) - rule.community_list = clist - rule.rule_type = rule_type - rule.save() - if rule.rule_type == "name_contains": - reset_name_contains_index_for_rule(rule) + rule_type = rule_type_form.cleaned_data["rule_type"] + if rule_type: + rule_form = SearchRuleForm(clist, rule_type, request.POST) + if rule_form.is_valid(): + if clist.pk is None: + clist.save() + + rule = rule_form.save(commit=False) + rule.community_list = clist + rule.rule_type = rule_type + rule.save() + if rule.rule_type == "name_contains": + reset_name_contains_index_for_rule(rule) return HttpResponseRedirect("") else: rule_type_form = SearchRuleTypeForm() - if request.method == 'POST' and action == 'remove_rule': - rule_pk = request.POST.get('rule') + if request.method == "POST" and action == "remove_rule": + rule_pk = request.POST.get("rule") if clist.pk is not None and rule_pk: rule = get_object_or_404(SearchRule, pk=rule_pk, community_list=clist) rule.delete() @@ -109,53 +166,74 @@ def manage_list(request, username=None, acronym=None, group_type=None): for r in rules: r.matching_documents_count = docs_matching_community_list_rule(r).count() - empty_rule_forms = { rule_type: SearchRuleForm(clist, rule_type) for rule_type, _ in SearchRule.RULE_TYPES } + empty_rule_forms = { + rule_type: SearchRuleForm(clist, rule_type) + for rule_type, _ in SearchRule.RULE_TYPES + } total_count = docs_tracked_by_community_list(clist).count() - all_forms = [f for f in [rule_type_form, rule_form, add_doc_form, *empty_rule_forms.values()] - if f is not None] - return render(request, 'community/manage_list.html', { - 'clist': clist, - 'rules': rules, - 'individually_added': clist.added_docs.all() if clist.pk is not None else [], - 'rule_type_form': rule_type_form, - 'rule_form': rule_form, - 'empty_rule_forms': empty_rule_forms, - 'total_count': total_count, - 'add_doc_form': add_doc_form, - 'all_forms': all_forms, - }) + all_forms = [ + f + for f in [rule_type_form, rule_form, add_doc_form, *empty_rule_forms.values()] + if f is not None + ] + return render( + request, + "community/manage_list.html", + { + "clist": 
clist, + "rules": rules, + "individually_added": ( + clist.added_docs.all() if clist.pk is not None else [] + ), + "rule_type_form": rule_type_form, + "rule_form": rule_form, + "empty_rule_forms": empty_rule_forms, + "total_count": total_count, + "add_doc_form": add_doc_form, + "all_forms": all_forms, + }, + ) @login_required -def track_document(request, name, username=None, acronym=None): - doc = get_object_or_404(Document, docalias__name=name) +def track_document(request, name, email_or_name=None, acronym=None): + doc = get_object_or_404(Document, name=name) if request.method == "POST": - clist = lookup_community_list(username, acronym) + clist = lookup_community_list( + request, email_or_name, acronym + ) # may raise Http404 if not can_manage_community_list(request.user, clist): permission_denied(request, "You do not have permission to access this view") if clist.pk is None: clist.save() - if not doc in clist.added_docs.all(): + if doc not in clist.added_docs.all(): clist.added_docs.add(doc) if is_ajax(request): - return HttpResponse(json.dumps({ 'success': True }), content_type='application/json') + return HttpResponse( + json.dumps({"success": True}), content_type="application/json" + ) else: return HttpResponseRedirect(clist.get_absolute_url()) - return render(request, "community/track_document.html", { - "name": doc.name, - }) + return render( + request, + "community/track_document.html", + { + "name": doc.name, + }, + ) + @login_required -def untrack_document(request, name, username=None, acronym=None): - doc = get_object_or_404(Document, docalias__name=name) - clist = lookup_community_list(username, acronym) +def untrack_document(request, name, email_or_name=None, acronym=None): + doc = get_object_or_404(Document, name=name) + clist = lookup_community_list(request, email_or_name, acronym) # may raise Http404 if not can_manage_community_list(request.user, clist): permission_denied(request, "You do not have permission to access this view") @@ -164,28 +242,34 @@ def untrack_document(request, name, username=None, acronym=None): clist.added_docs.remove(doc) if is_ajax(request): - return HttpResponse(json.dumps({ 'success': True }), content_type='application/json') + return HttpResponse( + json.dumps({"success": True}), content_type="application/json" + ) else: return HttpResponseRedirect(clist.get_absolute_url()) - return render(request, "community/untrack_document.html", { - "name": doc.name, - }) - + return render( + request, + "community/untrack_document.html", + { + "name": doc.name, + }, + ) -def export_to_csv(request, username=None, acronym=None, group_type=None): - clist = lookup_community_list(username, acronym) - response = HttpResponse(content_type='text/csv') +@ignore_view_kwargs("group_type") +def export_to_csv(request, email_or_name=None, acronym=None): + clist = lookup_community_list(request, email_or_name, acronym) # may raise Http404 + response = HttpResponse(content_type="text/csv") if clist.group: filename = "%s-draft-list.csv" % clist.group.acronym else: filename = "draft-list.csv" - response['Content-Disposition'] = 'attachment; filename=%s' % filename + response["Content-Disposition"] = "attachment; filename=%s" % filename - writer = csv.writer(response, dialect=csv.excel, delimiter=str(',')) + writer = csv.writer(response, dialect=csv.excel, delimiter=str(",")) header = [ "Name", @@ -198,12 +282,12 @@ def export_to_csv(request, username=None, acronym=None, group_type=None): ] writer.writerow(header) - docs = 
docs_tracked_by_community_list(clist).select_related('type', 'group', 'ad') + docs = docs_tracked_by_community_list(clist).select_related("type", "group", "ad") for doc in docs.prefetch_related("states", "tags"): row = [] row.append(doc.name) row.append(doc.title) - e = doc.latest_event(type='new_revision') + e = doc.latest_event(type="new_revision") row.append(e.time.strftime("%Y-%m-%d") if e else "") row.append(strip_tags(doc.friendly_state())) row.append(doc.group.acronym if doc.group else "") @@ -214,53 +298,73 @@ def export_to_csv(request, username=None, acronym=None, group_type=None): return response -def feed(request, username=None, acronym=None, group_type=None): - clist = lookup_community_list(username, acronym) - significant = request.GET.get('significant', '') == '1' +@ignore_view_kwargs("group_type") +def feed(request, email_or_name=None, acronym=None): + clist = lookup_community_list(request, email_or_name, acronym) # may raise Http404 + significant = request.GET.get("significant", "") == "1" - documents = docs_tracked_by_community_list(clist).values_list('pk', flat=True) - since = timezone.now() - datetime.timedelta(days=14) + documents = docs_tracked_by_community_list(clist).values_list("pk", flat=True) + updated = timezone.now() + since = updated - datetime.timedelta(days=14) - events = DocEvent.objects.filter( - doc__id__in=documents, - time__gte=since, - ).distinct().order_by('-time', '-id').select_related("doc") + events = ( + DocEvent.objects.filter( + doc__id__in=documents, + time__gte=since, + ) + .distinct() + .order_by("-time", "-id") + .select_related("doc") + ) if significant: - events = events.filter(type="changed_state", statedocevent__state__in=list(states_of_significant_change())) + events = events.filter( + type="changed_state", + statedocevent__state__in=list(states_of_significant_change()), + ) host = request.get_host() - feed_url = 'https://%s%s' % (host, request.get_full_path()) + feed_url = "https://%s%s" % (host, request.get_full_path()) feed_id = uuid.uuid5(uuid.NAMESPACE_URL, str(feed_url)) - title = '%s RSS Feed' % clist.long_name() + title = "%s RSS Feed" % clist.long_name() if significant: - subtitle = 'Significant document changes' + subtitle = "Significant document changes" else: - subtitle = 'Document changes' - - return render(request, 'community/atom.xml', { - 'clist': clist, - 'entries': events[:50], - 'title': title, - 'subtitle': subtitle, - 'id': feed_id.urn, - 'updated': timezone.now(), - }, content_type='text/xml') + subtitle = "Document changes" + + return render( + request, + "community/atom.xml", + { + "clist": clist, + "entries": events[:50], + "title": title, + "subtitle": subtitle, + "id": feed_id.urn, + "updated": updated, + }, + content_type="text/xml", + ) @login_required -def subscription(request, username=None, acronym=None, group_type=None): - clist = lookup_community_list(username, acronym) +@ignore_view_kwargs("group_type") +def subscription(request, email_or_name=None, acronym=None): + clist = lookup_community_list(request, email_or_name, acronym) # may raise Http404 if clist.pk is None: raise Http404 - existing_subscriptions = EmailSubscription.objects.filter(community_list=clist, email__person__user=request.user) + person = request.user.person + + existing_subscriptions = EmailSubscription.objects.filter( + community_list=clist, email__person=person + ) - if request.method == 'POST': + if request.method == "POST": action = request.POST.get("action") if action == "subscribe": - form = SubscriptionForm(request.user, clist, 
request.POST) + form = SubscriptionForm(person, clist, request.POST) if form.is_valid(): subscription = form.save(commit=False) subscription.community_list = clist @@ -269,14 +373,20 @@ def subscription(request, username=None, acronym=None, group_type=None): return HttpResponseRedirect("") elif action == "unsubscribe": - existing_subscriptions.filter(pk=request.POST.get("subscription_id")).delete() + existing_subscriptions.filter( + pk=request.POST.get("subscription_id") + ).delete() return HttpResponseRedirect("") else: - form = SubscriptionForm(request.user, clist) - - return render(request, 'community/subscription.html', { - 'clist': clist, - 'form': form, - 'existing_subscriptions': existing_subscriptions, - }) + form = SubscriptionForm(person, clist) + + return render( + request, + "community/subscription.html", + { + "clist": clist, + "form": form, + "existing_subscriptions": existing_subscriptions, + }, + ) diff --git a/ietf/context_processors.py b/ietf/context_processors.py index baa8d7a5d2..5aaa4ab256 100644 --- a/ietf/context_processors.py +++ b/ietf/context_processors.py @@ -5,6 +5,7 @@ from django.conf import settings from django.utils import timezone from ietf import __version__, __patch__, __release_branch__, __release_hash__ +from opentelemetry.propagate import inject def server_mode(request): return {'server_mode': settings.SERVER_MODE} @@ -51,3 +52,8 @@ def timezone_now(request): return { 'timezone_now': timezone.now(), } + +def traceparent_id(request): + context_extras = {} + inject(context_extras) + return { "otel": context_extras } diff --git a/ietf/dbtemplate/fixtures/nomcom_templates.xml b/ietf/dbtemplate/fixtures/nomcom_templates.xml index abf0cb58f6..e7065b84cd 100644 --- a/ietf/dbtemplate/fixtures/nomcom_templates.xml +++ b/ietf/dbtemplate/fixtures/nomcom_templates.xml @@ -1,190 +1,190 @@ - - - - /nomcom/defaults/home.rst - Home page of group - - rst - Home page -========= - -This is the home page of the nomcom group. - - - - /nomcom/defaults/email/inexistent_person.txt - Email sent to chair of nomcom and secretariat when Email and Person are created if some of them don't exist - $email: Newly created email -$fullname: Fullname of the new person -$person_id: Id of the new Person object -$group: Name of the group - plain - Hello, - -A new person with name $fullname and email $email has been created. The new Person object has the following id: '$person_id'. - -Please, check if there is some more action nedeed. - - - - /nomcom/defaults/email/new_nominee.txt - Email sent to nominees when they are nominated - $nominee: Full name of the nominee -$position: Name of the position -$domain: Server domain -$accept_url: Url hash to accept nominations -$decline_url: Url hash to decline nominations - plain - Hi, - -You have been nominated for the position of $position. - -The NomCom would appreciate receiving an indication of whether or not you accept this nomination to stand for consideration as a candidate for this position. - -You can accept the nomination via web going to the following link https://$domain$accept_url or decline the nomination going the following link https://$domain$decline_url - -If you accept, you will need to fill out a questionnaire. You will receive the questionnaire by email. 
- -Best regards, - - - - - /nomcom/defaults/email/new_nomination.txt - Email sent to nominators and secretariat when the nominators make the nominations - $nominator: Full name of the nominator -$nominator_email: Email of the nominator -$nominee: Full name of the nominee -$nominee_email: Email of the nominee -$position: Nomination position - plain - A new nomination have been received. - -Nominator: $nominator ($nominator_email) -Nominee: $nominee ($nominee_email) -Position: $position - - - - /nomcom/defaults/position/questionnaire.txt - Questionnaire sent to the nomine - $position: Position - plain - Enter here the questionnaire for the position $position: - -Questionnaire - - - - /nomcom/defaults/position/requirements - Position requirements - $position: Position - rst - These are the requirements for the position $position: - -Requirements. - - - - /nomcom/defaults/position/header_questionnaire.txt - Header of the email that contains the questionnaire sent to the nomine - $nominee: Full name of the nomine -$position: Position - plain - Hi $nominee, this is the questionnaire for the position $position: - - - - - - /nomcom/defaults/email/nomination_accept_reminder.txt - Email sent to nominees asking them to accept (or decline) the nominations. - $positions: Nomination positions - plain - Hi, - -You have been nominated for the position of $position. - -The NomCom would appreciate receiving an indication of whether or not you accept this nomination to stand for consideration as a candidate for this position. - -You can accept the nomination via web going to the following link https://$domain$accept_url or decline the nomination going the following link https://$domain$decline_url - -If you accept, you will need to fill out a questionnaire. - -Best regards, - - - - /nomcom/defaults/email/nomination_receipt.txt - Email sent to nominator to get a confirmation mail containing feedback in cleartext - $nominee: Full name of the nominee -$position: Name of the position -$domain: Server domain -$accept_url: Url hash to accept nominations -$decline_url: Url hash to decline nominations - plain - Hi, - -Your nomination of $nominee for the position of -$position has been received and registered. - -The following comments have also been registered: - --------------------------------------------------------------------------- -$comments --------------------------------------------------------------------------- - -Thank you, - - - - /nomcom/defaults/email/feedback_receipt.txt - Email sent to feedback author to get a confirmation mail containing feedback in cleartext - $nominee: Full name of the nominee -$position: Nomination position -$comments: Comments on this candidate - plain - Hi, - -Your input regarding $about has been received and registered. - -The following comments have been registered: - --------------------------------------------------------------------------- -$comments --------------------------------------------------------------------------- - -Thank you, - - - - /nomcom/defaults/email/questionnaire_reminder.txt - Email sent to nominees reminding them to complete a questionnaire - $positions: Nomination positions - plain - -Thank you for accepting your nomination for the position of $position. - -Please remember to complete and return the questionnaire for this position at your earliest opportunity. -The questionnaire is repeated below for your convenience. 
- --------- - - - - /nomcom/defaults/topic/description - Description of Topic - $topic: Topic' - rst - This is a description of the topic "$topic" - -Describe the topic and add any information/instructions for the responder here. - - - - /nomcom/defaults/iesg_requirements - Generic IESG Requirements - rst - Generic IESG Requirements Yo! - - + + + + /nomcom/defaults/home.rst + Home page of group + + rst + Home page +========= + +This is the home page of the nomcom group. + + + + /nomcom/defaults/email/inexistent_person.txt + Email sent to chair of nomcom and secretariat when Email and Person are created if some of them don't exist + $email: Newly created email +$fullname: Fullname of the new person +$person_id: Id of the new Person object +$group: Name of the group + plain + Hello, + +A new person with name $fullname and email $email has been created. The new Person object has the following id: '$person_id'. + +Please, check if there is some more action nedeed. + + + + /nomcom/defaults/email/new_nominee.txt + Email sent to nominees when they are nominated + $nominee: Full name of the nominee +$position: Name of the position +$domain: Server domain +$accept_url: Url hash to accept nominations +$decline_url: Url hash to decline nominations + plain + Hi, + +You have been nominated for the position of $position. + +The NomCom would appreciate receiving an indication of whether or not you accept this nomination to stand for consideration as a candidate for this position. + +You can accept the nomination via web going to the following link https://$domain$accept_url or decline the nomination going the following link https://$domain$decline_url + +If you accept, you will need to fill out a questionnaire. You will receive the questionnaire by email. + +Best regards, + + + + + /nomcom/defaults/email/new_nomination.txt + Email sent to nominators and secretariat when the nominators make the nominations + $nominator: Full name of the nominator +$nominator_email: Email of the nominator +$nominee: Full name of the nominee +$nominee_email: Email of the nominee +$position: Nomination position + plain + A new nomination have been received. + +Nominator: $nominator ($nominator_email) +Nominee: $nominee ($nominee_email) +Position: $position + + + + /nomcom/defaults/position/questionnaire.txt + Questionnaire sent to the nomine + $position: Position + plain + Enter here the questionnaire for the position $position: + +Questionnaire + + + + /nomcom/defaults/position/requirements + Position requirements + $position: Position + rst + These are the requirements for the position $position: + +Requirements. + + + + /nomcom/defaults/position/header_questionnaire.txt + Header of the email that contains the questionnaire sent to the nomine + $nominee: Full name of the nomine +$position: Position + plain + Hi $nominee, this is the questionnaire for the position $position: + + + + + + /nomcom/defaults/email/nomination_accept_reminder.txt + Email sent to nominees asking them to accept (or decline) the nominations. + $positions: Nomination positions + plain + Hi, + +You have been nominated for the position of $position. + +The NomCom would appreciate receiving an indication of whether or not you accept this nomination to stand for consideration as a candidate for this position. + +You can accept the nomination via web going to the following link https://$domain$accept_url or decline the nomination going the following link https://$domain$decline_url + +If you accept, you will need to fill out a questionnaire. 
+ +Best regards, + + + + /nomcom/defaults/email/nomination_receipt.txt + Email sent to nominator to get a confirmation mail containing feedback in cleartext + $nominee: Full name of the nominee +$position: Name of the position +$domain: Server domain +$accept_url: Url hash to accept nominations +$decline_url: Url hash to decline nominations + plain + Hi, + +Your nomination of $nominee for the position of +$position has been received and registered. + +The following comments have also been registered: + +-------------------------------------------------------------------------- +$comments +-------------------------------------------------------------------------- + +Thank you, + + + + /nomcom/defaults/email/feedback_receipt.txt + Email sent to feedback author to get a confirmation mail containing feedback in cleartext + $nominee: Full name of the nominee +$position: Nomination position +$comments: Comments on this candidate + plain + Hi, + +Your input regarding $about has been received and registered. + +The following comments have been registered: + +-------------------------------------------------------------------------- +$comments +-------------------------------------------------------------------------- + +Thank you, + + + + /nomcom/defaults/email/questionnaire_reminder.txt + Email sent to nominees reminding them to complete a questionnaire + $positions: Nomination positions + plain + +Thank you for accepting your nomination for the position of $position. + +Please remember to complete and return the questionnaire for this position at your earliest opportunity. +The questionnaire is repeated below for your convenience. + +-------- + + + + /nomcom/defaults/topic/description + Description of Topic + $topic: Topic' + rst + This is a description of the topic "$topic" + +Describe the topic and add any information/instructions for the responder here. + + + + /nomcom/defaults/iesg_requirements + Generic IESG Requirements + rst + Generic IESG Requirements Yo! 
+ + diff --git a/ietf/doc/admin.py b/ietf/doc/admin.py index 64b9d9eff8..757d3da9f9 100644 --- a/ietf/doc/admin.py +++ b/ietf/doc/admin.py @@ -1,56 +1,60 @@ -# Copyright The IETF Trust 2010-2021, All Rights Reserved +# Copyright The IETF Trust 2010-2025, All Rights Reserved # -*- coding: utf-8 -*- from django.contrib import admin from django.db import models from django import forms +from django.db.models import QuerySet +from rangefilter.filters import DateRangeQuickSelectListFilterBuilder from .models import (StateType, State, RelatedDocument, DocumentAuthor, Document, RelatedDocHistory, - DocHistoryAuthor, DocHistory, DocAlias, DocReminder, DocEvent, NewRevisionDocEvent, + DocHistoryAuthor, DocHistory, DocReminder, DocEvent, NewRevisionDocEvent, StateDocEvent, ConsensusDocEvent, BallotType, BallotDocEvent, WriteupDocEvent, LastCallDocEvent, TelechatDocEvent, BallotPositionDocEvent, ReviewRequestDocEvent, InitialReviewDocEvent, AddedMessageEvent, SubmissionDocEvent, DeletedEvent, EditedAuthorsDocEvent, DocumentURL, ReviewAssignmentDocEvent, IanaExpertDocEvent, IRSGBallotDocEvent, DocExtResource, DocumentActionHolder, - BofreqEditorDocEvent, BofreqResponsibleDocEvent ) + BofreqEditorDocEvent, BofreqResponsibleDocEvent, StoredObject, RfcAuthor, + EditedRfcAuthorsDocEvent) +from ietf.utils.admin import SaferTabularInline from ietf.utils.validators import validate_external_resource_value +from .storage_utils import force_replication +from .utils import replicate_stored_objects_for_document + class StateTypeAdmin(admin.ModelAdmin): list_display = ["slug", "label"] admin.site.register(StateType, StateTypeAdmin) class StateAdmin(admin.ModelAdmin): - list_display = ["slug", "type", 'name', 'order', 'desc'] - list_filter = ["type", ] + list_display = ["slug", "type", 'name', 'order', 'desc', "used"] + list_filter = ["type", "used"] search_fields = ["slug", "type__label", "type__slug", "name", "desc"] filter_horizontal = ["next_states"] admin.site.register(State, StateAdmin) -# class DocAliasInline(admin.TabularInline): -# model = DocAlias -# extra = 1 - -class DocAuthorInline(admin.TabularInline): +class DocAuthorInline(SaferTabularInline): model = DocumentAuthor raw_id_fields = ['person', 'email'] extra = 1 -class DocActionHolderInline(admin.TabularInline): +class DocActionHolderInline(SaferTabularInline): model = DocumentActionHolder raw_id_fields = ['person'] extra = 1 -class RelatedDocumentInline(admin.TabularInline): +class RelatedDocumentInline(SaferTabularInline): model = RelatedDocument + fk_name= 'source' def this(self, instance): - return instance.source.canonical_name() + return instance.source.name readonly_fields = ['this', ] fields = ['this', 'relationship', 'target', ] raw_id_fields = ['target'] extra = 1 -class AdditionalUrlInLine(admin.TabularInline): +class AdditionalUrlInLine(SaferTabularInline): model = DocumentURL fields = ['tag','desc','url',] extra = 1 @@ -70,10 +74,12 @@ class Meta: class DocumentAuthorAdmin(admin.ModelAdmin): list_display = ['id', 'document', 'person', 'email', 'affiliation', 'country', 'order'] - search_fields = ['document__docalias__name', 'person__name', 'email__address', 'affiliation', 'country'] + search_fields = ['document__name', 'person__name', 'email__address', 'affiliation', 'country'] raw_id_fields = ["document", "person", "email"] admin.site.register(DocumentAuthor, DocumentAuthorAdmin) - + + + class DocumentAdmin(admin.ModelAdmin): list_display = ['name', 'rev', 'group', 'pages', 'intended_std_level', 'author_list', 'time'] search_fields = 
['name'] @@ -81,6 +87,7 @@ class DocumentAdmin(admin.ModelAdmin): raw_id_fields = ['group', 'shepherd', 'ad'] inlines = [DocAuthorInline, DocActionHolderInline, RelatedDocumentInline, AdditionalUrlInLine] form = DocumentForm + actions = ["replicate_stored_objects"] def save_model(self, request, obj, form, change): e = DocEvent.objects.create( @@ -95,6 +102,22 @@ def save_model(self, request, obj, form, change): def state(self, instance): return self.get_state() + @admin.action(description="Replicate related blobs") + def replicate_stored_objects(self, request, queryset: QuerySet[Document]): + doc_count = 0 + stored_obj_count = 0 + for doc in queryset.all(): + doc_count += 1 + if isinstance(doc, Document): + stored_obj_count += replicate_stored_objects_for_document(doc) + self.message_user( + request, + ( + f"Queued replication of a total of {stored_obj_count} StoredObject(s) " + f"for {doc_count} Document(s)" + ) + ) + admin.site.register(Document, DocumentAdmin) class DocHistoryAdmin(admin.ModelAdmin): @@ -108,14 +131,6 @@ def state(self, instance): admin.site.register(DocHistory, DocHistoryAdmin) -class DocAliasAdmin(admin.ModelAdmin): - list_display = ['name', 'targets'] - search_fields = ['name', 'docs__name'] - raw_id_fields = ['docs'] - def targets(self, obj): - return ', '.join([o.name for o in obj.docs.all()]) -admin.site.register(DocAlias, DocAliasAdmin) - class DocReminderAdmin(admin.ModelAdmin): list_display = ['id', 'event', 'type', 'due', 'active'] list_filter = ['type', 'due', 'active'] @@ -125,7 +140,7 @@ class DocReminderAdmin(admin.ModelAdmin): class RelatedDocumentAdmin(admin.ModelAdmin): list_display = ['source', 'target', 'relationship', ] list_filter = ['relationship', ] - search_fields = ['source__name', 'target__name', 'target__docs__name', ] + search_fields = ['source__name', 'target__name', ] raw_id_fields = ['source', 'target', ] admin.site.register(RelatedDocument, RelatedDocumentAdmin) @@ -153,6 +168,13 @@ class DocumentActionHolderAdmin(admin.ModelAdmin): # events +class DeletedEventAdmin(admin.ModelAdmin): + list_display = ['id', 'content_type', 'json', 'by', 'time'] + list_filter = ['time'] + raw_id_fields = ['content_type', 'by'] +admin.site.register(DeletedEvent, DeletedEventAdmin) + + class DocEventAdmin(admin.ModelAdmin): def event_type(self, obj): return str(obj.type) @@ -170,39 +192,43 @@ def short_desc(self, obj): admin.site.register(StateDocEvent, DocEventAdmin) admin.site.register(ConsensusDocEvent, DocEventAdmin) admin.site.register(BallotDocEvent, DocEventAdmin) +admin.site.register(IRSGBallotDocEvent, DocEventAdmin) admin.site.register(WriteupDocEvent, DocEventAdmin) admin.site.register(LastCallDocEvent, DocEventAdmin) admin.site.register(TelechatDocEvent, DocEventAdmin) -admin.site.register(ReviewRequestDocEvent, DocEventAdmin) -admin.site.register(ReviewAssignmentDocEvent, DocEventAdmin) admin.site.register(InitialReviewDocEvent, DocEventAdmin) -admin.site.register(AddedMessageEvent, DocEventAdmin) -admin.site.register(SubmissionDocEvent, DocEventAdmin) admin.site.register(EditedAuthorsDocEvent, DocEventAdmin) +admin.site.register(EditedRfcAuthorsDocEvent, DocEventAdmin) admin.site.register(IanaExpertDocEvent, DocEventAdmin) -class DeletedEventAdmin(admin.ModelAdmin): - list_display = ['id', 'content_type', 'json', 'by', 'time'] - list_filter = ['time'] - raw_id_fields = ['content_type', 'by'] -admin.site.register(DeletedEvent, DeletedEventAdmin) - class BallotPositionDocEventAdmin(DocEventAdmin): - raw_id_fields = ["doc", "by", "balloter", 
"ballot"] + raw_id_fields = DocEventAdmin.raw_id_fields + ["balloter", "ballot"] admin.site.register(BallotPositionDocEvent, BallotPositionDocEventAdmin) - -class IRSGBallotDocEventAdmin(DocEventAdmin): - raw_id_fields = ["doc", "by"] -admin.site.register(IRSGBallotDocEvent, IRSGBallotDocEventAdmin) class BofreqEditorDocEventAdmin(DocEventAdmin): - raw_id_fields = ["doc", "by", "editors" ] + raw_id_fields = DocEventAdmin.raw_id_fields + ["editors"] admin.site.register(BofreqEditorDocEvent, BofreqEditorDocEventAdmin) class BofreqResponsibleDocEventAdmin(DocEventAdmin): - raw_id_fields = ["doc", "by", "responsible" ] + raw_id_fields = DocEventAdmin.raw_id_fields + ["responsible"] admin.site.register(BofreqResponsibleDocEvent, BofreqResponsibleDocEventAdmin) +class ReviewRequestDocEventAdmin(DocEventAdmin): + raw_id_fields = DocEventAdmin.raw_id_fields + ["review_request"] +admin.site.register(ReviewRequestDocEvent, ReviewRequestDocEventAdmin) + +class ReviewAssignmentDocEventAdmin(DocEventAdmin): + raw_id_fields = DocEventAdmin.raw_id_fields + ["review_assignment"] +admin.site.register(ReviewAssignmentDocEvent, ReviewAssignmentDocEventAdmin) + +class AddedMessageEventAdmin(DocEventAdmin): + raw_id_fields = DocEventAdmin.raw_id_fields + ["message"] +admin.site.register(AddedMessageEvent, AddedMessageEventAdmin) + +class SubmissionDocEventAdmin(DocEventAdmin): + raw_id_fields = DocEventAdmin.raw_id_fields + ["submission"] +admin.site.register(SubmissionDocEvent, SubmissionDocEventAdmin) + class DocumentUrlAdmin(admin.ModelAdmin): list_display = ['id', 'doc', 'tag', 'url', 'desc', ] search_fields = ['doc__name', 'url', ] @@ -219,3 +245,41 @@ class DocExtResourceAdmin(admin.ModelAdmin): search_fields = ['doc__name', 'value', 'display_name', 'name__slug',] raw_id_fields = ['doc', ] admin.site.register(DocExtResource, DocExtResourceAdmin) + +class StoredObjectAdmin(admin.ModelAdmin): + list_display = ['store', 'name', 'doc_name', 'modified', 'is_deleted'] + list_filter = [ + 'store', + ('modified', DateRangeQuickSelectListFilterBuilder()), + ('deleted', DateRangeQuickSelectListFilterBuilder()), + ] + search_fields = ['name', 'doc_name', 'doc_rev'] + list_display_links = ['name'] + actions = ["replicate_stored_object"] + + @admin.display(boolean=True, description="Deleted?", ordering="deleted") + def is_deleted(self, instance): + return instance.deleted is not None + + @admin.action(description="Replicate related blobs") + def replicate_stored_object(self, request, queryset: QuerySet[StoredObject]): + stored_obj_count = 0 + for stored_object in queryset.all(): + if isinstance(stored_object, StoredObject): + force_replication(kind=stored_object.store, name=stored_object.name) + stored_obj_count += 1 + self.message_user( + request, + f"Queued replication of a total of {stored_obj_count} StoredObject(s)", + ) + + +admin.site.register(StoredObject, StoredObjectAdmin) + +class RfcAuthorAdmin(admin.ModelAdmin): + # the email field in the list_display/readonly_fields works through a @property + list_display = ['id', 'document', 'titlepage_name', 'person', 'email', 'affiliation', 'country', 'order'] + search_fields = ['document__name', 'titlepage_name', 'person__name', 'person__email__address', 'affiliation', 'country'] + raw_id_fields = ["document", "person"] + readonly_fields = ["email"] +admin.site.register(RfcAuthor, RfcAuthorAdmin) diff --git a/ietf/doc/api.py b/ietf/doc/api.py new file mode 100644 index 0000000000..73fff6b27f --- /dev/null +++ b/ietf/doc/api.py @@ -0,0 +1,213 @@ +# Copyright The IETF 
Trust 2024-2026, All Rights Reserved +"""Doc API implementations""" + +from django.db.models import ( + BooleanField, + Count, + OuterRef, + Prefetch, + Q, + QuerySet, + Subquery, +) +from django.db.models.functions import TruncDate +from django_filters import rest_framework as filters +from rest_framework import filters as drf_filters +from rest_framework.mixins import ListModelMixin, RetrieveModelMixin +from rest_framework.pagination import LimitOffsetPagination +from rest_framework.viewsets import GenericViewSet + +from ietf.group.models import Group +from ietf.name.models import StreamName, DocTypeName +from ietf.utils.timezone import RPC_TZINFO +from .models import ( + Document, + DocEvent, + RelatedDocument, + DocumentAuthor, + SUBSERIES_DOC_TYPE_IDS, +) +from .serializers import ( + RfcMetadataSerializer, + RfcStatus, + RfcSerializer, + SubseriesDocSerializer, +) + + +class RfcLimitOffsetPagination(LimitOffsetPagination): + default_limit = 10 + max_limit = 500 + + +class NumberInFilter(filters.BaseInFilter, filters.NumberFilter): + """Filter against a comma-separated list of numbers""" + pass + + +class RfcFilter(filters.FilterSet): + published = filters.DateFromToRangeFilter() + stream = filters.ModelMultipleChoiceFilter( + queryset=StreamName.objects.filter(used=True) + ) + number = NumberInFilter( + field_name="rfc_number" + ) + group = filters.ModelMultipleChoiceFilter( + queryset=Group.objects.all(), + field_name="group__acronym", + to_field_name="acronym", + ) + area = filters.ModelMultipleChoiceFilter( + queryset=Group.objects.areas(), + field_name="group__parent__acronym", + to_field_name="acronym", + ) + status = filters.MultipleChoiceFilter( + choices=[(slug, slug) for slug in RfcStatus.status_slugs], + method=RfcStatus.filter, + ) + sort = filters.OrderingFilter( + fields=( + ("rfc_number", "number"), # ?sort=number / ?sort=-number + ("published", "published"), # ?sort=published / ?sort=-published + ), + ) + + +class PrefetchRelatedDocument(Prefetch): + """Prefetch via a RelatedDocument + + Prefetches following RelatedDocument relationships to other docs. By default, includes + those for which the current RFC is the `source`. If `reverse` is True, includes those + for which it is the `target` instead. Defaults to only "rfc" documents. 
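+
+    For example, a minimal usage sketch (mirroring augment_rfc_queryset below;
+    the to_attr names here are illustrative, not required):
+
+        Document.objects.filter(type_id="rfc").prefetch_related(
+            PrefetchRelatedDocument(to_attr="obsoletes", relationship_id="obs"),
+            PrefetchRelatedDocument(
+                to_attr="obsoleted_by", relationship_id="obs", reverse=True
+            ),
+        )
+
+    Each to_attr then holds the matching RelatedDocument rows, with the related
+    Document on the far side already fetched via select_related.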
+ """ + + @staticmethod + def _get_queryset(relationship_id, reverse, doc_type_ids): + """Get queryset to use for the prefetch""" + if isinstance(doc_type_ids, str): + doc_type_ids = (doc_type_ids,) + + return RelatedDocument.objects.filter( + **{ + "relationship_id": relationship_id, + f"{'source' if reverse else 'target'}__type_id__in": doc_type_ids, + } + ).select_related("source" if reverse else "target") + + def __init__(self, to_attr, relationship_id, reverse=False, doc_type_ids="rfc"): + super().__init__( + lookup="targets_related" if reverse else "relateddocument_set", + queryset=self._get_queryset(relationship_id, reverse, doc_type_ids), + to_attr=to_attr, + ) + + +def augment_rfc_queryset(queryset: QuerySet[Document]): + return ( + queryset.select_related("std_level", "stream") + .prefetch_related( + Prefetch( + "group", + Group.objects.select_related("parent"), + ), + Prefetch( + "documentauthor_set", + DocumentAuthor.objects.select_related("email", "person"), + ), + PrefetchRelatedDocument( + to_attr="drafts", + relationship_id="became_rfc", + doc_type_ids="draft", + reverse=True, + ), + PrefetchRelatedDocument(to_attr="obsoletes", relationship_id="obs"), + PrefetchRelatedDocument( + to_attr="obsoleted_by", relationship_id="obs", reverse=True + ), + PrefetchRelatedDocument(to_attr="updates", relationship_id="updates"), + PrefetchRelatedDocument( + to_attr="updated_by", relationship_id="updates", reverse=True + ), + PrefetchRelatedDocument( + to_attr="subseries", + relationship_id="contains", + reverse=True, + doc_type_ids=SUBSERIES_DOC_TYPE_IDS, + ), + ) + .annotate( + published_datetime=Subquery( + DocEvent.objects.filter( + doc_id=OuterRef("pk"), + type="published_rfc", + ) + .order_by("-time") + .values("time")[:1] + ), + ) + .annotate(published=TruncDate("published_datetime", tzinfo=RPC_TZINFO)) + .annotate( + # Count of "verified-errata" tags will be 1 or 0, convert to Boolean + has_errata=Count( + "tags", + filter=Q( + tags__slug="verified-errata", + ), + output_field=BooleanField(), + ) + ) + ) + + +class RfcViewSet(ListModelMixin, RetrieveModelMixin, GenericViewSet): + api_key_endpoint = "ietf.api.red_api" # matches prefix in ietf/api/urls.py + lookup_field = "rfc_number" + queryset = augment_rfc_queryset( + Document.objects.filter(type_id="rfc", rfc_number__isnull=False) + ).order_by("-rfc_number") + + pagination_class = RfcLimitOffsetPagination + filter_backends = [filters.DjangoFilterBackend, drf_filters.SearchFilter] + filterset_class = RfcFilter + search_fields = ["title", "abstract"] + + def get_serializer_class(self): + if self.action == "retrieve": + return RfcSerializer + return RfcMetadataSerializer + + +class PrefetchSubseriesContents(Prefetch): + def __init__(self, to_attr): + super().__init__( + lookup="relateddocument_set", + queryset=RelatedDocument.objects.filter( + relationship_id="contains", + target__type_id="rfc", + ).prefetch_related( + Prefetch( + "target", + queryset=augment_rfc_queryset(Document.objects.all()), + ) + ), + to_attr=to_attr, + ) + + +class SubseriesFilter(filters.FilterSet): + type = filters.ModelMultipleChoiceFilter( + queryset=DocTypeName.objects.filter(pk__in=SUBSERIES_DOC_TYPE_IDS) + ) + + +class SubseriesViewSet(ListModelMixin, RetrieveModelMixin, GenericViewSet): + api_key_endpoint = "ietf.api.red_api" # matches prefix in ietf/api/urls.py + lookup_field = "name" + serializer_class = SubseriesDocSerializer + queryset = Document.objects.subseries_docs().prefetch_related( + PrefetchSubseriesContents(to_attr="contents") + ) + 
filter_backends = [filters.DjangoFilterBackend] + filterset_class = SubseriesFilter diff --git a/ietf/doc/expire.py b/ietf/doc/expire.py index f6779e0471..d42af628f8 100644 --- a/ietf/doc/expire.py +++ b/ietf/doc/expire.py @@ -3,6 +3,8 @@ # expiry of Internet-Drafts +import debug # pyflakes:ignore + from django.conf import settings from django.utils import timezone @@ -11,12 +13,13 @@ from typing import List, Optional # pyflakes:ignore +from ietf.doc.storage_utils import exists_in_storage, remove_from_storage +from ietf.doc.utils import update_action_holders from ietf.utils import log from ietf.utils.mail import send_mail -from ietf.doc.models import Document, DocEvent, State, IESG_SUBSTATE_TAGS +from ietf.doc.models import Document, DocEvent, State from ietf.person.models import Person from ietf.meeting.models import Meeting -from ietf.doc.utils import add_state_change_event, update_action_holders from ietf.mailtrigger.utils import gather_address_lists from ietf.utils.timezone import date_today, datetime_today, DEADLINE_TZINFO @@ -34,23 +37,47 @@ def expirable_drafts(queryset=None): # Populate this first time through (but after django has been set up) if nonexpirable_states is None: - # all IESG states except I-D Exists, AD Watching, and Dead block expiry - nonexpirable_states = list(State.objects.filter(used=True, type="draft-iesg").exclude(slug__in=("idexists","watching", "dead"))) + # all IESG states except I-D Exists and Dead block expiry + nonexpirable_states = list( + State.objects.filter(used=True, type="draft-iesg").exclude( + slug__in=("idexists", "dead") + ) + ) # sent to RFC Editor and RFC Published block expiry (the latter # shouldn't be possible for an active draft, though) - nonexpirable_states += list(State.objects.filter(used=True, type__in=("draft-stream-iab", "draft-stream-irtf", "draft-stream-ise"), slug__in=("rfc-edit", "pub"))) + nonexpirable_states += list( + State.objects.filter( + used=True, + type__in=( + "draft-stream-iab", + "draft-stream-irtf", + "draft-stream-ise", + "draft-stream-editorial", + ), + slug__in=("rfc-edit", "pub"), + ) + ) # other IRTF states that block expiration - nonexpirable_states += list(State.objects.filter(used=True, type_id="draft-stream-irtf", slug__in=("irsgpoll", "iesg-rev",))) - - return queryset.filter( - states__type="draft", states__slug="active" - ).exclude( - expires=None - ).exclude( - states__in=nonexpirable_states - ).exclude( - tags="rfc-rev" # under review by the RFC Editor blocks expiry - ).distinct() + nonexpirable_states += list( + State.objects.filter( + used=True, + type_id="draft-stream-irtf", + slug__in=( + "irsgpoll", + "iesg-rev", + ), + ) + ) + + return ( + queryset.filter(states__type="draft", states__slug="active") + .exclude(expires=None) + .exclude(states__in=nonexpirable_states) + .exclude( + tags="rfc-rev" # under review by the RFC Editor blocks expiry + ) + .distinct() + ) def get_soon_to_expire_drafts(days_of_warning): @@ -139,16 +166,32 @@ def move_file(f): if os.path.exists(src): try: + # ghostlinkd would keep this in the combined all archive since it would + # be sourced from a different place. 
But when ghostlinkd is removed, nothing + # new is needed here - the file will already exist in the combined archive shutil.move(src, dst) except IOError as e: if "No such file or directory" in str(e): pass else: raise - + + def remove_ftp_copy(f): + mark = Path(settings.FTP_DIR) / "internet-drafts" / f + if mark.exists(): + mark.unlink() + + def remove_from_active_draft_storage(file): + # Assumes the glob will never find a file with no suffix + ext = file.suffix[1:] + remove_from_storage("active-draft", f"{ext}/{file.name}", warn_if_missing=False) + + # Note that the object is already in the "draft" storage. src_dir = Path(settings.INTERNET_DRAFT_PATH) for file in src_dir.glob("%s-%s.*" % (doc.name, rev)): move_file(str(file.name)) + remove_ftp_copy(str(file.name)) + remove_from_active_draft_storage(file) def expire_draft(doc): # clean up files @@ -158,24 +201,11 @@ def expire_draft(doc): events = [] - # change the state - if doc.latest_event(type='started_iesg_process'): - new_state = State.objects.get(used=True, type="draft-iesg", slug="dead") - prev_state = doc.get_state(new_state.type_id) - prev_tags = doc.tags.filter(slug__in=IESG_SUBSTATE_TAGS) - if new_state != prev_state: - doc.set_state(new_state) - doc.tags.remove(*prev_tags) - e = add_state_change_event(doc, system, prev_state, new_state, prev_tags=prev_tags, new_tags=[]) - if e: - events.append(e) - e = update_action_holders(doc, prev_state, new_state, prev_tags=prev_tags, new_tags=[]) - if e: - events.append(e) - events.append(DocEvent.objects.create(doc=doc, rev=doc.rev, by=system, type="expired_document", desc="Document has expired")) + prev_draft_state=doc.get_state("draft") doc.set_state(State.objects.get(used=True, type="draft", slug="expired")) + events.append(update_action_holders(doc, prev_draft_state, doc.get_state("draft"),[],[])) doc.save_with_history(events) def clean_up_draft_files(): @@ -213,8 +243,19 @@ def splitext(fn): filename, revision = match.groups() def move_file_to(subdir): + # Similar to move_draft_files_to_archive shutil.move(path, os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, subdir, basename)) + mark = Path(settings.FTP_DIR) / "internet-drafts" / basename + if mark.exists(): + mark.unlink() + if ext: + # Note that we're not moving these strays anywhere - the assumption + # is that the active-draft blobstore will not get strays. + # See, however, the note about "major system failures" at "unknown_ids" + blobname = f"{ext[1:]}/{basename}" + if exists_in_storage("active-draft", blobname): + remove_from_storage("active-draft", blobname) try: doc = Document.objects.get(name=filename, rev=revision) @@ -229,4 +270,6 @@ def move_file_to(subdir): move_file_to("") except Document.DoesNotExist: + # All uses of this past 2014 seem related to major system failures. 
move_file_to("unknown_ids") + diff --git a/ietf/doc/factories.py b/ietf/doc/factories.py index 3ea9f2b8fa..1a178c6f31 100644 --- a/ietf/doc/factories.py +++ b/ietf/doc/factories.py @@ -7,14 +7,14 @@ import factory.fuzzy import datetime -from typing import Optional # pyflakes:ignore +from typing import Any # pyflakes:ignore from django.conf import settings from django.utils import timezone -from ietf.doc.models import ( Document, DocEvent, NewRevisionDocEvent, DocAlias, State, DocumentAuthor, +from ietf.doc.models import ( Document, DocEvent, NewRevisionDocEvent, State, DocumentAuthor, StateDocEvent, BallotPositionDocEvent, BallotDocEvent, BallotType, IRSGBallotDocEvent, TelechatDocEvent, - DocumentActionHolder, BofreqEditorDocEvent, BofreqResponsibleDocEvent, DocExtResource ) + DocumentActionHolder, BofreqEditorDocEvent, BofreqResponsibleDocEvent, DocExtResource, RfcAuthor ) from ietf.group.models import Group from ietf.person.factories import PersonFactory from ietf.group.factories import RoleFactory @@ -37,13 +37,16 @@ class Meta: model = Document skip_postgeneration_save = True + # n.b., a few attributes are typed as Any so mypy won't complain when we override in subclasses title = factory.Faker('sentence',nb_words=5) - abstract = factory.Faker('paragraph', nb_sentences=5) + abstract: Any = factory.Faker('paragraph', nb_sentences=5) rev = '00' - std_level_id = None # type: Optional[str] + std_level_id: Any = None intended_std_level_id = None time = timezone.now() - expires = factory.LazyAttribute(lambda o: o.time+datetime.timedelta(days=settings.INTERNET_DRAFT_DAYS_TO_EXPIRE)) + expires: Any = factory.LazyAttribute( + lambda o: o.time+datetime.timedelta(days=settings.INTERNET_DRAFT_DAYS_TO_EXPIRE) + ) pages = factory.fuzzy.FuzzyInteger(2,400) @@ -51,16 +54,11 @@ class Meta: def name(self, n): return draft_name_generator(self.type_id,self.group,n) - newrevisiondocevent = factory.RelatedFactory('ietf.doc.factories.NewRevisionDocEventFactory','doc') - @factory.post_generation - def other_aliases(obj, create, extracted, **kwargs): # pylint: disable=no-self-argument - alias = DocAliasFactory(name=obj.name) - alias.docs.add(obj) - if create and extracted: - for name in extracted: - alias = DocAliasFactory(name=name) - alias.docs.add(obj) + def newrevisiondocevent(obj, create, extracted, **kwargs): # pylint: disable=no-self-argument + if create: + if obj.type_id != "rfc": + NewRevisionDocEventFactory(doc=obj) @factory.post_generation def states(obj, create, extracted, **kwargs): # pylint: disable=no-self-argument @@ -83,13 +81,7 @@ def authors(obj, create, extracted, **kwargs): # pylint: disable=no-self-argumen def relations(obj, create, extracted, **kwargs): # pylint: disable=no-self-argument if create and extracted: for (rel_id, doc) in extracted: - if isinstance(doc, Document): - docalias = doc.docalias.first() - elif isinstance(doc, DocAlias): - docalias = doc - else: - continue - obj.relateddocument_set.create(relationship_id=rel_id, target=docalias) + obj.relateddocument_set.create(relationship_id=rel_id, target=doc) @factory.post_generation def create_revisions(obj, create, extracted, **kwargs): # pylint: disable=no-self-argument @@ -119,10 +111,12 @@ class DocumentFactory(BaseDocumentFactory): group = factory.SubFactory('ietf.group.factories.GroupFactory',acronym='none') -class IndividualDraftFactory(BaseDocumentFactory): - - type_id = 'draft' - group = factory.SubFactory('ietf.group.factories.GroupFactory',acronym='none') +class RfcFactory(BaseDocumentFactory): + type_id = "rfc" + rev 
= "" + rfc_number = factory.Sequence(lambda n: n + 1000) + name = factory.LazyAttribute(lambda o: f"rfc{o.rfc_number:d}") + expires = None @factory.post_generation def states(obj, create, extracted, **kwargs): @@ -131,15 +125,14 @@ def states(obj, create, extracted, **kwargs): if extracted: for (state_type_id,state_slug) in extracted: obj.set_state(State.objects.get(type_id=state_type_id,slug=state_slug)) - if not obj.get_state('draft-iesg'): - obj.set_state(State.objects.get(type_id='draft-iesg',slug='idexists')) else: - obj.set_state(State.objects.get(type_id='draft',slug='active')) - obj.set_state(State.objects.get(type_id='draft-iesg',slug='idexists')) + obj.set_state(State.objects.get(type_id='rfc',slug='published')) + -class IndividualRfcFactory(IndividualDraftFactory): +class IndividualDraftFactory(BaseDocumentFactory): - alias2 = factory.RelatedFactory('ietf.doc.factories.DocAliasFactory','document',name=factory.Sequence(lambda n: 'rfc%04d'%(n+1000))) + type_id = 'draft' + group = factory.SubFactory('ietf.group.factories.GroupFactory',acronym='none') @factory.post_generation def states(obj, create, extracted, **kwargs): @@ -148,17 +141,17 @@ def states(obj, create, extracted, **kwargs): if extracted: for (state_type_id,state_slug) in extracted: obj.set_state(State.objects.get(type_id=state_type_id,slug=state_slug)) + if not obj.get_state('draft-iesg'): + obj.set_state(State.objects.get(type_id='draft-iesg',slug='idexists')) else: - obj.set_state(State.objects.get(type_id='draft',slug='rfc')) + obj.set_state(State.objects.get(type_id='draft',slug='active')) + obj.set_state(State.objects.get(type_id='draft-iesg',slug='idexists')) - @factory.post_generation - def reset_canonical_name(obj, create, extracted, **kwargs): - if hasattr(obj, '_canonical_name'): - del obj._canonical_name - return None +class IndividualRfcFactory(RfcFactory): + group = factory.SubFactory('ietf.group.factories.GroupFactory',acronym='none') -class WgDraftFactory(BaseDocumentFactory): +class WgDraftFactory(BaseDocumentFactory): type_id = 'draft' group = factory.SubFactory('ietf.group.factories.GroupFactory',type_id='wg') stream_id = 'ietf' @@ -177,30 +170,12 @@ def states(obj, create, extracted, **kwargs): obj.set_state(State.objects.get(type_id='draft-stream-ietf',slug='wg-doc')) obj.set_state(State.objects.get(type_id='draft-iesg',slug='idexists')) -class WgRfcFactory(WgDraftFactory): - - alias2 = factory.RelatedFactory('ietf.doc.factories.DocAliasFactory','document',name=factory.Sequence(lambda n: 'rfc%04d'%(n+1000))) +class WgRfcFactory(RfcFactory): + group = factory.SubFactory('ietf.group.factories.GroupFactory',type_id='wg') + stream_id = 'ietf' std_level_id = 'ps' - @factory.post_generation - def states(obj, create, extracted, **kwargs): - if not create: - return - if extracted: - for (state_type_id,state_slug) in extracted: - obj.set_state(State.objects.get(type_id=state_type_id,slug=state_slug)) - if not obj.get_state('draft-iesg'): - obj.set_state(State.objects.get(type_id='draft-iesg', slug='pub')) - else: - obj.set_state(State.objects.get(type_id='draft',slug='rfc')) - obj.set_state(State.objects.get(type_id='draft-iesg', slug='pub')) - - @factory.post_generation - def reset_canonical_name(obj, create, extracted, **kwargs): - if hasattr(obj, '_canonical_name'): - del obj._canonical_name - return None class RgDraftFactory(BaseDocumentFactory): @@ -223,34 +198,11 @@ def states(obj, create, extracted, **kwargs): obj.set_state(State.objects.get(type_id='draft-iesg',slug='idexists')) -class 
RgRfcFactory(RgDraftFactory): - - alias2 = factory.RelatedFactory('ietf.doc.factories.DocAliasFactory','document',name=factory.Sequence(lambda n: 'rfc%04d'%(n+1000))) - +class RgRfcFactory(RfcFactory): + group = factory.SubFactory('ietf.group.factories.GroupFactory',type_id='rg') + stream_id = 'irtf' std_level_id = 'inf' - @factory.post_generation - def states(obj, create, extracted, **kwargs): - if not create: - return - if extracted: - for (state_type_id,state_slug) in extracted: - obj.set_state(State.objects.get(type_id=state_type_id,slug=state_slug)) - if not obj.get_state('draft-stream-irtf'): - obj.set_state(State.objects.get(type_id='draft-stream-irtf', slug='pub')) - if not obj.get_state('draft-iesg'): - obj.set_state(State.objects.get(type_id='draft-iesg',slug='idexists')) - else: - obj.set_state(State.objects.get(type_id='draft',slug='rfc')) - obj.set_state(State.objects.get(type_id='draft-stream-irtf', slug='pub')) - obj.set_state(State.objects.get(type_id='draft-iesg',slug='idexists')) - - @factory.post_generation - def reset_canonical_name(obj, create, extracted, **kwargs): - if hasattr(obj, '_canonical_name'): - del obj._canonical_name - return None - class CharterFactory(BaseDocumentFactory): @@ -279,7 +231,7 @@ def changes_status_of(obj, create, extracted, **kwargs): for (rel, target) in extracted: obj.relateddocument_set.create(relationship_id=rel,target=target) else: - obj.relateddocument_set.create(relationship_id='tobcp', target=WgRfcFactory().docalias.first()) + obj.relateddocument_set.create(relationship_id='tobcp', target=WgRfcFactory()) @factory.post_generation def states(obj, create, extracted, **kwargs): @@ -306,9 +258,9 @@ def review_of(obj, create, extracted, **kwargs): if not create: return if extracted: - obj.relateddocument_set.create(relationship_id='conflrev',target=extracted.docalias.first()) + obj.relateddocument_set.create(relationship_id='conflrev',target=extracted) else: - obj.relateddocument_set.create(relationship_id='conflrev',target=DocumentFactory(name=obj.name.replace('conflict-review-','draft-'),type_id='draft',group=Group.objects.get(type_id='individ')).docalias.first()) + obj.relateddocument_set.create(relationship_id='conflrev',target=DocumentFactory(name=obj.name.replace('conflict-review-','draft-'),type_id='draft',group=Group.objects.get(type_id='individ'))) @factory.post_generation @@ -327,31 +279,13 @@ class ReviewFactory(BaseDocumentFactory): name = factory.LazyAttribute(lambda o: 'review-doesnotexist-00-%s-%s'%(o.group.acronym,date_today().isoformat())) group = factory.SubFactory('ietf.group.factories.GroupFactory',type_id='review') -class DocAliasFactory(factory.django.DjangoModelFactory): - class Meta: - model = DocAlias - skip_postgeneration_save = True - - @factory.post_generation - def document(self, create, extracted, **kwargs): - if create and extracted: - self.docs.add(extracted) - - @factory.post_generation - def docs(self, create, extracted, **kwargs): - if create and extracted: - for doc in extracted: - if not doc in self.docs.all(): - self.docs.add(doc) - - class DocEventFactory(factory.django.DjangoModelFactory): class Meta: model = DocEvent type = 'added_comment' by = factory.SubFactory('ietf.person.factories.PersonFactory') - doc = factory.SubFactory(DocumentFactory) + doc: Any = factory.SubFactory(DocumentFactory) # `Any` to appease mypy when a subclass overrides doc desc = factory.Faker('sentence',nb_words=6) @factory.lazy_attribute @@ -377,6 +311,12 @@ class Meta: def desc(self): return 'New version available 
%s-%s'%(self.doc.name,self.rev) +class PublishedRfcDocEventFactory(DocEventFactory): + class Meta: + model = DocEvent + type = "published_rfc" + doc = factory.SubFactory(WgRfcFactory) + class StateDocEventFactory(DocEventFactory): class Meta: model = StateDocEvent @@ -448,6 +388,18 @@ class Meta: country = factory.Faker('country') order = factory.LazyAttribute(lambda o: o.document.documentauthor_set.count() + 1) +class RfcAuthorFactory(factory.django.DjangoModelFactory): + class Meta: + model = RfcAuthor + + document = factory.SubFactory(DocumentFactory) + titlepage_name = factory.LazyAttribute( + lambda obj: " ".join([obj.person.initials(), obj.person.last_name()]) + ) + person = factory.SubFactory('ietf.person.factories.PersonFactory') + affiliation = factory.Faker('company') + order = factory.LazyAttribute(lambda o: o.document.rfcauthor_set.count() + 1) + class WgDocumentAuthorFactory(DocumentAuthorFactory): document = factory.SubFactory(WgDraftFactory) @@ -557,33 +509,8 @@ def states(obj, create, extracted, **kwargs): obj.set_state(State.objects.get(type_id='draft-stream-editorial',slug='active')) obj.set_state(State.objects.get(type_id='draft-iesg',slug='idexists')) -class EditorialRfcFactory(RgDraftFactory): - - alias2 = factory.RelatedFactory('ietf.doc.factories.DocAliasFactory','document',name=factory.Sequence(lambda n: 'rfc%04d'%(n+1000))) - - std_level_id = 'inf' - - @factory.post_generation - def states(obj, create, extracted, **kwargs): - if not create: - return - if extracted: - for (state_type_id,state_slug) in extracted: - obj.set_state(State.objects.get(type_id=state_type_id,slug=state_slug)) - if not obj.get_state('draft-stream-editorial'): - obj.set_state(State.objects.get(type_id='draft-stream-editorial', slug='pub')) - if not obj.get_state('draft-iesg'): - obj.set_state(State.objects.get(type_id='draft-iesg',slug='idexists')) - else: - obj.set_state(State.objects.get(type_id='draft',slug='rfc')) - obj.set_state(State.objects.get(type_id='draft-stream-editorial', slug='pub')) - obj.set_state(State.objects.get(type_id='draft-iesg',slug='idexists')) - - @factory.post_generation - def reset_canonical_name(obj, create, extracted, **kwargs): - if hasattr(obj, '_canonical_name'): - del obj._canonical_name - return None +class EditorialRfcFactory(RgRfcFactory): + pass class StatementFactory(BaseDocumentFactory): type_id = "statement" @@ -611,3 +538,31 @@ def states(obj, create, extracted, **kwargs): obj.set_state(State.objects.get(type_id=state_type_id, slug=state_slug)) else: obj.set_state(State.objects.get(type_id="statement", slug="active")) + +class SubseriesFactory(factory.django.DjangoModelFactory): + class Meta: + model = Document + skip_postgeneration_save = True + + @factory.lazy_attribute_sequence + def name(self, n): + return f"{self.type_id}{n}" + + @factory.post_generation + def contains(obj, create, extracted, **kwargs): + if not create: + return + if extracted: + for doc in extracted: + obj.relateddocument_set.create(relationship_id="contains",target=doc) + else: + obj.relateddocument_set.create(relationship_id="contains", target=RfcFactory()) + +class BcpFactory(SubseriesFactory): + type_id="bcp" + +class StdFactory(SubseriesFactory): + type_id="std" + +class FyiFactory(SubseriesFactory): + type_id="fyi" diff --git a/ietf/doc/feeds.py b/ietf/doc/feeds.py index c5bb467e9b..0269906fcf 100644 --- a/ietf/doc/feeds.py +++ b/ietf/doc/feeds.py @@ -1,11 +1,11 @@ -# Copyright The IETF Trust 2007-2020, All Rights Reserved -# -*- coding: utf-8 -*- +# Copyright The IETF 
Trust 2007-2026, All Rights Reserved import debug # pyflakes:ignore import datetime import unicodedata +from django.conf import settings from django.contrib.syndication.views import Feed, FeedDoesNotExist from django.utils.feedgenerator import Atom1Feed, Rss201rev2Feed from django.urls import reverse as urlreverse @@ -36,7 +36,7 @@ class DocumentChangesFeed(Feed): feed_type = Atom1Feed def get_object(self, request, name): - return Document.objects.get(docalias__name=name) + return Document.objects.get(name=name) def title(self, obj): return "Changes for %s" % obj.display_name() @@ -46,7 +46,7 @@ def link(self, obj): raise FeedDoesNotExist return urlreverse( "ietf.doc.views_doc.document_history", - kwargs=dict(name=obj.canonical_name()), + kwargs=dict(name=obj.name), ) def subtitle(self, obj): @@ -86,7 +86,7 @@ def item_link(self, item): return ( urlreverse( "ietf.doc.views_doc.document_history", - kwargs=dict(name=item.doc.canonical_name()), + kwargs=dict(name=item.doc.name), ) + "#history-%s" % item.pk ) @@ -208,13 +208,13 @@ def items(self): return [doc for doc, time in results] def item_title(self, item): - return "%s : %s" % (item.canonical_name(), item.title) + return "%s : %s" % (item.name, item.title) def item_description(self, item): return item.abstract def item_link(self, item): - return "https://rfc-editor.org/info/%s" % item.canonical_name() + return "https://rfc-editor.org/info/%s" % item.name def item_pubdate(self, item): return item.publication_time @@ -224,20 +224,20 @@ def item_extra_kwargs(self, item): extra.update({"dcterms_accessRights": "gratis"}) extra.update({"dcterms_format": "text/html"}) media_contents = [] - if int(item.rfc_number()) < 8650: - if int(item.rfc_number()) not in [8, 9, 51, 418, 500, 530, 589]: + if item.rfc_number < settings.FIRST_V3_RFC: + if item.rfc_number not in [8, 9, 51, 418, 500, 530, 589]: for fmt, media_type in [("txt", "text/plain"), ("html", "text/html")]: media_contents.append( { - "url": f"https://rfc-editor.org/rfc/{item.canonical_name()}.{fmt}", + "url": f"https://rfc-editor.org/rfc/{item.name}.{fmt}", "media_type": media_type, "is_format_of": self.item_link(item), } ) - if int(item.rfc_number()) not in [571, 587]: + if item.rfc_number not in [571, 587]: media_contents.append( { - "url": f"https://www.rfc-editor.org/rfc/pdfrfc/{item.canonical_name()}.txt.pdf", + "url": f"https://www.rfc-editor.org/rfc/pdfrfc/{item.name}.txt.pdf", "media_type": "application/pdf", "is_format_of": self.item_link(item), } @@ -245,7 +245,7 @@ def item_extra_kwargs(self, item): else: media_contents.append( { - "url": f"https://www.rfc-editor.org/rfc/{item.canonical_name()}.xml", + "url": f"https://www.rfc-editor.org/rfc/{item.name}.xml", "media_type": "application/rfc+xml", } ) @@ -256,16 +256,18 @@ def item_extra_kwargs(self, item): ]: media_contents.append( { - "url": f"https://rfc-editor.org/rfc/{item.canonical_name()}.{fmt}", + "url": f"https://rfc-editor.org/rfc/{item.name}.{fmt}", "media_type": media_type, - "is_format_of": f"https://www.rfc-editor.org/rfc/{item.canonical_name()}.xml", + "is_format_of": f"https://www.rfc-editor.org/rfc/{item.name}.xml", } ) extra.update({"media_contents": media_contents}) - extra.update({"doi": "10.17487/%s" % item.canonical_name().upper()}) extra.update( - {"doiuri": "http://dx.doi.org/10.17487/%s" % item.canonical_name().upper()} + { + "doi": item.doi, + "doiuri": f"https://doi.org/{item.doi}", + } ) # R104 Publisher (Mandatory - but we need a string from them first) diff --git a/ietf/doc/fields.py 
b/ietf/doc/fields.py index fde5199509..4a6922bf34 100644 --- a/ietf/doc/fields.py +++ b/ietf/doc/fields.py @@ -13,7 +13,7 @@ import debug # pyflakes:ignore -from ietf.doc.models import Document, DocAlias +from ietf.doc.models import Document from ietf.doc.utils import uppercase_std_abbreviated_name from ietf.utils.fields import SearchableField @@ -69,19 +69,3 @@ def ajax_url(self): class SearchableDocumentField(SearchableDocumentsField): """Specialized to only return one Document""" max_entries = 1 - - -class SearchableDocAliasesField(SearchableDocumentsField): - """Search DocAliases instead of Documents""" - model = DocAlias # type: Type[models.Model] - - def doc_type_filter(self, queryset): - """Filter to include only desired doc type - - For DocAlias, pass through to the docs to check type. - """ - return queryset.filter(docs__type=self.doc_type) - -class SearchableDocAliasField(SearchableDocAliasesField): - """Specialized to only return one DocAlias""" - max_entries = 1 \ No newline at end of file diff --git a/ietf/doc/forms.py b/ietf/doc/forms.py index c0c52571c2..768d6f96af 100644 --- a/ietf/doc/forms.py +++ b/ietf/doc/forms.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2013-2020, All Rights Reserved +# Copyright The IETF Trust 2013-2025, All Rights Reserved # -*- coding: utf-8 -*- @@ -8,8 +8,8 @@ from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.core.validators import validate_email -from ietf.doc.fields import SearchableDocAliasesField, SearchableDocAliasField -from ietf.doc.models import RelatedDocument, DocExtResource +from ietf.doc.fields import SearchableDocumentField, SearchableDocumentsField +from ietf.doc.models import RelatedDocument, DocExtResource, State from ietf.iesg.models import TelechatDate from ietf.iesg.utils import telechat_page_count from ietf.person.fields import SearchablePersonField, SearchablePersonsField @@ -61,7 +61,7 @@ class DocAuthorChangeBasisForm(forms.Form): basis = forms.CharField(max_length=255, label='Reason for change', help_text='What is the source or reasoning for the changes to the author list?') - + class AdForm(forms.Form): ad = forms.ModelChoiceField(Person.objects.filter(role__name="ad", role__group__state="active", role__group__type='area').order_by('name'), label="Shepherding AD", empty_label="(None)", required=True) @@ -134,11 +134,12 @@ class ActionHoldersForm(forms.Form): IESG_APPROVED_STATE_LIST = ("ann", "rfcqueue", "pub") class AddDownrefForm(forms.Form): - rfc = SearchableDocAliasField( + rfc = SearchableDocumentField( label="Referenced RFC", help_text="The RFC that is approved for downref", - required=True) - drafts = SearchableDocAliasesField( + required=True, + doc_type="rfc") + drafts = SearchableDocumentsField( label="Internet-Drafts that makes the reference", help_text="The Internet-Drafts that approve the downref in their Last Call", required=True) @@ -148,7 +149,7 @@ def clean_rfc(self): raise forms.ValidationError("Please provide a referenced RFC and a referencing Internet-Draft") rfc = self.cleaned_data['rfc'] - if not rfc.document.is_rfc(): + if rfc.type_id != "rfc": raise forms.ValidationError("Cannot find the RFC: " + rfc.name) return rfc @@ -158,10 +159,10 @@ def clean_drafts(self): v_err_names = [] drafts = self.cleaned_data['drafts'] - for da in drafts: - state = da.document.get_state("draft-iesg") + for d in drafts: + state = d.get_state("draft-iesg") if not state or state.slug not in IESG_APPROVED_STATE_LIST: - v_err_names.append(da.name) + v_err_names.append(d.name) if 
v_err_names: raise forms.ValidationError("Internet-Draft is not yet approved: " + ", ".join(v_err_names)) return drafts @@ -173,23 +174,23 @@ def clean(self): v_err_pairs = [] rfc = self.cleaned_data['rfc'] drafts = self.cleaned_data['drafts'] - for da in drafts: - if RelatedDocument.objects.filter(source=da.document, target=rfc, relationship_id='downref-approval'): - v_err_pairs.append(da.name + " --> RFC " + rfc.document.rfc_number()) + for d in drafts: + if RelatedDocument.objects.filter(source=d, target=rfc, relationship_id='downref-approval'): + v_err_pairs.append(f"{d.name} --> RFC {rfc.rfc_number}") if v_err_pairs: raise forms.ValidationError("Downref is already in the registry: " + ", ".join(v_err_pairs)) if 'save_downref_anyway' not in self.data: # this check is skipped if the save_downref_anyway button is used v_err_refnorm = "" - for da in drafts: - if not RelatedDocument.objects.filter(source=da.document, target=rfc, relationship_id='refnorm'): + for d in drafts: + if not RelatedDocument.objects.filter(source=d, target=rfc, relationship_id='refnorm'): if v_err_refnorm: - v_err_refnorm = v_err_refnorm + " or " + da.name + v_err_refnorm = v_err_refnorm + " or " + d.name else: - v_err_refnorm = da.name + v_err_refnorm = d.name if v_err_refnorm: - v_err_refnorm_prefix = "There does not seem to be a normative reference to RFC " + rfc.document.rfc_number() + " by " + v_err_refnorm_prefix = f"There does not seem to be a normative reference to RFC {rfc.rfc_number} by " raise forms.ValidationError(v_err_refnorm_prefix + v_err_refnorm) @@ -265,3 +266,32 @@ def clean(self): @staticmethod def valid_resource_tags(): return ExtResourceName.objects.all().order_by('slug').values_list('slug', flat=True) + +class InvestigateForm(forms.Form): + name_fragment = forms.CharField( + label="File name or fragment to investigate", + required=True, + help_text=( + "Enter a filename such as draft-ietf-some-draft-00.txt or a fragment like draft-ietf-some-draft using at least 8 characters. The search will also work for files that are not necessarily drafts." + ), + min_length=8, + ) + task_id = forms.CharField(required=False, widget=forms.HiddenInput) + + def clean_name_fragment(self): + disallowed_characters = ["%", "/", "\\", "*"] + name_fragment = self.cleaned_data["name_fragment"] + # Manual inspection of the directories at the time of this writing shows + # looking for files with less than 8 characters in the name is not useful + # Requiring this will help protect against the secretariat unintentionally + # matching every draft. 
+ if any(c in name_fragment for c in disallowed_characters): + raise ValidationError(f"The following characters are disallowed: {', '.join(disallowed_characters)}") + return name_fragment + + +class ChangeStatementStateForm(forms.Form): + state = forms.ModelChoiceField( + State.objects.filter(used=True, type="statement"), + empty_label=None, + ) diff --git a/ietf/doc/mails.py b/ietf/doc/mails.py index 8f5d0eb678..ddecbb6b54 100644 --- a/ietf/doc/mails.py +++ b/ietf/doc/mails.py @@ -19,7 +19,7 @@ from ietf.utils import log from ietf.utils.mail import send_mail, send_mail_text from ietf.ipr.utils import iprs_from_docs, related_docs -from ietf.doc.models import WriteupDocEvent, LastCallDocEvent, DocAlias, ConsensusDocEvent +from ietf.doc.models import WriteupDocEvent, LastCallDocEvent, ConsensusDocEvent from ietf.doc.utils import needed_ballot_positions from ietf.doc.utils_bofreq import bofreq_editors, bofreq_responsible from ietf.group.models import Role @@ -54,7 +54,7 @@ def email_ad_approved_doc(request, doc, text): def email_ad_approved_conflict_review(request, review, ok_to_publish): """Email notification when AD approves a conflict review""" - conflictdoc = review.relateddocument_set.get(relationship__slug='conflrev').target.document + conflictdoc = review.relateddocument_set.get(relationship__slug='conflrev').target (to, cc) = gather_address_lists("ad_approved_conflict_review") frm = request.user.person.formatted_email() send_mail(request, @@ -202,7 +202,7 @@ def generate_last_call_announcement(request, doc): doc.filled_title = textwrap.fill(doc.title, width=70, subsequent_indent=" " * 3) - iprs = iprs_from_docs(related_docs(DocAlias.objects.get(name=doc.canonical_name()))) + iprs = iprs_from_docs(related_docs(Document.objects.get(name=doc.name))) if iprs: ipr_links = [ urlreverse("ietf.ipr.views.show", kwargs=dict(id=i.id)) for i in iprs] ipr_links = [ settings.IDTRACKER_BASE_URL+url if not url.startswith("http") else url for url in ipr_links ] @@ -568,7 +568,7 @@ def email_last_call_expired(doc): send_mail(None, addrs.to, "DraftTracker Mail System ", - "Last Call Expired: %s" % doc.file_tag(), + "IETF Last Call Expired: %s" % doc.file_tag(), "doc/mail/change_notice.txt", dict(text=text, doc=doc, @@ -670,7 +670,7 @@ def send_review_possibly_replaces_request(request, doc, submitter_info): to = set(addrs.to) cc = set(addrs.cc) - possibly_replaces = Document.objects.filter(name__in=[alias.name for alias in doc.related_that_doc("possibly-replaces")]) + possibly_replaces = Document.objects.filter(name__in=[related.name for related in doc.related_that_doc("possibly-replaces")]) for other_doc in possibly_replaces: (other_to, other_cc) = gather_address_lists('doc_replacement_suggested',doc=other_doc) to.update(other_to) diff --git a/ietf/doc/management/commands/find_github_backup_info.py b/ietf/doc/management/commands/find_github_backup_info.py deleted file mode 100644 index f1f71452df..0000000000 --- a/ietf/doc/management/commands/find_github_backup_info.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright The IETF Trust 2020, All Rights Reserved - - -import github3 - -from collections import Counter -from urllib.parse import urlparse - -from django.conf import settings -from django.core.management.base import BaseCommand, CommandError - -from ietf.doc.models import DocExtResource -from ietf.group.models import GroupExtResource -from ietf.person.models import PersonExtResource - -# TODO: Think more about submodules. 
This currently will only take top level repos, with the assumption that the clone will include arguments to grab all the submodules. -# As a consequence, we might end up pulling more than we need (or that the org or user expected) -# Make sure this is what we want. - -class Command(BaseCommand): - help = ('Locate information about github repositories to backup') - - def add_arguments(self, parser): - parser.add_argument('--verbose', dest='verbose', action='store_true', help='Show counts of types of repositories') - - def handle(self, *args, **options): - - if not (hasattr(settings,'GITHUB_BACKUP_API_KEY') and settings.GITHUB_BACKUP_API_KEY): - raise CommandError("ERROR: can't find GITHUB_BACKUP_API_KEY") # TODO: at >= py3.1, use returncode - - github = github3.login(token = settings.GITHUB_BACKUP_API_KEY) - owners = dict() - repos = set() - - for cls in (DocExtResource, GroupExtResource, PersonExtResource): - for res in cls.objects.filter(name_id__in=('github_repo','github_org')): - path_parts = urlparse(res.value).path.strip('/').split('/') - if not path_parts or not path_parts[0]: - continue - - owner = path_parts[0] - - if owner not in owners: - try: - gh_owner = github.user(username=owner) - owners[owner] = gh_owner - except github3.exceptions.NotFoundError: - continue - - if gh_owner.type in ('User', 'Organization'): - if len(path_parts) > 1: - repo = path_parts[1] - if (owner, repo) not in repos: - try: - github.repository(owner,repo) - repos.add( (owner, repo) ) - except github3.exceptions.NotFoundError: - continue - else: - for repo in github.repositories_by(owner): - repos.add( (owner, repo.name) ) - - owner_types = Counter([owners[owner].type for owner in owners]) - if options['verbose']: - self.stdout.write("Owners:") - for key in owner_types: - self.stdout.write(" %s: %s"%(key,owner_types[key])) - self.stdout.write("Repositories: %d" % len(repos)) - for repo in sorted(repos): - self.stdout.write(" https://github.com/%s/%s" % repo ) - else: - for repo in sorted(repos): - self.stdout.write("%s/%s" % repo ) - diff --git a/ietf/doc/management/commands/generate_draft_aliases.py b/ietf/doc/management/commands/generate_draft_aliases.py deleted file mode 100755 index 88f4aa98cb..0000000000 --- a/ietf/doc/management/commands/generate_draft_aliases.py +++ /dev/null @@ -1,180 +0,0 @@ -# Copyright The IETF Trust 2012-2021, All Rights Reserved -# -*- coding: utf-8 -*- - -# This was written as a script by Markus Stenberg . -# It was turned into a management command by Russ Housley . 
- -import datetime -import io -import os -import re -import shutil -import stat -import time - -from tempfile import mkstemp - -from django.conf import settings -from django.core.management.base import BaseCommand -from django.utils import timezone - -import debug # pyflakes:ignore - -from ietf.doc.models import Document -from ietf.group.utils import get_group_role_emails, get_group_ad_emails -from ietf.utils.aliases import dump_sublist -from utils.mail import parseaddr - -DEFAULT_YEARS = 2 - - -def get_draft_ad_emails(doc): - """Get AD email addresses for the given draft, if any.""" - ad_emails = set() - # If working group document, return current WG ADs - if doc.group and doc.group.acronym != 'none': - ad_emails.update(get_group_ad_emails(doc.group)) - # Document may have an explicit AD set - if doc.ad: - ad_emails.add(doc.ad.email_address()) - return ad_emails - - -def get_draft_chair_emails(doc): - """Get chair email addresses for the given draft, if any.""" - chair_emails = set() - if doc.group: - chair_emails.update(get_group_role_emails(doc.group, ['chair', 'secr'])) - return chair_emails - - -def get_draft_shepherd_email(doc): - """Get shepherd email addresses for the given draft, if any.""" - shepherd_email = set() - if doc.shepherd: - shepherd_email.add(doc.shepherd.email_address()) - return shepherd_email - - -def get_draft_authors_emails(doc): - """Get list of authors for the given draft.""" - author_emails = set() - for author in doc.documentauthor_set.all(): - if author.email and author.email.email_address(): - author_emails.add(author.email.email_address()) - return author_emails - - -def get_draft_notify_emails(doc): - """Get list of email addresses to notify for the given draft.""" - ad_email_alias_regex = r"^%s.ad@(%s|%s)$" % (doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER) - all_email_alias_regex = r"^%s.all@(%s|%s)$" % (doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER) - author_email_alias_regex = r"^%s@(%s|%s)$" % (doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER) - notify_email_alias_regex = r"^%s.notify@(%s|%s)$" % (doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER) - shepherd_email_alias_regex = r"^%s.shepherd@(%s|%s)$" % (doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER) - notify_emails = set() - if doc.notify: - for e in doc.notify.split(','): - e = e.strip() - if re.search(ad_email_alias_regex, e): - notify_emails.update(get_draft_ad_emails(doc)) - elif re.search(author_email_alias_regex, e): - notify_emails.update(get_draft_authors_emails(doc)) - elif re.search(shepherd_email_alias_regex, e): - notify_emails.update(get_draft_shepherd_email(doc)) - elif re.search(all_email_alias_regex, e): - notify_emails.update(get_draft_ad_emails(doc)) - notify_emails.update(get_draft_authors_emails(doc)) - notify_emails.update(get_draft_shepherd_email(doc)) - elif re.search(notify_email_alias_regex, e): - pass - else: - (name, email) = parseaddr(e) - notify_emails.add(email) - return notify_emails - - -class Command(BaseCommand): - help = ('Generate the draft-aliases and draft-virtual files for Internet-Draft ' - 'mail aliases, placing them in the files configured in ' - 'settings.DRAFT_ALIASES_PATH and settings.DRAFT_VIRTUAL_PATH, ' - 'respectively. The generation includes aliases for Internet-Drafts ' - 'that have seen activity in the last %s years.' 
% (DEFAULT_YEARS)) - - def handle(self, *args, **options): - show_since = timezone.now() - datetime.timedelta(DEFAULT_YEARS*365) - - date = time.strftime("%Y-%m-%d_%H:%M:%S") - signature = '# Generated by %s at %s\n' % (os.path.abspath(__file__), date) - - ahandle, aname = mkstemp() - os.close(ahandle) - afile = io.open(aname,"w") - - vhandle, vname = mkstemp() - os.close(vhandle) - vfile = io.open(vname,"w") - - afile.write(signature) - vfile.write(signature) - vfile.write("%s anything\n" % settings.DRAFT_VIRTUAL_DOMAIN) - - # Internet-Drafts with active status or expired within DEFAULT_YEARS - drafts = Document.objects.filter(name__startswith='draft-') - active_drafts = drafts.filter(states__slug='active') - inactive_recent_drafts = drafts.exclude(states__slug='active').filter(expires__gte=show_since) - interesting_drafts = active_drafts | inactive_recent_drafts - - alias_domains = ['ietf.org', ] - for draft in interesting_drafts.distinct().iterator(): - # Omit RFCs, unless they were published in the last DEFAULT_YEARS - if draft.docalias.filter(name__startswith='rfc'): - if draft.latest_event(type='published_rfc').time < show_since: - continue - - alias = draft.name - all = set() - - # no suffix and .authors are the same list - emails = get_draft_authors_emails(draft) - all.update(emails) - dump_sublist(afile, vfile, alias, alias_domains, settings.DRAFT_VIRTUAL_DOMAIN, emails) - dump_sublist(afile, vfile, alias+'.authors', alias_domains, settings.DRAFT_VIRTUAL_DOMAIN, emails) - - # .chairs = group chairs - emails = get_draft_chair_emails(draft) - if emails: - all.update(emails) - dump_sublist(afile, vfile, alias+'.chairs', alias_domains, settings.DRAFT_VIRTUAL_DOMAIN, emails) - - # .ad = sponsoring AD / WG AD (WG document) - emails = get_draft_ad_emails(draft) - if emails: - all.update(emails) - dump_sublist(afile, vfile, alias+'.ad', alias_domains, settings.DRAFT_VIRTUAL_DOMAIN, emails) - - # .notify = notify email list from the Document - emails = get_draft_notify_emails(draft) - if emails: - all.update(emails) - dump_sublist(afile, vfile, alias+'.notify', alias_domains, settings.DRAFT_VIRTUAL_DOMAIN, emails) - - # .shepherd = shepherd email from the Document - emails = get_draft_shepherd_email(draft) - if emails: - all.update(emails) - dump_sublist(afile, vfile, alias+'.shepherd', alias_domains, settings.DRAFT_VIRTUAL_DOMAIN, emails) - - # .all = everything from above - dump_sublist(afile, vfile, alias+'.all', alias_domains, settings.DRAFT_VIRTUAL_DOMAIN, all) - - afile.close() - vfile.close() - - os.chmod(aname, stat.S_IWUSR|stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH) - os.chmod(vname, stat.S_IWUSR|stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH) - - shutil.move(aname, settings.DRAFT_ALIASES_PATH) - shutil.move(vname, settings.DRAFT_VIRTUAL_PATH) - - \ No newline at end of file diff --git a/ietf/doc/management/commands/generate_draft_bibxml_files.py b/ietf/doc/management/commands/generate_draft_bibxml_files.py deleted file mode 100644 index eda67c401b..0000000000 --- a/ietf/doc/management/commands/generate_draft_bibxml_files.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright The IETF Trust 2012-2020, All Rights Reserved -# -*- coding: utf-8 -*- - - -import datetime -import io -import os -import re -import sys - -from django.conf import settings -from django.core.management.base import BaseCommand -from django.utils import timezone - -import debug # pyflakes:ignore - -from ietf.doc.models import NewRevisionDocEvent -from ietf.doc.utils import bibxml_for_draft - -DEFAULT_DAYS = 7 - -class 
Command(BaseCommand): - help = ('Generate draft bibxml files for xml2rfc references, placing them in the ' - 'directory configured in settings.BIBXML_BASE_PATH: %s. ' - 'By default, generate files as needed for new Internet-Draft revisions from the ' - 'last %s days.' % (settings.BIBXML_BASE_PATH, DEFAULT_DAYS)) - - def add_arguments(self, parser): - parser.add_argument('--all', action='store_true', default=False, help="Process all documents, not only recent submissions") - parser.add_argument('--days', type=int, default=DEFAULT_DAYS, help="Look submissions from the last DAYS days, instead of %s" % DEFAULT_DAYS) - - def say(self, msg): - if self.verbosity > 0: - sys.stdout.write(msg) - sys.stdout.write('\n') - - def note(self, msg): - if self.verbosity > 1: - sys.stdout.write(msg) - sys.stdout.write('\n') - - def mutter(self, msg): - if self.verbosity > 2: - sys.stdout.write(msg) - sys.stdout.write('\n') - - def write(self, fn, new): - # normalize new - new = re.sub(r'\r\n?', r'\n', new) - try: - with io.open(fn, encoding='utf-8') as f: - old = f.read() - except IOError: - old = "" - if old.strip() != new.strip(): - self.note('Writing %s' % os.path.basename(fn)) - with io.open(fn, "w", encoding='utf-8') as f: - f.write(new) - - def handle(self, *args, **options): - self.verbosity = options.get("verbosity", 1) - process_all = options.get("all") - days = options.get("days") - # - bibxmldir = os.path.join(settings.BIBXML_BASE_PATH, 'bibxml-ids') - if not os.path.exists(bibxmldir): - os.makedirs(bibxmldir) - # - if process_all: - doc_events = NewRevisionDocEvent.objects.filter(type='new_revision', doc__type_id='draft') - else: - start = timezone.now() - datetime.timedelta(days=days) - doc_events = NewRevisionDocEvent.objects.filter(type='new_revision', doc__type_id='draft', time__gte=start) - doc_events = doc_events.order_by('time') - - for e in doc_events: - self.mutter('%s %s' % (e.time, e.doc.name)) - try: - doc = e.doc - bibxml = bibxml_for_draft(doc, e.rev) - ref_rev_file_name = os.path.join(bibxmldir, 'reference.I-D.%s-%s.xml' % (doc.name, e.rev)) - self.write(ref_rev_file_name, bibxml) - except Exception as ee: - sys.stderr.write('\n%s-%s: %s\n' % (doc.name, doc.rev, ee)) diff --git a/ietf/doc/management/commands/generate_idnits2_rfc_status.py b/ietf/doc/management/commands/generate_idnits2_rfc_status.py deleted file mode 100644 index 45be188018..0000000000 --- a/ietf/doc/management/commands/generate_idnits2_rfc_status.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright The IETF Trust 2021 All Rights Reserved - -import os - -from django.conf import settings -from django.core.management.base import BaseCommand - -from ietf.doc.utils import generate_idnits2_rfc_status -from ietf.utils.log import log - -class Command(BaseCommand): - help = ('Generate the rfc_status blob used by idnits2') - - def handle(self, *args, **options): - filename=os.path.join(settings.DERIVED_DIR,'idnits2-rfc-status') - blob = generate_idnits2_rfc_status() - try: - bytes = blob.encode('utf-8') - with open(filename,'wb') as f: - f.write(bytes) - except Exception as e: - log('failed to write idnits2-rfc-status: '+str(e)) - raise e diff --git a/ietf/doc/management/commands/generate_idnits2_rfcs_obsoleted.py b/ietf/doc/management/commands/generate_idnits2_rfcs_obsoleted.py deleted file mode 100644 index 8bd122e87e..0000000000 --- a/ietf/doc/management/commands/generate_idnits2_rfcs_obsoleted.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright The IETF Trust 2021 All Rights Reserved - -import os - -from django.conf import 
settings -from django.core.management.base import BaseCommand - -from ietf.doc.utils import generate_idnits2_rfcs_obsoleted -from ietf.utils.log import log - -class Command(BaseCommand): - help = ('Generate the rfcs-obsoleted file used by idnits2') - - def handle(self, *args, **options): - filename=os.path.join(settings.DERIVED_DIR,'idnits2-rfcs-obsoleted') - blob = generate_idnits2_rfcs_obsoleted() - try: - bytes = blob.encode('utf-8') - with open(filename,'wb') as f: - f.write(bytes) - except Exception as e: - log('failed to write idnits2-rfcs-obsoleted: '+str(e)) - raise e diff --git a/ietf/doc/migrations/0009_add_rfc_states.py b/ietf/doc/migrations/0009_add_rfc_states.py new file mode 100644 index 0000000000..07a6ac0205 --- /dev/null +++ b/ietf/doc/migrations/0009_add_rfc_states.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.2 on 2023-06-14 20:57 + +from django.db import migrations + + +def forward(apps, schema_editor): + StateType = apps.get_model("doc", "StateType") + rfc_statetype, _ = StateType.objects.get_or_create(slug="rfc", label="State") + + State = apps.get_model("doc", "State") + State.objects.get_or_create( + type=rfc_statetype, slug="published", name="Published", used=True, order=1 + ) + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0008_alter_docevent_type"), + ] + + operations = [ + migrations.RunPython(forward), + ] diff --git a/ietf/doc/migrations/0010_dochistory_rfc_number_document_rfc_number.py b/ietf/doc/migrations/0010_dochistory_rfc_number_document_rfc_number.py new file mode 100644 index 0000000000..26b2a85c62 --- /dev/null +++ b/ietf/doc/migrations/0010_dochistory_rfc_number_document_rfc_number.py @@ -0,0 +1,22 @@ +# Generated by Django 4.2.2 on 2023-06-14 22:28 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0009_add_rfc_states"), + ] + + operations = [ + migrations.AddField( + model_name="dochistory", + name="rfc_number", + field=models.PositiveIntegerField(blank=True, null=True), + ), + migrations.AddField( + model_name="document", + name="rfc_number", + field=models.PositiveIntegerField(blank=True, null=True), + ), + ] diff --git a/ietf/doc/migrations/0011_create_rfc_documents.py b/ietf/doc/migrations/0011_create_rfc_documents.py new file mode 100644 index 0000000000..466ff81bb0 --- /dev/null +++ b/ietf/doc/migrations/0011_create_rfc_documents.py @@ -0,0 +1,76 @@ +# Generated by Django 4.2.2 on 2023-06-15 15:27 + +from django.db import migrations + + +def forward(apps, schema_editor): + Document = apps.get_model("doc", "Document") + DocAlias = apps.get_model("doc", "DocAlias") + DocumentAuthor = apps.get_model("doc", "DocumentAuthor") + + State = apps.get_model("doc", "State") + draft_rfc_state = State.objects.get(type_id="draft", slug="rfc") + rfc_published_state = State.objects.get(type_id="rfc", slug="published") + + # Find draft Documents in the "rfc" state + found_by_state = Document.objects.filter(states=draft_rfc_state).distinct() + + # Find Documents with an "rfc..." alias and confirm they're the same set + rfc_docaliases = DocAlias.objects.filter(name__startswith="rfc") + found_by_name = Document.objects.filter(docalias__in=rfc_docaliases).distinct() + assert set(found_by_name) == set(found_by_state), "mismatch between rfcs identified by state and docalias" + + # As of 2023-06-15, there is one Document with two rfc aliases: rfc6312 and rfc6342 are the same Document. This + # was due to a publication error. 
Because we go alias-by-alias, no special handling is needed in this migration. + + for rfc_alias in rfc_docaliases.order_by("name"): + assert rfc_alias.docs.count() == 1, f"DocAlias {rfc_alias} is linked to more than 1 Document" + draft = rfc_alias.docs.first() + if draft.name.startswith("rfc"): + rfc = draft + rfc.type_id = "rfc" + rfc.rfc_number = int(draft.name[3:]) + rfc.save() + rfc.states.set([rfc_published_state]) + else: + rfc = Document.objects.create( + type_id="rfc", + name=rfc_alias.name, + rfc_number=int(rfc_alias.name[3:]), + time=draft.time, + title=draft.title, + stream=draft.stream, + group=draft.group, + abstract=draft.abstract, + pages=draft.pages, + words=draft.words, + std_level=draft.std_level, + ad=draft.ad, + external_url=draft.external_url, + uploaded_filename=draft.uploaded_filename, + note=draft.note, + ) + rfc.states.set([rfc_published_state]) + rfc.formal_languages.set(draft.formal_languages.all()) + + # Copy Authors + for da in draft.documentauthor_set.all(): + DocumentAuthor.objects.create( + document=rfc, + person=da.person, + email=da.email, + affiliation=da.affiliation, + country=da.country, + order=da.order, + ) + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0010_dochistory_rfc_number_document_rfc_number"), + ("name", "0010_rfc_doctype_names"), + ] + + operations = [ + migrations.RunPython(forward), + ] diff --git a/ietf/doc/migrations/0012_move_rfc_docevents.py b/ietf/doc/migrations/0012_move_rfc_docevents.py new file mode 100644 index 0000000000..9969a8f0ad --- /dev/null +++ b/ietf/doc/migrations/0012_move_rfc_docevents.py @@ -0,0 +1,88 @@ +# Generated by Django 4.2.2 on 2023-06-20 18:36 + +from django.db import migrations +from django.db.models import Q + + +def forward(apps, schema_editor): + """Move RFC events from the draft to the rfc Document""" + DocAlias = apps.get_model("doc", "DocAlias") + DocEvent = apps.get_model("doc", "DocEvent") + Document = apps.get_model("doc", "Document") + + # queryset with events migrated regardless of whether before or after the "published_rfc" event + events_always_migrated = DocEvent.objects.filter( + Q( + type__in=[ + "published_rfc", # do not remove this one! 
+ ] + ) + ) + + # queryset with events migrated only after the "published_rfc" event + events_migrated_after_pub = DocEvent.objects.exclude( + type__in=[ + "created_ballot", + "closed_ballot", + "sent_ballot_announcement", + "changed_ballot_position", + "changed_ballot_approval_text", + "changed_ballot_writeup_text", + ] + ).exclude( + type="added_comment", + desc__contains="ballot set", # excludes 311 comments that all apply to drafts + ) + + # special case for rfc 6312/6342 draft, which has two published_rfc events + ignore = ["rfc6312", "rfc6342"] # do not reprocess these later + rfc6312 = Document.objects.get(name="rfc6312") + rfc6342 = Document.objects.get(name="rfc6342") + draft = DocAlias.objects.get(name="rfc6312").docs.first() + assert draft == DocAlias.objects.get(name="rfc6342").docs.first() + published_events = list( + DocEvent.objects.filter(doc=draft, type="published_rfc").order_by("time") + ) + assert len(published_events) == 2 + ( + pub_event_6312, + pub_event_6342, + ) = published_events # order matches pub dates at rfc-editor.org + + pub_event_6312.doc = rfc6312 + pub_event_6312.save() + events_migrated_after_pub.filter( + doc=draft, + time__gte=pub_event_6312.time, + time__lt=pub_event_6342.time, + ).update(doc=rfc6312) + + pub_event_6342.doc = rfc6342 + pub_event_6342.save() + events_migrated_after_pub.filter( + doc=draft, + time__gte=pub_event_6342.time, + ).update(doc=rfc6342) + + # Now handle all the rest + for rfc in Document.objects.filter(type_id="rfc").exclude(name__in=ignore): + draft = DocAlias.objects.get(name=rfc.name).docs.first() + assert draft is not None + published_event = DocEvent.objects.get(doc=draft, type="published_rfc") + events_always_migrated.filter( + doc=draft, + ).update(doc=rfc) + events_migrated_after_pub.filter( + doc=draft, + time__gte=published_event.time, + ).update(doc=rfc) + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0011_create_rfc_documents"), + ] + + operations = [ + migrations.RunPython(forward), + ] diff --git a/ietf/doc/migrations/0013_rfc_relateddocuments.py b/ietf/doc/migrations/0013_rfc_relateddocuments.py new file mode 100644 index 0000000000..9baddaebdb --- /dev/null +++ b/ietf/doc/migrations/0013_rfc_relateddocuments.py @@ -0,0 +1,45 @@ +# Generated by Django 4.2.3 on 2023-07-05 22:40 + +from django.db import migrations + + +def forward(apps, schema_editor): + DocAlias = apps.get_model("doc", "DocAlias") + Document = apps.get_model("doc", "Document") + RelatedDocument = apps.get_model("doc", "RelatedDocument") + for rfc_alias in DocAlias.objects.filter(name__startswith="rfc").exclude( + docs__type_id="rfc" + ): + # Move these over to the RFC + RelatedDocument.objects.filter( + relationship__slug__in=( + "tobcp", + "toexp", + "tohist", + "toinf", + "tois", + "tops", + "obs", + "updates", + ), + source__docalias=rfc_alias, + ).update(source=Document.objects.get(name=rfc_alias.name)) + # Duplicate references on the RFC but keep the ones on the draft as well + originals = list( + RelatedDocument.objects.filter( + relationship__slug__in=("refinfo", "refnorm", "refold", "refunk"), + source__docalias=rfc_alias, + ) + ) + for o in originals: + o.pk = None + o.source = Document.objects.get(name=rfc_alias.name) + RelatedDocument.objects.bulk_create(originals) + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0012_move_rfc_docevents"), + ] + + operations = [migrations.RunPython(forward)] diff --git a/ietf/doc/migrations/0014_move_rfc_docaliases.py 
b/ietf/doc/migrations/0014_move_rfc_docaliases.py new file mode 100644 index 0000000000..c82a98e052 --- /dev/null +++ b/ietf/doc/migrations/0014_move_rfc_docaliases.py @@ -0,0 +1,38 @@ +# Generated by Django 4.2.2 on 2023-06-20 18:36 + +from django.db import migrations + + +def forward(apps, schema_editor): + """Point "rfc..." DocAliases at the rfc-type Document + + Creates a became_rfc RelatedDocument to preserve the connection between the draft and the rfc. + """ + DocAlias = apps.get_model("doc", "DocAlias") + Document = apps.get_model("doc", "Document") + RelatedDocument = apps.get_model("doc", "RelatedDocument") + + for rfc_alias in DocAlias.objects.filter(name__startswith="rfc"): + rfc = Document.objects.get(name=rfc_alias.name) + aliased_doc = rfc_alias.docs.get() # implicitly confirms only one value in rfc_alias.docs + if aliased_doc != rfc: + # If the DocAlias was not already pointing at the rfc, it was pointing at the draft + # it came from. Create the relationship between draft and rfc Documents. + assert aliased_doc.type_id == "draft", f"Alias for {rfc.name} should be pointing at a draft" + RelatedDocument.objects.create( + source=aliased_doc, + target=rfc_alias, + relationship_id="became_rfc", + ) + # Now move the alias from the draft to the rfc + rfc_alias.docs.set([rfc]) + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0013_rfc_relateddocuments"), + ] + + operations = [ + migrations.RunPython(forward), + ] diff --git a/ietf/doc/migrations/0015_relate_no_aliases.py b/ietf/doc/migrations/0015_relate_no_aliases.py new file mode 100644 index 0000000000..4ba3dd9607 --- /dev/null +++ b/ietf/doc/migrations/0015_relate_no_aliases.py @@ -0,0 +1,84 @@ +# Generated by Django 4.2.2 on 2023-06-16 13:40 + +from django.db import migrations +import django.db.models.deletion +from django.db.models import F, Subquery, OuterRef, CharField +import ietf.utils.models + +def forward(apps, schema_editor): + RelatedDocument = apps.get_model("doc", "RelatedDocument") + DocAlias = apps.get_model("doc", "DocAlias") + target_subquery = Subquery(DocAlias.objects.filter(pk=OuterRef("deprecated_target")).values("docs")[:1]) + name_subquery = Subquery(DocAlias.objects.filter(pk=OuterRef("deprecated_target")).values("name")[:1]) + RelatedDocument.objects.annotate(firstdoc=target_subquery).annotate(aliasname=name_subquery).update(target=F("firstdoc"),originaltargetaliasname=F("aliasname")) + +def reverse(apps, schema_editor): + pass + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0014_move_rfc_docaliases"), + ] + + operations = [ + migrations.AlterField( + model_name='relateddocument', + name='target', + field=ietf.utils.models.ForeignKey( + db_index=False, + on_delete=django.db.models.deletion.CASCADE, + to='doc.docalias', + ), + ), + migrations.RenameField( + model_name="relateddocument", + old_name="target", + new_name="deprecated_target" + ), + migrations.AlterField( + model_name='relateddocument', + name='deprecated_target', + field=ietf.utils.models.ForeignKey( + db_index=True, + on_delete=django.db.models.deletion.CASCADE, + to='doc.docalias', + ), + ), + migrations.AddField( + model_name="relateddocument", + name="target", + field=ietf.utils.models.ForeignKey( + default=1, # A lie, but a convenient one - no relations point here. 
+ on_delete=django.db.models.deletion.CASCADE, + related_name="targets_related", + to="doc.document", + db_index=False, + ), + preserve_default=False, + ), + migrations.AddField( + model_name="relateddocument", + name="originaltargetaliasname", + field=CharField(max_length=255,null=True,blank=True), + preserve_default=True, + ), + migrations.RunPython(forward, reverse), + migrations.AlterField( + model_name="relateddocument", + name="target", + field=ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="targets_related", + to="doc.document", + db_index=True, + ), + ), + migrations.RemoveField( + model_name="relateddocument", + name="deprecated_target", + field=ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to='doc.DocAlias', + ), + ), + ] diff --git a/ietf/doc/migrations/0016_relate_hist_no_aliases.py b/ietf/doc/migrations/0016_relate_hist_no_aliases.py new file mode 100644 index 0000000000..df5fb3c325 --- /dev/null +++ b/ietf/doc/migrations/0016_relate_hist_no_aliases.py @@ -0,0 +1,87 @@ +# Generated by Django 4.2.2 on 2023-06-16 13:40 + +from django.db import migrations +import django.db.models.deletion +from django.db.models import F, Subquery, OuterRef, CharField +import ietf.utils.models + +def forward(apps, schema_editor): + RelatedDocHistory = apps.get_model("doc", "RelatedDocHistory") + DocAlias = apps.get_model("doc", "DocAlias") + target_subquery = Subquery(DocAlias.objects.filter(pk=OuterRef("deprecated_target")).values("docs")[:1]) + name_subquery = Subquery(DocAlias.objects.filter(pk=OuterRef("deprecated_target")).values("name")[:1]) + RelatedDocHistory.objects.annotate(firstdoc=target_subquery).annotate(aliasname=name_subquery).update(target=F("firstdoc"),originaltargetaliasname=F("aliasname")) + +def reverse(apps, schema_editor): + pass + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0015_relate_no_aliases"), + ] + + operations = [ + migrations.AlterField( + model_name='relateddochistory', + name='target', + field=ietf.utils.models.ForeignKey( + db_index=False, + on_delete=django.db.models.deletion.CASCADE, + to='doc.docalias', + related_name='reversely_related_document_history_set', + ), + ), + migrations.RenameField( + model_name="relateddochistory", + old_name="target", + new_name="deprecated_target" + ), + migrations.AlterField( + model_name='relateddochistory', + name='deprecated_target', + field=ietf.utils.models.ForeignKey( + db_index=True, + on_delete=django.db.models.deletion.CASCADE, + to='doc.docalias', + related_name='deprecated_reversely_related_document_history_set', + ), + ), + migrations.AddField( + model_name="relateddochistory", + name="target", + field=ietf.utils.models.ForeignKey( + default=1, # A lie, but a convenient one - no relations point here. 
+ on_delete=django.db.models.deletion.CASCADE, + to="doc.document", + db_index=False, + related_name='reversely_related_document_history_set', + ), + preserve_default=False, + ), + migrations.AddField( + model_name="relateddochistory", + name="originaltargetaliasname", + field=CharField(max_length=255,null=True,blank=True), + preserve_default=True, + ), + migrations.RunPython(forward, reverse), + migrations.AlterField( + model_name="relateddochistory", + name="target", + field=ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="doc.document", + db_index=True, + related_name='reversely_related_document_history_set', + ), + ), + migrations.RemoveField( + model_name="relateddochistory", + name="deprecated_target", + field=ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to='doc.DocAlias', + related_name='deprecated_reversely_related_document_history_set', + ), + ), + ] diff --git a/ietf/doc/migrations/0017_delete_docalias.py b/ietf/doc/migrations/0017_delete_docalias.py new file mode 100644 index 0000000000..207ca81e15 --- /dev/null +++ b/ietf/doc/migrations/0017_delete_docalias.py @@ -0,0 +1,16 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("ipr", "0002_iprdocrel_no_aliases"), + ("doc", "0016_relate_hist_no_aliases"), + ] + + operations = [ + migrations.DeleteModel( + name="DocAlias", + ), + ] diff --git a/ietf/doc/migrations/0018_move_dochistory.py b/ietf/doc/migrations/0018_move_dochistory.py new file mode 100644 index 0000000000..0bc29b0bc4 --- /dev/null +++ b/ietf/doc/migrations/0018_move_dochistory.py @@ -0,0 +1,45 @@ +# Generated by Django 4.2.5 on 2023-09-11 17:52 + +from django.db import migrations + +from django.db.models import Subquery, OuterRef, F + + +def forward(apps, schema_editor): + DocHistory = apps.get_model("doc", "DocHistory") + RelatedDocument = apps.get_model("doc", "RelatedDocument") + Document = apps.get_model("doc", "Document") + DocHistory.objects.filter(type_id="draft", doc__type_id="rfc").update(type_id="rfc") + DocHistory.objects.filter( + type_id="draft", doc__type_id="draft", name__startswith="rfc" + ).annotate( + rfc_id=Subquery( + RelatedDocument.objects.filter( + source_id=OuterRef("doc_id"), relationship_id="became_rfc" + ).values_list("target_id", flat=True)[:1] + ) + ).update( + doc_id=F("rfc_id"), type_id="rfc" + ) + DocHistory.objects.filter(type_id="rfc").annotate( + rfcno=Subquery( + Document.objects.filter(pk=OuterRef("doc_id")).values_list( + "rfc_number", flat=True + )[:1] + ) + ).update(rfc_number=F("rfcno")) + assert not DocHistory.objects.filter( + name__startswith="rfc", type_id="draft" + ).exists() + assert not DocHistory.objects.filter( + type_id="rfc", rfc_number__isnull=True + ).exists() + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0017_delete_docalias"), + ] + + # There is no going back + operations = [migrations.RunPython(forward)] diff --git a/ietf/doc/migrations/0019_subseries.py b/ietf/doc/migrations/0019_subseries.py new file mode 100644 index 0000000000..be2c612ac0 --- /dev/null +++ b/ietf/doc/migrations/0019_subseries.py @@ -0,0 +1,21 @@ +# Copyright The IETF Trust 2023, All Rights Reserved +from django.db import migrations + + +def forward(apps, schema_editor): + StateType = apps.get_model("doc", "StateType") + for slug in ["bcp", "std", "fyi"]: + StateType.objects.create(slug=slug, label=f"{slug} state") + + +def reverse(apps, 
schema_editor): + StateType = apps.get_model("doc", "StateType") + StateType.objects.filter(slug__in=["bcp", "std", "fyi"]).delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0018_move_dochistory"), + ] + + operations = [migrations.RunPython(forward, reverse)] diff --git a/ietf/doc/migrations/0020_move_errata_tags.py b/ietf/doc/migrations/0020_move_errata_tags.py new file mode 100644 index 0000000000..897b88f467 --- /dev/null +++ b/ietf/doc/migrations/0020_move_errata_tags.py @@ -0,0 +1,29 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.db import migrations + +from django.db.models import Subquery, OuterRef, F + + +def forward(apps, schema_editor): + Document = apps.get_model("doc", "Document") + RelatedDocument = apps.get_model("doc", "RelatedDocument") + Document.tags.through.objects.filter( + doctagname_id__in=["errata", "verified-errata"], document__type_id="draft" + ).annotate( + rfcdoc=Subquery( + RelatedDocument.objects.filter( + relationship_id="became_rfc", source_id=OuterRef("document__pk") + ).values_list("target__pk", flat=True)[:1] + ) + ).update( + document_id=F("rfcdoc") + ) + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0019_subseries"), + ] + + operations = [migrations.RunPython(forward)] diff --git a/ietf/doc/migrations/0021_narrativeminutes.py b/ietf/doc/migrations/0021_narrativeminutes.py new file mode 100644 index 0000000000..0f330bd053 --- /dev/null +++ b/ietf/doc/migrations/0021_narrativeminutes.py @@ -0,0 +1,39 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.db import migrations + + +def forward(apps, schema_editor): + StateType = apps.get_model("doc", "StateType") + State = apps.get_model("doc", "State") + + StateType.objects.create( + slug="narrativeminutes", + label="State", + ) + for order, slug in enumerate(["active", "deleted"]): + State.objects.create( + slug=slug, + type_id="narrativeminutes", + name=slug.capitalize(), + order=order, + desc="", + used=True, + ) + + +def reverse(apps, schema_editor): + StateType = apps.get_model("doc", "StateType") + State = apps.get_model("doc", "State") + + State.objects.filter(type_id="narrativeminutes").delete() + StateType.objects.filter(slug="narrativeminutes").delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0020_move_errata_tags"), + ("name", "0013_narrativeminutes"), + ] + + operations = [migrations.RunPython(forward, reverse)] diff --git a/ietf/doc/migrations/0022_remove_dochistory_internal_comments_and_more.py b/ietf/doc/migrations/0022_remove_dochistory_internal_comments_and_more.py new file mode 100644 index 0000000000..ad27793a83 --- /dev/null +++ b/ietf/doc/migrations/0022_remove_dochistory_internal_comments_and_more.py @@ -0,0 +1,21 @@ +# Generated by Django 4.2.15 on 2024-08-16 16:43 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("doc", "0021_narrativeminutes"), + ] + + operations = [ + migrations.RemoveField( + model_name="dochistory", + name="internal_comments", + ), + migrations.RemoveField( + model_name="document", + name="internal_comments", + ), + ] diff --git a/ietf/doc/migrations/0023_bofreqspamstate.py b/ietf/doc/migrations/0023_bofreqspamstate.py new file mode 100644 index 0000000000..dbbaf996e9 --- /dev/null +++ b/ietf/doc/migrations/0023_bofreqspamstate.py @@ -0,0 +1,30 @@ +# Copyright The IETF Trust 2024, All Rights Reserved + +from django.db import migrations + + +def forward(apps, schema_editor): + 
State = apps.get_model("doc", "State") + State.objects.get_or_create( + type_id="bofreq", + slug="spam", + defaults={"name": "Spam", "desc": "The BOF request is spam", "order": 5}, + ) + + +def reverse(apps, schema_editor): + State = apps.get_model("doc", "State") + Document = apps.get_model("doc", "Document") + assert not Document.objects.filter( + states__type="bofreq", states__slug="spam" + ).exists() + State.objects.filter(type_id="bofreq", slug="spam").delete() + + +class Migration(migrations.Migration): + + dependencies = [ + ("doc", "0022_remove_dochistory_internal_comments_and_more"), + ] + + operations = [migrations.RunPython(forward, reverse)] diff --git a/ietf/doc/migrations/0024_remove_ad_is_watching_states.py b/ietf/doc/migrations/0024_remove_ad_is_watching_states.py new file mode 100644 index 0000000000..0c0fb0ad25 --- /dev/null +++ b/ietf/doc/migrations/0024_remove_ad_is_watching_states.py @@ -0,0 +1,121 @@ +# Copyright The IETF Trust 2024, All Rights Reserved + +from django.db import migrations + + +def get_helper(DocHistory, RelatedDocument, RelatedDocHistory, DocumentAuthor, DocHistoryAuthor): + """Dependency injection wrapper""" + + def save_document_in_history(doc): + """Save a snapshot of document and related objects in the database. + + Local copy of ietf.doc.utils.save_document_in_history() to avoid depending on the + code base in a migration. + """ + + def get_model_fields_as_dict(obj): + return dict((field.name, getattr(obj, field.name)) + for field in obj._meta.fields + if field is not obj._meta.pk) + + # copy fields + fields = get_model_fields_as_dict(doc) + fields["doc"] = doc + fields["name"] = doc.name + + dochist = DocHistory(**fields) + dochist.save() + + # copy many to many + for field in doc._meta.many_to_many: + if field.remote_field.through and field.remote_field.through._meta.auto_created: + hist_field = getattr(dochist, field.name) + hist_field.clear() + hist_field.set(getattr(doc, field.name).all()) + + # copy remaining tricky many to many + def transfer_fields(obj, HistModel): + mfields = get_model_fields_as_dict(item) + # map doc -> dochist + for k, v in mfields.items(): + if v == doc: + mfields[k] = dochist + HistModel.objects.create(**mfields) + + for item in RelatedDocument.objects.filter(source=doc): + transfer_fields(item, RelatedDocHistory) + + for item in DocumentAuthor.objects.filter(document=doc): + transfer_fields(item, DocHistoryAuthor) + + return dochist + + return save_document_in_history + + +def forward(apps, schema_editor): + """Mark watching draft-iesg state unused after removing it from Documents""" + StateDocEvent = apps.get_model("doc", "StateDocEvent") + Document = apps.get_model("doc", "Document") + State = apps.get_model("doc", "State") + StateType = apps.get_model("doc", "StateType") + Person = apps.get_model("person", "Person") + + save_document_in_history = get_helper( + DocHistory=apps.get_model("doc", "DocHistory"), + RelatedDocument=apps.get_model("doc", "RelatedDocument"), + RelatedDocHistory=apps.get_model("doc", "RelatedDocHistory"), + DocumentAuthor=apps.get_model("doc", "DocumentAuthor"), + DocHistoryAuthor=apps.get_model("doc", "DocHistoryAuthor"), + ) + + draft_iesg_state_type = StateType.objects.get(slug="draft-iesg") + idexists_state = State.objects.get(type=draft_iesg_state_type, slug="idexists") + watching_state = State.objects.get(type=draft_iesg_state_type, slug="watching") + system_person = Person.objects.get(name="(System)") + + # Remove state from documents that currently have it + for doc in 
Document.objects.filter(states=watching_state): + assert doc.type_id == "draft" + doc.states.remove(watching_state) + doc.states.add(idexists_state) + e = StateDocEvent.objects.create( + type="changed_state", + by=system_person, + doc=doc, + rev=doc.rev, + desc=f"{draft_iesg_state_type.label} changed to {idexists_state.name} from {watching_state.name}", + state_type=draft_iesg_state_type, + state=idexists_state, + ) + doc.time = e.time + doc.save() + save_document_in_history(doc) + assert not Document.objects.filter(states=watching_state).exists() + + # Mark state as unused + watching_state.used = False + watching_state.save() + + +def reverse(apps, schema_editor): + """Mark watching draft-iesg state as used + + Does not try to re-apply the state to Documents modified by the forward migration. This + could be done in theory, but would either require dangerous history rewriting or add a + lot of history junk. + """ + State = apps.get_model("doc", "State") + StateType = apps.get_model("doc", "StateType") + State.objects.filter( + type=StateType.objects.get(slug="draft-iesg"), slug="watching" + ).update(used=True) + + +class Migration(migrations.Migration): + + dependencies = [ + ("doc", "0023_bofreqspamstate"), + ] + + operations = [migrations.RunPython(forward, reverse)] diff --git a/ietf/doc/migrations/0025_storedobject_storedobject_unique_name_per_store.py b/ietf/doc/migrations/0025_storedobject_storedobject_unique_name_per_store.py new file mode 100644 index 0000000000..e948ca3011 --- /dev/null +++ b/ietf/doc/migrations/0025_storedobject_storedobject_unique_name_per_store.py @@ -0,0 +1,66 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("doc", "0024_remove_ad_is_watching_states"), + ] + + operations = [ + migrations.CreateModel( + name="StoredObject", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("store", models.CharField(max_length=256)), + ("name", models.CharField(max_length=1024)), + ("sha384", models.CharField(max_length=96)), + ("len", models.PositiveBigIntegerField()), + ( + "store_created", + models.DateTimeField( + help_text="The instant the object ws first placed in the store" + ), + ), + ( + "created", + models.DateTimeField( + help_text="Instant object became known. May not be the same as the storage's created value for the instance. It will hold ctime for objects imported from older disk storage" + ), + ), + ( + "modified", + models.DateTimeField( + help_text="Last instant object was modified. May not be the same as the storage's modified value for the instance. 
It will hold mtime for objects imported from older disk storage unless they've actually been overwritten more recently" + ), + ), + ("doc_name", models.CharField(blank=True, max_length=255, null=True)), + ("doc_rev", models.CharField(blank=True, max_length=16, null=True)), + ("deleted", models.DateTimeField(null=True)), + ], + options={ + "indexes": [ + models.Index( + fields=["doc_name", "doc_rev"], + name="doc_storedo_doc_nam_d04465_idx", + ) + ], + }, + ), + migrations.AddConstraint( + model_name="storedobject", + constraint=models.UniqueConstraint( + fields=("store", "name"), name="unique_name_per_store" + ), + ), + ] diff --git a/ietf/doc/migrations/0026_change_wg_state_descriptions.py b/ietf/doc/migrations/0026_change_wg_state_descriptions.py new file mode 100644 index 0000000000..b02b12c97e --- /dev/null +++ b/ietf/doc/migrations/0026_change_wg_state_descriptions.py @@ -0,0 +1,117 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations + +def forward(apps, schema_editor): + State = apps.get_model("doc","State") + for name, desc in [ + ("WG Document","The document has been adopted by the Working Group (WG) and is under development. A document can only be adopted by one WG at a time. However, a document may be transferred between WGs."), + ("Parked WG Document","The Working Group (WG) document is in a temporary state where it will not be actively developed. The reason for the pause is explained via a datatracker comments section."), + ("Dead WG Document","The Working Group (WG) document has been abandoned by the WG. No further development is planned in this WG. A decision to resume work on this document and move it out of this state is possible."), + ("In WG Last Call","The Working Group (WG) document is currently subject to an active WG Last Call (WGLC) review per Section 7.4 of RFC2418."), + ("Waiting for Implementation","The progression of this Working Group (WG) document towards publication is paused as it awaits implementation. The process governing the approach to implementations is WG-specific."), + ("Held by WG","Held by Working Group (WG) chairs for administrative reasons. See document history for details."), + ("Waiting for WG Chair Go-Ahead","The Working Group (WG) document has completed Working Group Last Call (WGLC), but the WG chair(s) are not yet ready to call consensus on the document. The reasons for this may include comments from the WGLC need to be responded to, or a revision to the document is needed"), + ("WG Consensus: Waiting for Write-Up","The Working Group (WG) document has consensus to proceed to publication. However, the document is waiting for a document shepherd write-up per RFC4858."), + ("Submitted to IESG for Publication","The Working Group (WG) document has left the WG and been submitted to the Internet Engineering Steering Group (IESG) for evaluation and publication. See the “IESG State” or “RFC Editor State” for further details on the state of the document."), + ("Candidate for WG Adoption","The individual submission document has been marked by the Working Group (WG) chairs as a candidate for adoption by the WG, but no adoption call has been started."), + ("Call For Adoption By WG Issued","A call for adoption of the individual submission document has been issued by the Working Group (WG) chairs. 
This call is still running but the WG has not yet reached consensus for adoption."), + ("Adopted by a WG","The individual submission document has been adopted by the Working Group (WG), but a WG document replacing this document with the typical naming convention of 'draft- ietf-wgname-topic-nn' has not yet been submitted."), + ("Adopted for WG Info Only","The document is adopted by the Working Group (WG) for its internal use. The WG has decided that it will not pursue publication of it as an RFC."), + ]: + State.objects.filter(name=name).update(desc=desc) + +def reverse(apps, schema_editor): + State = apps.get_model("doc","State") + for name, desc in [ + ("WG Document","""4.2.4. WG Document + + The "WG Document" state describes an I-D that has been adopted by an IETF WG and is being actively developed. + + A WG Chair may transition an I-D into the "WG Document" state at any time as long as the I-D is not being considered or developed in any other WG. + + Alternatively, WG Chairs may rely upon new functionality to be added to the Datatracker to automatically move version-00 drafts into the "WG Document" state as described in Section 4.1. + + Under normal conditions, it should not be possible for an I-D to be in the "WG Document" state in more than one WG at a time. This said, I-Ds may be transferred from one WG to another with the consent of the WG Chairs and the responsible ADs."""), + ("Parked WG Document","""4.2.5. Parked WG Document + + A "Parked WG Document" is an I-D that has lost its author or editor, is waiting for another document to be written or for a review to be completed, or cannot be progressed by the working group for some other reason. + + Some of the annotation tags described in Section 4.3 may be used in conjunction with this state to indicate why an I-D has been parked, and/or what may need to happen for the I-D to be un-parked. + + Parking a WG draft will not prevent it from expiring; however, this state can be used to indicate why the I-D has stopped progressing in the WG. + + A "Parked WG Document" that is not expired may be transferred from one WG to another with the consent of the WG Chairs and the responsible ADs."""), + ("Dead WG Document","""4.2.6. Dead WG Document + + A "Dead WG Document" is an I-D that has been abandoned. Note that 'Dead' is not always a final state for a WG I-D. If consensus is subsequently achieved, a "Dead WG Document" may be resurrected. A "Dead WG Document" that is not resurrected will eventually expire. + + Note that an I-D that is declared to be "Dead" in one WG and that is not expired may be transferred to a non-dead state in another WG with the consent of the WG Chairs and the responsible ADs."""), + ("In WG Last Call","""4.2.7. In WG Last Call + + A document "In WG Last Call" is an I-D for which a WG Last Call (WGLC) has been issued and is in progress. + + Note that conducting a WGLC is an optional part of the IETF WG process, per Section 7.4 of RFC 2418 [RFC2418]. + + If a WG Chair decides to conduct a WGLC on an I-D, the "In WG Last Call" state can be used to track the progress of the WGLC. The Chair may configure the Datatracker to send a WGLC message to one or more mailing lists when the Chair moves the I-D into this state. The WG Chair may also be able to select a different set of mailing lists for a different document undergoing a WGLC; some documents may deserve coordination with other WGs. + + A WG I-D in this state should remain "In WG Last Call" until the WG Chair moves it to another state. 
The WG Chair may configure the Datatracker to send an e-mail after a specified period of time to remind or 'nudge' the Chair to conclude the WGLC and to determine the next state for the document. + + It is possible for one WGLC to lead into another WGLC for the same document. For example, an I-D that completed a WGLC as an "Informational" document may need another WGLC if a decision is taken to convert the I-D into a Standards Track document."""), + ("Waiting for Implementation","""In some areas, it can be desirable to wait for multiple interoperable implementations before progressing a draft to be an RFC, and in some WGs this is required. This state should be entered after WG Last Call has completed."""), + ("Held by WG","""Held by WG, see document history for details."""), + ("Waiting for WG Chair Go-Ahead","""4.2.8. Waiting for WG Chair Go-Ahead + + A WG Chair may wish to place an I-D that receives a lot of comments during a WGLC into the "Waiting for WG Chair Go-Ahead" state. This state describes an I-D that has undergone a WGLC; however, the Chair is not yet ready to call consensus on the document. + + If comments from the WGLC need to be responded to, or a revision to the I-D is needed, the Chair may place an I-D into this state until all of the WGLC comments are adequately addressed and the (possibly revised) document is in the I-D repository."""), + ("WG Consensus: Waiting for Write-Up","""4.2.9. WG Consensus: Waiting for Writeup + + A document in the "WG Consensus: Waiting for Writeup" state has essentially completed its development within the working group, and is nearly ready to be sent to the IESG for publication. The last thing to be done is the preparation of a protocol writeup by a Document Shepherd. The IESG requires that a document shepherd writeup be completed before publication of the I-D is requested. The IETF document shepherding process and the role of a WG Document Shepherd is described in RFC 4858 [RFC4858] + + A WG Chair may call consensus on an I-D without a formal WGLC and transition an I-D that was in the "WG Document" state directly into this state. + + The name of this state includes the words "Waiting for Writeup" because a good document shepherd writeup takes time to prepare."""), + ("Submitted to IESG for Publication","""4.2.10. Submitted to IESG for Publication + + This state describes a WG document that has been submitted to the IESG for publication and that has not been sent back to the working group for revision. + + An I-D in this state may be under review by the IESG, it may have been approved and be in the RFC Editor's queue, or it may have been published as an RFC. Other possibilities exist too. The document may be "Dead" (in the IESG state machine) or in a "Do Not Publish" state."""), + ("Candidate for WG Adoption","""The document has been marked as a candidate for WG adoption by the WG Chair. This state can be used before a call for adoption is issued (and the document is put in the "Call For Adoption By WG Issued" state), to indicate that the document is in the queue for a call for adoption, even if none has been issued yet."""), + ("Call For Adoption By WG Issued","""4.2.1. Call for Adoption by WG Issued + + The "Call for Adoption by WG Issued" state should be used to indicate when an I-D is being considered for adoption by an IETF WG. An I-D that is in this state is actively being considered for adoption and has not yet achieved consensus, preference, or selection in the WG. 
+ + This state may be used to describe an I-D that someone has asked a WG to consider for adoption, if the WG Chair has agreed with the request. This state may also be used to identify an I-D that a WG Chair asked an author to write specifically for consideration as a candidate WG item [WGDTSPEC], and/or an I-D that is listed as a 'candidate draft' in the WG's charter. + + Under normal conditions, it should not be possible for an I-D to be in the "Call for Adoption by WG Issued" state in more than one working group at the same time. This said, it is not uncommon for authors to "shop" their I-Ds to more than one WG at a time, with the hope of getting their documents adopted somewhere. + + After this state is implemented in the Datatracker, an I-D that is in the "Call for Adoption by WG Issued" state will not be able to be "shopped" to any other WG without the consent of the WG Chairs and the responsible ADs impacted by the shopping. + + Note that Figure 1 includes an arc leading from this state to outside of the WG state machine. This illustrates that some I-Ds that are considered do not get adopted as WG drafts. An I-D that is not adopted as a WG draft will transition out of the WG state machine and revert back to having no stream-specific state; however, the status change history log of the I-D will record that the I-D was previously in the "Call for Adoption by WG Issued" state."""), + ("Adopted by a WG","""4.2.2. Adopted by a WG + + The "Adopted by a WG" state describes an individual submission I-D that an IETF WG has agreed to adopt as one of its WG drafts. + + WG Chairs who use this state will be able to clearly indicate when their WGs adopt individual submission I-Ds. This will facilitate the Datatracker's ability to correctly capture "Replaces" information for WG drafts and correct "Replaced by" information for individual submission I-Ds that have been replaced by WG drafts. + + This state is needed because the Datatracker uses the filename of an I-D as a key to search its database for status information about the I-D, and because the filename of a WG I-D is supposed to be different from the filename of an individual submission I-D. The filename of an individual submission I-D will typically be formatted as 'draft-author-wgname-topic-nn'. + + The filename of a WG document is supposed to be formatted as 'draft- ietf-wgname-topic-nn'. + + An individual I-D that is adopted by a WG may take weeks or months to be resubmitted by the author as a new (version-00) WG draft. If the "Adopted by a WG" state is not used, the Datatracker has no way to determine that an I-D has been adopted until a new version of the I-D is submitted to the WG by the author and until the I-D is approved for posting by a WG Chair."""), + ("Adopted for WG Info Only","""4.2.3. Adopted for WG Info Only + + The "Adopted for WG Info Only" state describes a document that contains useful information for the WG that adopted it, but the document is not intended to be published as an RFC. The WG will not actively develop the contents of the I-D or progress it for publication as an RFC. 
The only purpose of the I-D is to provide information for internal use by the WG."""), + ]: + State.objects.filter(name=name).update(desc=desc) + +class Migration(migrations.Migration): + + dependencies = [ + ("doc", "0025_storedobject_storedobject_unique_name_per_store"), + ] + + operations = [ + migrations.RunPython(forward, reverse) + ] diff --git a/ietf/doc/migrations/0027_alter_dochistory_title_alter_document_title.py b/ietf/doc/migrations/0027_alter_dochistory_title_alter_document_title.py new file mode 100644 index 0000000000..e0d8560e6f --- /dev/null +++ b/ietf/doc/migrations/0027_alter_dochistory_title_alter_document_title.py @@ -0,0 +1,41 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +import django.core.validators +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0026_change_wg_state_descriptions"), + ] + + operations = [ + migrations.AlterField( + model_name="dochistory", + name="title", + field=models.CharField( + max_length=255, + validators=[ + django.core.validators.ProhibitNullCharactersValidator, # type:ignore + django.core.validators.RegexValidator( + message="Please enter a string without control characters.", + regex="^[^\x01-\x1f]*$", + ), + ], + ), + ), + migrations.AlterField( + model_name="document", + name="title", + field=models.CharField( + max_length=255, + validators=[ + django.core.validators.ProhibitNullCharactersValidator, # type:ignore + django.core.validators.RegexValidator( + message="Please enter a string without control characters.", + regex="^[^\x01-\x1f]*$", + ), + ], + ), + ), + ] diff --git a/ietf/doc/migrations/0028_rfcauthor.py b/ietf/doc/migrations/0028_rfcauthor.py new file mode 100644 index 0000000000..776dc22eb1 --- /dev/null +++ b/ietf/doc/migrations/0028_rfcauthor.py @@ -0,0 +1,84 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models +import django.db.models.deletion +import ietf.utils.models + + +class Migration(migrations.Migration): + dependencies = [ + ("person", "0005_alter_historicalperson_pronouns_selectable_and_more"), + ("doc", "0027_alter_dochistory_title_alter_document_title"), + ] + + operations = [ + migrations.CreateModel( + name="RfcAuthor", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("titlepage_name", models.CharField(max_length=128)), + ("is_editor", models.BooleanField(default=False)), + ( + "affiliation", + models.CharField( + blank=True, + help_text="Organization/company used by author for submission", + max_length=100, + ), + ), + ( + "country", + models.CharField( + blank=True, + help_text="Country used by author for submission", + max_length=255, + ), + ), + ("order", models.IntegerField(default=1)), + ( + "document", + ietf.utils.models.ForeignKey( + limit_choices_to={"type_id": "rfc"}, + on_delete=django.db.models.deletion.CASCADE, + to="doc.document", + ), + ), + ( + "email", + ietf.utils.models.ForeignKey( + blank=True, + help_text="Email address used by author for submission", + null=True, + on_delete=django.db.models.deletion.PROTECT, + to="person.email", + ), + ), + ( + "person", + ietf.utils.models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + to="person.person", + ), + ), + ], + options={ + "ordering": ["document", "order"], + "indexes": [ + models.Index( + fields=["document", "order"], + name="doc_rfcauth_documen_6b5dc4_idx", + ) + ], + }, + ), + ] diff 
--git a/ietf/doc/migrations/0029_editedrfcauthorsdocevent.py b/ietf/doc/migrations/0029_editedrfcauthorsdocevent.py new file mode 100644 index 0000000000..60837c5cb2 --- /dev/null +++ b/ietf/doc/migrations/0029_editedrfcauthorsdocevent.py @@ -0,0 +1,30 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0028_rfcauthor"), + ] + + operations = [ + migrations.CreateModel( + name="EditedRfcAuthorsDocEvent", + fields=[ + ( + "docevent_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="doc.docevent", + ), + ), + ], + bases=("doc.docevent",), + ), + ] diff --git a/ietf/doc/migrations/0030_alter_dochistory_title_alter_document_title.py b/ietf/doc/migrations/0030_alter_dochistory_title_alter_document_title.py new file mode 100644 index 0000000000..9ee858b2e8 --- /dev/null +++ b/ietf/doc/migrations/0030_alter_dochistory_title_alter_document_title.py @@ -0,0 +1,41 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +import django.core.validators +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0029_editedrfcauthorsdocevent"), + ] + + operations = [ + migrations.AlterField( + model_name="dochistory", + name="title", + field=models.CharField( + max_length=255, + validators=[ + django.core.validators.ProhibitNullCharactersValidator(), + django.core.validators.RegexValidator( + message="Please enter a string without control characters.", + regex="^[^\x01-\x1f]*$", + ), + ], + ), + ), + migrations.AlterField( + model_name="document", + name="title", + field=models.CharField( + max_length=255, + validators=[ + django.core.validators.ProhibitNullCharactersValidator(), + django.core.validators.RegexValidator( + message="Please enter a string without control characters.", + regex="^[^\x01-\x1f]*$", + ), + ], + ), + ), + ] diff --git a/ietf/doc/migrations/0031_change_draft_stream_ietf_state_descriptions.py b/ietf/doc/migrations/0031_change_draft_stream_ietf_state_descriptions.py new file mode 100644 index 0000000000..c664126da3 --- /dev/null +++ b/ietf/doc/migrations/0031_change_draft_stream_ietf_state_descriptions.py @@ -0,0 +1,57 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +from django.db import migrations + + +def forward(apps, schema_editor): + State = apps.get_model("doc", "State") + for name, desc in [ + ( + "Adopted by a WG", + "The individual submission document has been adopted by the Working Group (WG), but some administrative matter still needs to be completed (e.g., a WG document replacing this document with the typical naming convention of 'draft-ietf-wgname-topic-nn' has not yet been submitted).", + ), + ( + "WG Document", + "The document has been identified as a Working Group (WG) document and is under development per Section 7.2 of RFC2418.", + ), + ( + "Waiting for WG Chair Go-Ahead", + "The Working Group (WG) document has completed Working Group Last Call (WGLC), but the WG chairs are not yet ready to call consensus on the document. 
The reasons for this may include comments from the WGLC need to be responded to, or a revision to the document is needed.", + ), + ( + "Submitted to IESG for Publication", + "The Working Group (WG) document has been submitted to the Internet Engineering Steering Group (IESG) for evaluation and publication per Section 7.4 of RFC2418. See the “IESG State” or “RFC Editor State” for further details on the state of the document.", + ), + ]: + State.objects.filter(name=name).update(desc=desc, type="draft-stream-ietf") + + +def reverse(apps, schema_editor): + State = apps.get_model("doc", "State") + for name, desc in [ + ( + "Adopted by a WG", + "The individual submission document has been adopted by the Working Group (WG), but a WG document replacing this document with the typical naming convention of 'draft- ietf-wgname-topic-nn' has not yet been submitted.", + ), + ( + "WG Document", + "The document has been adopted by the Working Group (WG) and is under development. A document can only be adopted by one WG at a time. However, a document may be transferred between WGs.", + ), + ( + "Waiting for WG Chair Go-Ahead", + "The Working Group (WG) document has completed Working Group Last Call (WGLC), but the WG chair(s) are not yet ready to call consensus on the document. The reasons for this may include comments from the WGLC need to be responded to, or a revision to the document is needed", + ), + ( + "Submitted to IESG for Publication", + "The Working Group (WG) document has left the WG and been submitted to the Internet Engineering Steering Group (IESG) for evaluation and publication. See the “IESG State” or “RFC Editor State” for further details on the state of the document.", + ), + ]: + State.objects.filter(name=name).update(desc=desc, type="draft-stream-ietf") + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0030_alter_dochistory_title_alter_document_title"), + ] + + operations = [migrations.RunPython(forward, reverse)] diff --git a/ietf/doc/migrations/0032_remove_rfcauthor_email.py b/ietf/doc/migrations/0032_remove_rfcauthor_email.py new file mode 100644 index 0000000000..a0e147da59 --- /dev/null +++ b/ietf/doc/migrations/0032_remove_rfcauthor_email.py @@ -0,0 +1,16 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0031_change_draft_stream_ietf_state_descriptions"), + ] + + operations = [ + migrations.RemoveField( + model_name="rfcauthor", + name="email", + ), + ] diff --git a/ietf/doc/migrations/0033_dochistory_keywords_document_keywords.py b/ietf/doc/migrations/0033_dochistory_keywords_document_keywords.py new file mode 100644 index 0000000000..5e2513e15a --- /dev/null +++ b/ietf/doc/migrations/0033_dochistory_keywords_document_keywords.py @@ -0,0 +1,31 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +from django.db import migrations, models +import ietf.doc.models + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0032_remove_rfcauthor_email"), + ] + + operations = [ + migrations.AddField( + model_name="dochistory", + name="keywords", + field=models.JSONField( + default=list, + max_length=1000, + validators=[ietf.doc.models.validate_doc_keywords], + ), + ), + migrations.AddField( + model_name="document", + name="keywords", + field=models.JSONField( + default=list, + max_length=1000, + validators=[ietf.doc.models.validate_doc_keywords], + ), + ), + ] diff --git a/ietf/doc/models.py b/ietf/doc/models.py index 
30d95fbf50..cc79b73831 100644 --- a/ietf/doc/models.py +++ b/ietf/doc/models.py @@ -1,25 +1,34 @@ -# Copyright The IETF Trust 2010-2023, All Rights Reserved +# Copyright The IETF Trust 2010-2026, All Rights Reserved # -*- coding: utf-8 -*- +from collections import namedtuple import datetime import logging -import io import os import django.db import rfc2html +from io import BufferedReader from pathlib import Path + +from django.core.exceptions import ValidationError +from django.db.models import Q from lxml import etree -from typing import Optional, TYPE_CHECKING +from typing import Optional, Protocol, TYPE_CHECKING, Union from weasyprint import HTML as wpHTML from weasyprint.text.fonts import FontConfiguration from django.db import models from django.core import checks +from django.core.files.base import File from django.core.cache import caches -from django.core.validators import URLValidator, RegexValidator +from django.core.validators import ( + URLValidator, + RegexValidator, + ProhibitNullCharactersValidator, +) from django.urls import reverse as urlreverse from django.contrib.contenttypes.models import ContentType from django.conf import settings @@ -31,14 +40,19 @@ import debug # pyflakes:ignore from ietf.group.models import Group +from ietf.doc.storage_utils import ( + store_str as utils_store_str, + store_bytes as utils_store_bytes, + store_file as utils_store_file +) from ietf.name.models import ( DocTypeName, DocTagName, StreamName, IntendedStdLevelName, StdLevelName, DocRelationshipName, DocReminderTypeName, BallotPositionName, ReviewRequestStateName, ReviewAssignmentStateName, FormalLanguageName, DocUrlTagName, ExtResourceName) from ietf.person.models import Email, Person from ietf.person.utils import get_active_balloters from ietf.utils import log -from ietf.utils.admin import admin_link from ietf.utils.decorators import memoize +from ietf.utils.text import decode_document_content from ietf.utils.validators import validate_no_control_chars from ietf.utils.mail import formataddr from ietf.utils.models import ForeignKey @@ -84,7 +98,7 @@ class State(models.Model): desc = models.TextField(blank=True) order = models.IntegerField(default=0) - next_states = models.ManyToManyField('State', related_name="previous_states", blank=True) + next_states = models.ManyToManyField('doc.State', related_name="previous_states", blank=True) def __str__(self): return self.name @@ -97,12 +111,27 @@ class Meta: IESG_STATCHG_CONFLREV_ACTIVE_STATES = ("iesgeval", "defer") IESG_SUBSTATE_TAGS = ('ad-f-up', 'need-rev', 'extpty') + +def validate_doc_keywords(value): + if ( + not isinstance(value, list | tuple | set) + or not all(isinstance(elt, str) for elt in value) + ): + raise ValidationError("Value must be an array of strings") + + class DocumentInfo(models.Model): """Any kind of document. Draft, RFC, Charter, IPR Statement, Liaison Statement""" time = models.DateTimeField(default=timezone.now) # should probably have auto_now=True type = ForeignKey(DocTypeName, blank=True, null=True) # Draft, Agenda, Minutes, Charter, Discuss, Guideline, Email, Review, Issue, Wiki, External ... 
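As an aside on the validate_doc_keywords helper introduced above, here is a minimal sketch of its contract (the calls below are hypothetical usage for illustration, not part of this patch): lists, tuples, and sets of strings are accepted, and anything else raises ValidationError.

# Hypothetical usage of validate_doc_keywords (illustrative only)
validate_doc_keywords(["http", "quic"])   # accepted: list of strings
validate_doc_keywords(("tls",))           # accepted: tuple of strings
validate_doc_keywords("http")             # raises ValidationError: a bare str is not a list/tuple/set
validate_doc_keywords(["http", 42])       # raises ValidationError: every element must be a str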
- title = models.CharField(max_length=255, validators=[validate_no_control_chars, ]) + title = models.CharField( + max_length=255, + validators=[ + ProhibitNullCharactersValidator(), + validate_no_control_chars, + ], + ) states = models.ManyToManyField(State, blank=True) # plain state (Active/Expired/...), IESG state, stream state tags = models.ManyToManyField(DocTagName, blank=True) # Revised ID Needed, ExternalParty, AD Followup, ... @@ -123,7 +152,18 @@ class DocumentInfo(models.Model): external_url = models.URLField(blank=True) uploaded_filename = models.TextField(blank=True) note = models.TextField(blank=True) - internal_comments = models.TextField(blank=True) + rfc_number = models.PositiveIntegerField(blank=True, null=True) # only valid for type="rfc" + keywords = models.JSONField( + default=list, + max_length=1000, + validators=[validate_doc_keywords], + ) + + @property + def doi(self) -> str | None: + if self.type_id == "rfc" and self.rfc_number is not None: + return f"{settings.IETF_DOI_PREFIX}/RFC{self.rfc_number:04d}" + return None def file_extension(self): if not hasattr(self, '_cached_extension'): @@ -136,20 +176,20 @@ def file_extension(self): def get_file_path(self): if not hasattr(self, '_cached_file_path'): - if self.type_id == "draft": + if self.type_id == "rfc": + self._cached_file_path = settings.RFC_PATH + elif self.type_id == "draft": if self.is_dochistory(): self._cached_file_path = settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR else: - if self.get_state_slug() == "rfc": - self._cached_file_path = settings.RFC_PATH + # This could be simplified since anything in INTERNET_DRAFT_PATH is also already in INTERNET_ALL_DRAFTS_ARCHIVE_DIR + draft_state = self.get_state('draft') + if draft_state and draft_state.slug == 'active': + self._cached_file_path = settings.INTERNET_DRAFT_PATH else: - draft_state = self.get_state('draft') - if draft_state and draft_state.slug == 'active': - self._cached_file_path = settings.INTERNET_DRAFT_PATH - else: - self._cached_file_path = settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR + self._cached_file_path = settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR elif self.meeting_related() and self.type_id in ( - "agenda", "minutes", "slides", "bluesheets", "procmaterials", "chatlog", "polls" + "agenda", "minutes", "narrativeminutes", "slides", "bluesheets", "procmaterials", "chatlog", "polls" ): meeting = self.get_related_meeting() if meeting is not None: @@ -172,17 +212,16 @@ def get_base_name(self): if not hasattr(self, '_cached_base_name'): if self.uploaded_filename: self._cached_base_name = self.uploaded_filename + elif self.type_id == 'rfc': + self._cached_base_name = "%s.txt" % self.name elif self.type_id == 'draft': if self.is_dochistory(): self._cached_base_name = "%s-%s.txt" % (self.doc.name, self.rev) else: - if self.get_state_slug() == 'rfc': - self._cached_base_name = "%s.txt" % self.canonical_name() - else: - self._cached_base_name = "%s-%s.txt" % (self.name, self.rev) + self._cached_base_name = "%s-%s.txt" % (self.name, self.rev) elif self.type_id in ["slides", "agenda", "minutes", "bluesheets", "procmaterials", ] and self.meeting_related(): ext = 'pdf' if self.type_id == 'procmaterials' else 'txt' - self._cached_base_name = f'{self.canonical_name()}-{self.rev}.{ext}' + self._cached_base_name = f'{self.name}-{self.rev}.{ext}' elif self.type_id == 'review': # TODO: This will be wrong if a review is updated on the same day it was created (or updated more than once on the same day) self._cached_base_name = "%s.txt" % self.name @@ -190,9 +229,9 @@ def 
get_base_name(self): self._cached_base_name = "%s-%s.md" % (self.name, self.rev) else: if self.rev: - self._cached_base_name = "%s-%s.txt" % (self.canonical_name(), self.rev) + self._cached_base_name = "%s-%s.txt" % (self.name, self.rev) else: - self._cached_base_name = "%s.txt" % (self.canonical_name(), ) + self._cached_base_name = "%s.txt" % (self.name, ) return self._cached_base_name def get_file_name(self): @@ -200,27 +239,38 @@ def get_file_name(self): self._cached_file_name = os.path.join(self.get_file_path(), self.get_base_name()) return self._cached_file_name - def revisions(self): + + def revisions_by_dochistory(self): revisions = [] - doc = self.doc if isinstance(self, DocHistory) else self - for e in doc.docevent_set.filter(type='new_revision').distinct(): - if e.rev and not e.rev in revisions: - revisions.append(e.rev) - if not doc.rev in revisions: - revisions.append(doc.rev) - revisions.sort() + if self.type_id != "rfc": + for h in self.history_set.order_by("time", "id"): + if h.rev and not h.rev in revisions: + revisions.append(h.rev) + if not self.rev in revisions: + revisions.append(self.rev) return revisions + def revisions_by_newrevisionevent(self): + revisions = [] + if self.type_id != "rfc": + doc = self.doc if isinstance(self, DocHistory) else self + for e in doc.docevent_set.filter(type='new_revision').distinct(): + if e.rev and not e.rev in revisions: + revisions.append(e.rev) + if not doc.rev in revisions: + revisions.append(doc.rev) + revisions.sort() + return revisions def get_href(self, meeting=None): - return self._get_ref(meeting=meeting,meeting_doc_refs=settings.MEETING_DOC_HREFS) + return self._get_ref(meeting=meeting, versioned=True) def get_versionless_href(self, meeting=None): - return self._get_ref(meeting=meeting,meeting_doc_refs=settings.MEETING_DOC_GREFS) + return self._get_ref(meeting=meeting, versioned=False) - def _get_ref(self, meeting=None, meeting_doc_refs=settings.MEETING_DOC_HREFS): + def _get_ref(self, meeting=None, versioned=True): """ Returns an url to the document text. This differs from .get_absolute_url(), which returns an url to the datatracker page for the document. @@ -229,12 +279,16 @@ def _get_ref(self, meeting=None, meeting_doc_refs=settings.MEETING_DOC_HREFS): # the earlier resolution order, but there's at the moment one single # instance which matches this (with correct results), so we won't # break things all over the place. - if not hasattr(self, '_cached_href'): + cache_attr = "_cached_href" if versioned else "_cached_versionless_href" + if not hasattr(self, cache_attr): validator = URLValidator() if self.external_url and self.external_url.split(':')[0] in validator.schemes: validator(self.external_url) return self.external_url + meeting_doc_refs = ( + settings.MEETING_DOC_HREFS if versioned else settings.MEETING_DOC_GREFS + ) if self.type_id in settings.DOC_HREFS and self.type_id in meeting_doc_refs: if self.meeting_related(): self.is_meeting_related = True @@ -244,7 +298,7 @@ def _get_ref(self, meeting=None, meeting_doc_refs=settings.MEETING_DOC_HREFS): format = settings.DOC_HREFS[self.type_id] elif self.type_id in settings.DOC_HREFS: self.is_meeting_related = False - if self.is_rfc(): + if self.type_id == "rfc": format = settings.DOC_HREFS['rfc'] else: format = settings.DOC_HREFS[self.type_id] @@ -271,10 +325,23 @@ def _get_ref(self, meeting=None, meeting_doc_refs=settings.MEETING_DOC_HREFS): info = dict(doc=self) href = format.format(**info) + + # For slides that are not meeting-related, we need to know the file extension. 
+ # Assume we have access to the same files as settings.DOC_HREFS["slides"] and + # see what extension is available + if self.type_id == "slides" and not self.meeting_related() and not href.endswith("/"): + filepath = Path(self.get_file_path()) / self.get_base_name() # start with this + if not filepath.exists(): + # Look for other extensions - grab the first one, sorted for stability + for existing in sorted(filepath.parent.glob(f"{filepath.stem}.*")): + filepath = filepath.with_suffix(existing.suffix) + break + href += filepath.suffix # tack on the extension + if href.startswith('/'): href = settings.IDTRACKER_BASE_URL + href - self._cached_href = href - return self._cached_href + setattr(self, cache_attr, href) + return getattr(self, cache_attr) def set_state(self, state): """Switch state type implicit in state to state. This just @@ -334,7 +401,9 @@ def friendly_state(self): if not state: return "Unknown state" - if self.type_id == 'draft': + if self.type_id == "rfc": + return f"RFC {self.rfc_number} ({self.std_level})" + elif self.type_id == 'draft': iesg_state = self.get_state("draft-iesg") iesg_state_summary = None if iesg_state: @@ -343,13 +412,15 @@ def friendly_state(self): iesg_state_summary = iesg_state.name if iesg_substate: iesg_state_summary = iesg_state_summary + "::"+"::".join(tag.name for tag in iesg_substate) - - if state.slug == "rfc": - return "RFC %s (%s)" % (self.rfc_number(), self.std_level) + + rfc = self.became_rfc() + if rfc: + return f"Became RFC {rfc.rfc_number} ({rfc.std_level})" + elif state.slug == "repl": rs = self.related_that("replaces") if rs: - return mark_safe("Replaced by " + ", ".join("%s" % (urlreverse('ietf.doc.views_doc.document_main', kwargs=dict(name=alias.document.name)), alias.document) for alias in rs)) + return mark_safe("Replaced by " + ", ".join("%s" % (urlreverse('ietf.doc.views_doc.document_main', kwargs=dict(name=related.name)), related) for related in rs)) else: return "Replaced" elif state.slug == "active": @@ -375,30 +446,56 @@ def friendly_state(self): else: return state.name - def is_rfc(self): - if not hasattr(self, '_cached_is_rfc'): - self._cached_is_rfc = self.pk and self.type_id == 'draft' and self.states.filter(type='draft',slug='rfc').exists() - return self._cached_is_rfc - - def rfc_number(self): - if not hasattr(self, '_cached_rfc_number'): - self._cached_rfc_number = None - if self.is_rfc(): - n = self.canonical_name() - if n.startswith("rfc"): - self._cached_rfc_number = n[3:] + def author_names(self): + """Author names as a list of strings""" + names = [] + if self.type_id == "rfc" and self.rfcauthor_set.exists(): + for author in self.rfcauthor_set.select_related("person"): + if author.person: + names.append(author.person.name) else: - if isinstance(self,Document): - logger.error("Document self.is_rfc() is True but self.canonical_name() is %s" % n) - return self._cached_rfc_number + # titlepage_name cannot be blank + names.append(author.titlepage_name) + else: + names = [ + author.person.name + for author in self.documentauthor_set.select_related("person") + ] + return names + + def author_persons_or_names(self): + """Authors as a list of named tuples with person and/or titlepage_name""" + Author = namedtuple("Author", "person titlepage_name") + persons_or_names = [] + if self.type_id=="rfc" and self.rfcauthor_set.exists(): + for author in self.rfcauthor_set.select_related("person"): + persons_or_names.append(Author(person=author.person, titlepage_name=author.titlepage_name)) + else: + for author in 
self.documentauthor_set.select_related("person"): + persons_or_names.append(Author(person=author.person, titlepage_name="")) + return persons_or_names - @property - def rfcnum(self): - return self.rfc_number() + def author_persons(self): + """Authors as a list of Persons + + Omits any RfcAuthors with a null person field. + """ + if self.type_id == "rfc" and self.rfcauthor_set.exists(): + authors_qs = self.rfcauthor_set.filter(person__isnull=False) + else: + authors_qs = self.documentauthor_set.all() + return [a.person for a in authors_qs.select_related("person")] def author_list(self): + """List of author emails""" + if self.type_id == "rfc" and self.rfcauthor_set.exists(): + author_qs = self.rfcauthor_set.select_related("person").order_by("order") + else: + author_qs = self.documentauthor_set.select_related("email").order_by( + "order" + ) best_addresses = [] - for author in self.documentauthor_set.all(): + for author in author_qs: if author.email: if author.email.active or not author.email.person: best_addresses.append(author.email.address) @@ -406,9 +503,6 @@ def author_list(self): best_addresses.append(author.email.person.email_address()) return ", ".join(best_addresses) - def authors(self): - return [ a.person for a in self.documentauthor_set.all() ] - # This, and several other ballot related functions here, assume that there is only one active ballot for a document at any point in time. # If that assumption is violated, they will only expose the most recently created ballot def ballot_open(self, ballot_type_slug): @@ -433,7 +527,7 @@ def has_rfc_editor_note(self): return e != None and (e.text != "") def meeting_related(self): - if self.type_id in ("agenda","minutes","bluesheets","slides","recording","procmaterials","chatlog","polls"): + if self.type_id in ("agenda","minutes", "narrativeminutes", "bluesheets","slides","recording","procmaterials","chatlog","polls"): return self.type_id != "slides" or self.get_state_slug('reuse_policy')=='single' return False @@ -468,9 +562,9 @@ def relations_that(self, relationship): if not isinstance(relationship, tuple): raise TypeError("Expected a string or tuple, received %s" % type(relationship)) if isinstance(self, Document): - return RelatedDocument.objects.filter(target__docs=self, relationship__in=relationship).select_related('source') + return RelatedDocument.objects.filter(target=self, relationship__in=relationship).select_related('source') elif isinstance(self, DocHistory): - return RelatedDocHistory.objects.filter(target__docs=self.doc, relationship__in=relationship).select_related('source') + return RelatedDocHistory.objects.filter(target=self.doc, relationship__in=relationship).select_related('source') else: raise TypeError("Expected method called on Document or DocHistory") @@ -504,15 +598,14 @@ def all_relations_that_doc(self, relationship, related=None): for r in rels: if not r in related: related += ( r, ) - for doc in r.target.docs.all(): - related = doc.all_relations_that_doc(relationship, related) + related = r.target.all_relations_that_doc(relationship, related) return related def related_that(self, relationship): - return list(set([x.source.docalias.get(name=x.source.name) for x in self.relations_that(relationship)])) + return list(set([x.source for x in self.relations_that(relationship)])) def all_related_that(self, relationship, related=None): - return list(set([x.source.docalias.get(name=x.source.name) for x in self.all_relations_that(relationship)])) + return list(set([x.source for x in 
self.all_relations_that(relationship)])) def related_that_doc(self, relationship): return list(set([x.target for x in self.relations_that_doc(relationship)])) @@ -521,42 +614,43 @@ def all_related_that_doc(self, relationship, related=None): return list(set([x.target for x in self.all_relations_that_doc(relationship)])) def replaces(self): - return set([ d for r in self.related_that_doc("replaces") for d in r.docs.all() ]) - - def replaces_canonical_name(self): - s = set([ r.document for r in self.related_that_doc("replaces")]) - first = list(s)[0] if s else None - return None if first is None else first.filename_with_rev() + return self.related_that_doc("replaces") def replaced_by(self): return set([ r.document for r in self.related_that("replaces") ]) - def text(self): + def _text_path(self): path = self.get_file_name() root, ext = os.path.splitext(path) txtpath = root+'.txt' if ext != '.txt' and os.path.exists(txtpath): path = txtpath - try: - with io.open(path, 'rb') as file: - raw = file.read() - except IOError: + return path + + def text_exists(self): + path = Path(self._text_path()) + return path.exists() + + def text(self, size = -1): + path = Path(self._text_path()) + if not path.exists(): return None try: - text = raw.decode('utf-8') - except UnicodeDecodeError: - text = raw.decode('latin-1') - # - return text + with path.open('rb') as file: + raw = file.read(size) + except IOError as e: + log.log(f"Error reading text for {path}: {e}") + return None + return decode_document_content(raw) def text_or_error(self): return self.text() or "Error; cannot read '%s'"%self.get_base_name() def html_body(self, classes=""): - if self.get_state_slug() == "rfc": + if self.type_id == "rfc": try: html = Path( - os.path.join(settings.RFC_PATH, self.canonical_name() + ".html") + os.path.join(settings.RFC_PATH, self.name + ".html") ).read_text() except (IOError, UnicodeDecodeError): return None @@ -648,6 +742,9 @@ def pdfized(self): ) except AssertionError: pdf = None + except Exception as e: + log.log('weasyprint failed:'+str(e)) + raise if pdf: cache.set(cache_key, pdf, settings.PDFIZER_CACHE_TIME) return pdf @@ -656,54 +753,140 @@ def references(self): return self.relations_that_doc(('refnorm','refinfo','refunk','refold')) def referenced_by(self): - return self.relations_that(('refnorm','refinfo','refunk','refold')).filter(source__states__type__slug='draft',source__states__slug__in=['rfc','active']) - + return self.relations_that(("refnorm", "refinfo", "refunk", "refold")).filter( + models.Q( + source__type__slug="draft", + source__states__type__slug="draft", + source__states__slug="active", + ) + | models.Q(source__type__slug="rfc") + ).distinct() + def referenced_by_rfcs(self): - return self.relations_that(('refnorm','refinfo','refunk','refold')).filter(source__states__type__slug='draft',source__states__slug='rfc') + """Get refs to this doc from RFCs""" + return self.relations_that(("refnorm", "refinfo", "refunk", "refold")).filter( + source__type__slug="rfc" + ) + def became_rfc(self): + if not hasattr(self, "_cached_became_rfc"): + doc = self if isinstance(self, Document) else self.doc + self._cached_became_rfc = next(iter(doc.related_that_doc("became_rfc")), None) + return self._cached_became_rfc + + def came_from_draft(self): + if not hasattr(self, "_cached_came_from_draft"): + doc = self if isinstance(self, Document) else self.doc + self._cached_came_from_draft = next(iter(doc.related_that("became_rfc")), None) + return self._cached_came_from_draft + + def contains(self): + return 
self.related_that_doc("contains") + + def part_of(self): + return self.related_that("contains") + + def referenced_by_rfcs_as_rfc_or_draft(self): + """Get refs to this doc, or a draft/rfc it came from, from an RFC""" + refs_to = self.referenced_by_rfcs() + if self.type_id == "rfc" and self.came_from_draft(): + refs_to |= self.came_from_draft().referenced_by_rfcs() + return refs_to + + def sent_to_rfc_editor_event(self): + if self.stream_id == "ietf": + return self.docevent_set.filter(type="iesg_approved").order_by("-time").first() + elif self.stream_id in ["editorial", "iab", "irtf", "ise"]: + return self.docevent_set.filter(type="requested_publication").order_by("-time").first() + else: + return None class Meta: abstract = True + +class HasNameRevAndTypeIdProtocol(Protocol): + """Typing Protocol describing a class that has name, rev, and type_id properties""" + @property + def name(self) -> str: ... + @property + def rev(self) -> str: ... + @property + def type_id(self) -> str: ... + + +class StorableMixin: + """Mixin that adds storage helpers to a DocumentInfo subclass""" + def store_str( + self: HasNameRevAndTypeIdProtocol, + name: str, + content: str, + allow_overwrite: bool = False + ) -> None: + return utils_store_str(self.type_id, name, content, allow_overwrite, self.name, self.rev) + + def store_bytes( + self: HasNameRevAndTypeIdProtocol, + name: str, + content: bytes, + allow_overwrite: bool = False, + doc_name: Optional[str] = None, + doc_rev: Optional[str] = None + ) -> None: + return utils_store_bytes(self.type_id, name, content, allow_overwrite, self.name, self.rev) + + def store_file( + self: HasNameRevAndTypeIdProtocol, + name: str, + file: Union[File, BufferedReader], + allow_overwrite: bool = False, + doc_name: Optional[str] = None, + doc_rev: Optional[str] = None + ) -> None: + return utils_store_file(self.type_id, name, file, allow_overwrite, self.name, self.rev) + + STATUSCHANGE_RELATIONS = ('tops','tois','tohist','toinf','tobcp','toexp') class RelatedDocument(models.Model): source = ForeignKey('Document') - target = ForeignKey('DocAlias') + target = ForeignKey('Document', related_name='targets_related') relationship = ForeignKey(DocRelationshipName) + originaltargetaliasname = models.CharField(max_length=255, null=True, blank=True) def action(self): return self.relationship.name def __str__(self): return u"%s %s %s" % (self.source.name, self.relationship.name.lower(), self.target.name) def is_downref(self): - if self.source.type.slug != "draft" or self.relationship.slug not in [ + if self.source.type_id not in ["draft","rfc"] or self.relationship.slug not in [ "refnorm", "refold", "refunk", ]: return None - state = self.source.get_state() - if state and state.slug == "rfc": - source_lvl = self.source.std_level.slug if self.source.std_level else None - elif self.source.intended_std_level: - source_lvl = self.source.intended_std_level.slug + if self.source.type_id == "rfc": + source_lvl = self.source.std_level_id + elif self.source.type_id in ["bcp","std"]: + source_lvl = self.source.type_id else: - source_lvl = None + source_lvl = self.source.intended_std_level_id if source_lvl not in ["bcp", "ps", "ds", "std", "unkn"]: return None - if self.target.document.get_state().slug == "rfc": - if not self.target.document.std_level: - target_lvl = "unkn" + if self.target.type_id == 'rfc': + if not self.target.std_level: + target_lvl = 'unkn' else: - target_lvl = self.target.document.std_level.slug + target_lvl = self.target.std_level_id + elif self.target.type_id in ["bcp", 
"std"]: + target_lvl = self.target.type_id else: - if not self.target.document.intended_std_level: - target_lvl = "unkn" + if not self.target.intended_std_level: + target_lvl = 'unkn' else: - target_lvl = self.target.document.intended_std_level.slug + target_lvl = self.target.intended_std_level_id if self.relationship.slug not in ["refnorm", "refunk"]: return None @@ -712,7 +895,7 @@ def is_downref(self): return None pos_downref = ( - "Downref" if self.relationship.slug != "refunk" else "Possible Downref" + "Downref" if self.relationship_id != "refunk" else "Possible Downref" ) if source_lvl in ["bcp", "ps", "ds", "std"] and target_lvl in ["inf", "exp"]: @@ -734,12 +917,60 @@ def is_downref(self): def is_approved_downref(self): - if self.target.document.get_state().slug == 'rfc': - if RelatedDocument.objects.filter(relationship_id='downref-approval', target=self.target): + if self.target.type_id == 'rfc': + if RelatedDocument.objects.filter(relationship_id='downref-approval', target=self.target).exists(): return "Approved Downref" return False +class RfcAuthor(models.Model): + """Captures the authors of an RFC as represented on the RFC title page. + + This deviates from DocumentAuthor in that it does not get moved into the DocHistory + hierarchy as documents are saved. It will attempt to preserve email, country, and affiliation + from the DocumentAuthor objects associated with the draft leading to this RFC (which + may be wrong if the author moves or changes affiliation while the document is in the + queue). + + It does not, at this time, attempt to capture the authors from anything _but_ the title + page. The datatracker may know more about such authors based on information from the draft + leading to the RFC, and future work may take that into account. + + Once doc.rfcauthor_set.exists() for a doc of type `rfc`, doc.documentauthor_set should be + ignored. + """ + + document = ForeignKey( + "Document", + on_delete=models.CASCADE, + limit_choices_to={"type_id": "rfc"}, # only affects ModelForms (e.g., admin) + ) + titlepage_name = models.CharField(max_length=128, blank=False) + is_editor = models.BooleanField(default=False) + person = ForeignKey(Person, null=True, blank=True, on_delete=models.PROTECT) + affiliation = models.CharField(max_length=100, blank=True, help_text="Organization/company used by author for submission") + country = models.CharField(max_length=255, blank=True, help_text="Country used by author for submission") + order = models.IntegerField(default=1) + + def __str__(self): + return u"%s %s (%s)" % (self.document.name, self.person, self.order) + + class Meta: + ordering=["document", "order"] + indexes=[ + models.Index(fields=["document", "order"]) + ] + + @property + def email(self) -> Email | None: + return self.person.email() if self.person else None + + def format_for_titlepage(self): + if self.is_editor: + return f"{self.titlepage_name}, Ed." 
+ return self.titlepage_name + + class DocumentAuthorInfo(models.Model): person = ForeignKey(Person) # email should only be null for some historic documents @@ -789,7 +1020,7 @@ class Meta: def role_for_doc(self): """Brief string description of this person's relationship to the doc""" roles = [] - if self.person in self.document.authors(): + if self.person in self.document.author_persons(): roles.append('Author') if self.person == self.document.ad: roles.append('Responsible AD') @@ -808,13 +1039,25 @@ def role_for_doc(self): roles.append('Action Holder') return ', '.join(roles) +# N.B., at least a couple dozen documents exist that do not satisfy this validator validate_docname = RegexValidator( r'^[-a-z0-9]+$', "Provide a valid document name consisting of lowercase letters, numbers and hyphens.", 'invalid' ) -class Document(DocumentInfo): + +SUBSERIES_DOC_TYPE_IDS = ("bcp", "fyi", "std") + + +class DocumentQuerySet(models.QuerySet): + def subseries_docs(self): + return self.filter(type_id__in=SUBSERIES_DOC_TYPE_IDS) + + +class Document(StorableMixin, DocumentInfo): + objects = DocumentQuerySet.as_manager() + name = models.CharField(max_length=255, validators=[validate_docname,], unique=True) # immutable action_holders = models.ManyToManyField(Person, through=DocumentActionHolder, blank=True) @@ -831,7 +1074,7 @@ def get_absolute_url(self): name = self.name url = None if self.type_id == "draft" and self.get_state_slug() == "rfc": - name = self.canonical_name() + name = self.name url = urlreverse('ietf.doc.views_doc.document_main', kwargs={ 'name': name }, urlconf="ietf.urls") elif self.type_id in ('slides','bluesheets','recording'): session = self.session_set.first() @@ -869,28 +1112,8 @@ def latest_event(self, *args, **filter_args): e = model.objects.filter(doc=self).filter(**filter_args).order_by('-time', '-id').first() return e - def canonical_name(self): - if not hasattr(self, '_canonical_name'): - name = self.name - if self.type_id == "draft" and self.get_state_slug() == "rfc": - a = self.docalias.filter(name__startswith="rfc").order_by('-name').first() - if a: - name = a.name - elif self.type_id == "charter": - from ietf.doc.utils_charter import charter_name_for_group # Imported locally to avoid circular imports - try: - name = charter_name_for_group(self.chartered_group) - except Group.DoesNotExist: - pass - self._canonical_name = name - return self._canonical_name - - - def canonical_docalias(self): - return self.docalias.get(name=self.name) - def display_name(self): - name = self.canonical_name() + name = self.name if name.startswith('rfc'): name = name.upper() return name @@ -940,6 +1163,22 @@ def request_closed_time(self, review_req): e = self.latest_event(ReviewRequestDocEvent, type="closed_review_request", review_request=review_req) return e.time if e and e.time else None + @property + def area(self) -> Group | None: + """Get area for document, if one exists + + None for non-IETF-stream documents. N.b., this is stricter than Group.area() and + uses different logic from Document.area_acronym(). 
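As a minimal illustration of how calling code might use the new area property (the doc variable and the label below are assumptions for this sketch, not part of the patch):

# Illustrative only: resolve the area for an IETF-stream document
area = doc.area                               # a Group of type "area", or None
area_label = area.acronym if area else "none"
# Documents outside the IETF stream, or whose group has no "area" parent, yield None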
+ """ + if self.stream_id != "ietf": + return None + if self.group is None: + return None + parent = self.group.parent + if parent.type_id == "area": + return parent + return None + def area_acronym(self): g = self.group if g: @@ -985,21 +1224,31 @@ def displayname_with_link(self): def ipr(self,states=settings.PUBLISH_IPR_STATES): """Returns the IPR disclosures against this document (as a queryset over IprDocRel).""" - from ietf.ipr.models import IprDocRel - return IprDocRel.objects.filter(document__docs=self, disclosure__state__in=states) + # from ietf.ipr.models import IprDocRel + # return IprDocRel.objects.filter(document__docs=self, disclosure__state__in=states) # TODO - clear these comments away + return self.iprdocrel_set.filter(disclosure__state__in=states) def related_ipr(self): """Returns the IPR disclosures against this document and those documents this document directly or indirectly obsoletes or replaces """ from ietf.ipr.models import IprDocRel - iprs = IprDocRel.objects.filter(document__in=list(self.docalias.all())+self.all_related_that_doc(('obs','replaces'))).filter(disclosure__state__in=settings.PUBLISH_IPR_STATES).values_list('disclosure', flat=True).distinct() + iprs = ( + IprDocRel.objects.filter( + document__in=[self] + + self.all_related_that_doc(("obs", "replaces")) + ) + .filter(disclosure__state__in=settings.PUBLISH_IPR_STATES) + .values_list("disclosure", flat=True) + .distinct() + ) return iprs + def future_presentations(self): """ returns related SessionPresentation objects for meetings that have not yet ended. This implementation allows for 2 week meetings """ - candidate_presentations = self.sessionpresentation_set.filter( + candidate_presentations = self.presentations.filter( session__meeting__date__gte=date_today() - datetime.timedelta(days=15) ) return sorted( @@ -1012,11 +1261,11 @@ def last_presented(self): """ returns related SessionPresentation objects for the most recent meeting in the past""" # Assumes no two meetings have the same start date - if the assumption is violated, one will be chosen arbitrarily today = date_today() - candidate_presentations = self.sessionpresentation_set.filter(session__meeting__date__lte=today) + candidate_presentations = self.presentations.filter(session__meeting__date__lte=today) candidate_meetings = set([p.session.meeting for p in candidate_presentations if p.session.meeting.end_date()%s" % (self.name, ','.join([force_str(d.name) for d in self.docs.all() if isinstance(d, Document) ])) - document_link = admin_link("document") - class Meta: - verbose_name = "document alias" - verbose_name_plural = "document aliases" class DocReminder(models.Model): event = ForeignKey('DocEvent') @@ -1515,6 +1765,11 @@ class EditedAuthorsDocEvent(DocEvent): """ basis = models.CharField(help_text="What is the source or reasoning for the changes to the author list",max_length=255) + +class EditedRfcAuthorsDocEvent(DocEvent): + """Change to the RfcAuthor list for a document""" + + class BofreqEditorDocEvent(DocEvent): """ Capture the proponents of a BOF Request.""" editors = models.ManyToManyField('person.Person', blank=True) @@ -1522,3 +1777,42 @@ class BofreqEditorDocEvent(DocEvent): class BofreqResponsibleDocEvent(DocEvent): """ Capture the responsible leadership (IAB and IESG members) for a BOF Request """ responsible = models.ManyToManyField('person.Person', blank=True) + + +class StoredObjectQuerySet(models.QuerySet): + def exclude_deleted(self): + return self.filter(deleted__isnull=True) + + +class StoredObject(models.Model): + """Hold 
metadata about objects placed in object storage""" + + objects = StoredObjectQuerySet.as_manager() + + store = models.CharField(max_length=256) + name = models.CharField(max_length=1024, null=False, blank=False) # N.B. the 1024 limit on name comes from S3 + sha384 = models.CharField(max_length=96) + len = models.PositiveBigIntegerField() + store_created = models.DateTimeField(help_text="The instant the object was first placed in the store") + created = models.DateTimeField( + null=False, + help_text="Instant object became known. May not be the same as the storage's created value for the instance. It will hold ctime for objects imported from older disk storage" + ) + modified = models.DateTimeField( + null=False, + help_text="Last instant object was modified. May not be the same as the storage's modified value for the instance. It will hold mtime for objects imported from older disk storage unless they've actually been overwritten more recently" + ) + doc_name = models.CharField(max_length=255, null=True, blank=True) + doc_rev = models.CharField(max_length=16, null=True, blank=True) + deleted = models.DateTimeField(null=True) + + class Meta: + constraints = [ + models.UniqueConstraint(fields=['store', 'name'], name='unique_name_per_store'), + ] + indexes = [ + models.Index(fields=["doc_name", "doc_rev"]), + ] + + def __str__(self): + return f"{self.store}:{self.name}" diff --git a/ietf/doc/resources.py b/ietf/doc/resources.py index 99e26ac33d..1d86df78d0 100644 --- a/ietf/doc/resources.py +++ b/ietf/doc/resources.py @@ -12,13 +12,14 @@ from ietf import api from ietf.doc.models import (BallotType, DeletedEvent, StateType, State, Document, - DocumentAuthor, DocEvent, StateDocEvent, DocHistory, ConsensusDocEvent, DocAlias, + DocumentAuthor, DocEvent, StateDocEvent, DocHistory, ConsensusDocEvent, TelechatDocEvent, DocReminder, LastCallDocEvent, NewRevisionDocEvent, WriteupDocEvent, InitialReviewDocEvent, DocHistoryAuthor, BallotDocEvent, RelatedDocument, RelatedDocHistory, BallotPositionDocEvent, AddedMessageEvent, SubmissionDocEvent, ReviewRequestDocEvent, ReviewAssignmentDocEvent, EditedAuthorsDocEvent, DocumentURL, - IanaExpertDocEvent, IRSGBallotDocEvent, DocExtResource, DocumentActionHolder, - BofreqEditorDocEvent,BofreqResponsibleDocEvent) + IanaExpertDocEvent, IRSGBallotDocEvent, DocExtResource, DocumentActionHolder, + BofreqEditorDocEvent, BofreqResponsibleDocEvent, StoredObject, RfcAuthor, + EditedRfcAuthorsDocEvent) from ietf.name.resources import BallotPositionNameResource, DocTypeNameResource class BallotTypeResource(ModelResource): @@ -130,7 +131,6 @@ class Meta: "external_url": ALL, "uploaded_filename": ALL, "note": ALL, - "internal_comments": ALL, "name": ALL, "type": ALL_WITH_RELATIONS, "stream": ALL_WITH_RELATIONS, @@ -247,7 +247,6 @@ class Meta: "external_url": ALL, "uploaded_filename": ALL, "note": ALL, - "internal_comments": ALL, "name": ALL, "type": ALL_WITH_RELATIONS, "stream": ALL_WITH_RELATIONS, @@ -286,21 +285,6 @@ class Meta: } api.doc.register(ConsensusDocEventResource()) -class DocAliasResource(ModelResource): - document = ToOneField(DocumentResource, 'document') - class Meta: - cache = SimpleCache() - queryset = DocAlias.objects.all() - serializer = api.Serializer() - detail_uri_name = 'name' - #resource_name = 'docalias' - ordering = ['id', ] - filtering = { - "name": ALL, - "document": ALL_WITH_RELATIONS, - } -api.doc.register(DocAliasResource()) - from ietf.person.resources import PersonResource class TelechatDocEventResource(ModelResource): by =
ToOneField(PersonResource, 'by') @@ -490,7 +474,7 @@ class Meta: from ietf.name.resources import DocRelationshipNameResource class RelatedDocumentResource(ModelResource): source = ToOneField(DocumentResource, 'source') - target = ToOneField(DocAliasResource, 'target') + target = ToOneField(DocumentResource, 'target') relationship = ToOneField(DocRelationshipNameResource, 'relationship') class Meta: cache = SimpleCache() @@ -509,7 +493,7 @@ class Meta: from ietf.name.resources import DocRelationshipNameResource class RelatedDocHistoryResource(ModelResource): source = ToOneField(DocHistoryResource, 'source') - target = ToOneField(DocAliasResource, 'target') + target = ToOneField(DocumentResource, 'target') relationship = ToOneField(DocRelationshipNameResource, 'relationship') class Meta: cache = SimpleCache() @@ -667,6 +651,31 @@ class Meta: api.doc.register(EditedAuthorsDocEventResource()) + +from ietf.person.resources import PersonResource +class EditedRfcAuthorsDocEventResource(ModelResource): + by = ToOneField(PersonResource, 'by') + doc = ToOneField(DocumentResource, 'doc') + docevent_ptr = ToOneField(DocEventResource, 'docevent_ptr') + class Meta: + queryset = EditedRfcAuthorsDocEvent.objects.all() + serializer = api.Serializer() + cache = SimpleCache() + #resource_name = 'editedrfcauthorsdocevent' + ordering = ['id', ] + filtering = { + "id": ALL, + "time": ALL, + "type": ALL, + "rev": ALL, + "desc": ALL, + "by": ALL_WITH_RELATIONS, + "doc": ALL_WITH_RELATIONS, + "docevent_ptr": ALL_WITH_RELATIONS, + } +api.doc.register(EditedRfcAuthorsDocEventResource()) + + from ietf.name.resources import DocUrlTagNameResource class DocumentURLResource(ModelResource): doc = ToOneField(DocumentResource, 'doc') @@ -859,3 +868,51 @@ class Meta: "responsible": ALL_WITH_RELATIONS, } api.doc.register(BofreqResponsibleDocEventResource()) + + +class StoredObjectResource(ModelResource): + class Meta: + queryset = StoredObject.objects.all() + serializer = api.Serializer() + cache = SimpleCache() + #resource_name = 'storedobject' + ordering = ['id', ] + filtering = { + "id": ALL, + "store": ALL, + "name": ALL, + "sha384": ALL, + "len": ALL, + "store_created": ALL, + "created": ALL, + "modified": ALL, + "doc_name": ALL, + "doc_rev": ALL, + "deleted": ALL, + } +api.doc.register(StoredObjectResource()) + + +from ietf.person.resources import EmailResource, PersonResource +class RfcAuthorResource(ModelResource): + document = ToOneField(DocumentResource, 'document') + person = ToOneField(PersonResource, 'person', null=True) + email = ToOneField(EmailResource, 'email', null=True, readonly=True) + class Meta: + queryset = RfcAuthor.objects.all() + serializer = api.Serializer() + cache = SimpleCache() + #resource_name = 'rfcauthor' + ordering = ['id', ] + filtering = { + "id": ALL, + "titlepage_name": ALL, + "is_editor": ALL, + "affiliation": ALL, + "country": ALL, + "order": ALL, + "document": ALL_WITH_RELATIONS, + "person": ALL_WITH_RELATIONS, + "email": ALL_WITH_RELATIONS, + } +api.doc.register(RfcAuthorResource()) diff --git a/ietf/doc/serializers.py b/ietf/doc/serializers.py new file mode 100644 index 0000000000..3651670962 --- /dev/null +++ b/ietf/doc/serializers.py @@ -0,0 +1,360 @@ +# Copyright The IETF Trust 2024-2026, All Rights Reserved +"""django-rest-framework serializers""" + +from dataclasses import dataclass +from typing import Literal, ClassVar + +from django.db.models.manager import BaseManager +from django.db.models.query import QuerySet +from drf_spectacular.utils import extend_schema_field +from 
rest_framework import serializers + +from ietf.group.serializers import ( + AreaDirectorSerializer, + AreaSerializer, + GroupSerializer, +) +from ietf.name.serializers import StreamNameSerializer +from ietf.utils import log +from .models import Document, DocumentAuthor, RfcAuthor + + +class RfcAuthorSerializer(serializers.ModelSerializer): + """Serializer for an RfcAuthor / DocumentAuthor in a response""" + + email = serializers.EmailField(source="email.address", read_only=True) + datatracker_person_path = serializers.URLField( + source="person.get_absolute_url", + required=False, + help_text="URL for person link (relative to datatracker base URL)", + read_only=True, + ) + + class Meta: + model = RfcAuthor + fields = [ + "titlepage_name", + "is_editor", + "person", + "email", + "affiliation", + "country", + "datatracker_person_path", + ] + + def to_representation(self, instance): + """instance -> primitive data types + + Translates a DocumentAuthor into an equivalent RfcAuthor we can use the same + serializer for either type. + """ + if isinstance(instance, DocumentAuthor): + # create a non-persisted RfcAuthor as a shim - do not save it! + document_author = instance + instance = RfcAuthor( + titlepage_name=document_author.person.plain_name(), + is_editor=False, + person=document_author.person, + affiliation=document_author.affiliation, + country=document_author.country, + order=document_author.order, + ) + return super().to_representation(instance) + + def validate(self, data): + email = data.get("email") + if email is not None: + person = data.get("person") + if person is None: + raise serializers.ValidationError( + { + "email": "cannot have an email without a person", + }, + code="email-without-person", + ) + if email.person_id != person.pk: + raise serializers.ValidationError( + { + "email": "email must belong to person", + }, + code="email-person-mismatch", + ) + return data + + +@dataclass +class DocIdentifier: + type: Literal["doi", "issn"] + value: str + + +class DocIdentifierSerializer(serializers.Serializer): + type = serializers.ChoiceField(choices=["doi", "issn"]) + value = serializers.CharField() + + +type RfcStatusSlugT = Literal[ + "std", + "ps", + "ds", + "bcp", + "inf", + "exp", + "hist", + "unkn", + "not-issued", +] + + +@dataclass +class RfcStatus: + """Helper to extract the 'Status' from an RFC document for serialization""" + + slug: RfcStatusSlugT + + # Names that aren't just the slug itself. ClassVar annotation prevents dataclass from treating this as a field. 
+ fancy_names: ClassVar[dict[RfcStatusSlugT, str]] = { + "std": "internet standard", + "ps": "proposed standard", + "ds": "draft standard", + "bcp": "best current practice", + "inf": "informational", + "exp": "experimental", + "hist": "historic", + "unkn": "unknown", + } + + # ClassVar annotation prevents dataclass from treating this as a field + stdlevelname_slug_map: ClassVar[dict[str, RfcStatusSlugT]] = { + "bcp": "bcp", + "ds": "ds", + "exp": "exp", + "hist": "hist", + "inf": "inf", + "std": "std", + "ps": "ps", + "unkn": "unkn", + } + + # ClassVar annotation prevents dataclass from treating this as a field + status_slugs: ClassVar[list[RfcStatusSlugT]] = sorted( + # TODO implement "not-issued" RFCs + set(stdlevelname_slug_map.values()) | {"not-issued"} + ) + + @property + def name(self): + return RfcStatus.fancy_names.get(self.slug, self.slug) + + @classmethod + def from_document(cls, doc: Document): + """Decide the status that applies to a document""" + return cls( + slug=(cls.stdlevelname_slug_map.get(doc.std_level.slug, "unkn")), + ) + + @classmethod + def filter(cls, queryset, name, value: list[RfcStatusSlugT]): + """Filter a queryset by status + + This is basically the inverse of the from_document() method. Given a status name, filter + the queryset to those in that status. The queryset should be a Document queryset. + """ + interesting_slugs = [ + stdlevelname_slug + for stdlevelname_slug, status_slug in cls.stdlevelname_slug_map.items() + if status_slug in value + ] + if len(interesting_slugs) == 0: + return queryset.none() + return queryset.filter(std_level__slug__in=interesting_slugs) + + +class RfcStatusSerializer(serializers.Serializer): + """Status serializer for a Document instance""" + + slug = serializers.ChoiceField(choices=RfcStatus.status_slugs) + name = serializers.CharField() + + def to_representation(self, instance: Document): + return super().to_representation(instance=RfcStatus.from_document(instance)) + + +class ShepherdSerializer(serializers.Serializer): + email = serializers.EmailField(source="email_address") + + +class RelatedDraftSerializer(serializers.Serializer): + id = serializers.IntegerField(source="source.id") + name = serializers.CharField(source="source.name") + title = serializers.CharField(source="source.title") + shepherd = ShepherdSerializer(source="source.shepherd", allow_null=True) + ad = AreaDirectorSerializer(source="source.ad", allow_null=True) + + +class RelatedRfcSerializer(serializers.Serializer): + id = serializers.IntegerField(source="target.id") + number = serializers.IntegerField(source="target.rfc_number") + title = serializers.CharField(source="target.title") + + +class ReverseRelatedRfcSerializer(serializers.Serializer): + id = serializers.IntegerField(source="source.id") + number = serializers.IntegerField(source="source.rfc_number") + title = serializers.CharField(source="source.title") + + +class ContainingSubseriesSerializer(serializers.Serializer): + name = serializers.CharField(source="source.name") + type = serializers.CharField(source="source.type_id") + + +class RfcFormatSerializer(serializers.Serializer): + RFC_FORMATS = ("xml", "txt", "html", "pdf", "ps", "json", "notprepped") + + fmt = serializers.ChoiceField(choices=RFC_FORMATS) + name = serializers.CharField(help_text="Name of blob in the blob store") + + +class RfcMetadataSerializer(serializers.ModelSerializer): + """Serialize metadata of an RFC + + This needs to be called with a Document queryset that has been processed with + api.augment_rfc_queryset() or it very 
likely will not work. Some of the typing + refers to Document, but this should really be WithAnnotations[Document, ...]. + However, have not been able to make that work yet. + """ + + number = serializers.IntegerField(source="rfc_number") + published = serializers.DateField() + status = RfcStatusSerializer(source="*") + authors = serializers.SerializerMethodField() + group = GroupSerializer() + area = AreaSerializer(read_only=True) + stream = StreamNameSerializer() + ad = AreaDirectorSerializer(read_only=True, allow_null=True) + group_list_email = serializers.EmailField(source="group.list_email", read_only=True) + identifiers = serializers.SerializerMethodField() + draft = serializers.SerializerMethodField() + obsoletes = RelatedRfcSerializer(many=True, read_only=True) + obsoleted_by = ReverseRelatedRfcSerializer(many=True, read_only=True) + updates = RelatedRfcSerializer(many=True, read_only=True) + updated_by = ReverseRelatedRfcSerializer(many=True, read_only=True) + subseries = ContainingSubseriesSerializer(many=True, read_only=True) + formats = RfcFormatSerializer( + many=True, read_only=True, help_text="Available formats" + ) + keywords = serializers.ListField(child=serializers.CharField(), read_only=True) + has_errata = serializers.BooleanField(read_only=True) + + class Meta: + model = Document + fields = [ + "number", + "title", + "published", + "status", + "pages", + "authors", + "group", + "area", + "stream", + "ad", + "group_list_email", + "identifiers", + "obsoletes", + "obsoleted_by", + "updates", + "updated_by", + "subseries", + "draft", + "abstract", + "formats", + "keywords", + "has_errata", + ] + + @extend_schema_field(RfcAuthorSerializer(many=True)) + def get_authors(self, doc: Document): + # If doc has any RfcAuthors, use those, otherwise fall back to DocumentAuthors + author_queryset: QuerySet[RfcAuthor] | QuerySet[DocumentAuthor] = ( + doc.rfcauthor_set.all() + if doc.rfcauthor_set.exists() + else doc.documentauthor_set.all() + ) + # RfcAuthorSerializer can deal with DocumentAuthor instances + return RfcAuthorSerializer( + instance=author_queryset, + many=True, + ).data + + @extend_schema_field(DocIdentifierSerializer(many=True)) + def get_identifiers(self, doc: Document): + identifiers = [] + if doc.doi: + identifiers.append( + DocIdentifier(type="doi", value=doc.doi) + ) + return DocIdentifierSerializer(instance=identifiers, many=True).data + + @extend_schema_field(RelatedDraftSerializer) + def get_draft(self, doc: Document): + if hasattr(doc, "drafts"): + # This is the expected case - drafts is added by a Prefetch in + # the augment_rfc_queryset() method. + try: + related_doc = doc.drafts[0] + except IndexError: + return None + else: + # Fallback in case augment_rfc_queryset() was not called + log.log( + f"Warning: {self.__class__}.get_draft() called without prefetched draft" + ) + related_doc = doc.came_from_draft() + return RelatedDraftSerializer(related_doc).data + + +class RfcSerializer(RfcMetadataSerializer): + """Serialize an RFC, including its metadata and text content if available""" + + text = serializers.CharField(allow_null=True) + + class Meta: + model = RfcMetadataSerializer.Meta.model + fields = RfcMetadataSerializer.Meta.fields + ["text"] + + +class SubseriesContentListSerializer(serializers.ListSerializer): + """ListSerializer that gets its object from item.target""" + + def to_representation(self, data): + """ + List of object instances -> List of dicts of primitive datatypes. 
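The status mapping above is used in both directions: from_document() maps a document's std_level slug onto a status slug, and filter() inverts that map to decide which std_level slugs to query for. A minimal standalone sketch of the same idea; StatusSketch and its slug_map are made-up stand-ins, not the datatracker classes or name tables.

from dataclasses import dataclass
from typing import ClassVar


@dataclass
class StatusSketch:
    slug: str

    # forward map: std_level slug -> status slug (illustrative subset only)
    slug_map: ClassVar[dict[str, str]] = {"bcp": "bcp", "ps": "ps", "std": "std"}

    @classmethod
    def from_std_level(cls, std_level_slug: str) -> "StatusSketch":
        # unknown std_levels collapse onto "unkn", as in from_document()
        return cls(slug=cls.slug_map.get(std_level_slug, "unkn"))

    @classmethod
    def std_levels_for(cls, wanted: set[str]) -> list[str]:
        # inverse map: which std_level slugs correspond to the wanted statuses
        return [std for std, status in cls.slug_map.items() if status in wanted]


assert StatusSketch.from_std_level("ds").slug == "unkn"
assert StatusSketch.std_levels_for({"std"}) == ["std"]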
+ """ + # Dealing with nested relationships, data can be a Manager, + # so, first get a queryset from the Manager if needed + iterable = data.all() if isinstance(data, BaseManager) else data + # Serialize item.target instead of item itself + return [self.child.to_representation(item.target) for item in iterable] + + +class SubseriesContentSerializer(RfcMetadataSerializer): + """Serialize RFC contained in a subseries doc""" + + class Meta(RfcMetadataSerializer.Meta): + list_serializer_class = SubseriesContentListSerializer + + +class SubseriesDocSerializer(serializers.ModelSerializer): + """Serialize a subseries document (e.g., a BCP or STD)""" + + contents = SubseriesContentSerializer(many=True) + + class Meta: + model = Document + fields = [ + "name", + "type", + "contents", + ] diff --git a/ietf/doc/storage.py b/ietf/doc/storage.py new file mode 100644 index 0000000000..ee1e76c4fa --- /dev/null +++ b/ietf/doc/storage.py @@ -0,0 +1,181 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +from typing import Optional + +import debug # pyflakes:ignore +import json + +from contextlib import contextmanager +from storages.backends.s3 import S3Storage + +from django.core.files.base import File + +from ietf.blobdb.storage import BlobdbStorage +from ietf.doc.models import StoredObject +from ietf.utils.log import log +from ietf.utils.storage import MetadataFile +from ietf.utils.timezone import timezone + + +class StoredObjectFile(MetadataFile): + """Django storage File object that represents a StoredObject""" + def __init__(self, file, name, mtime=None, content_type="", store=None, doc_name=None, doc_rev=None): + super().__init__( + file=file, + name=name, + mtime=mtime, + content_type=content_type, + ) + self.store = store + self.doc_name = doc_name + self.doc_rev = doc_rev + + @classmethod + def from_storedobject(cls, file, name, store): + """Alternate constructor for objects that already exist in the StoredObject table""" + stored_object = StoredObject.objects.exclude_deleted().filter(store=store, name=name).first() + if stored_object is None: + raise FileNotFoundError(f"StoredObject for {store}:{name} does not exist or was deleted") + file = cls(file, name, store, doc_name=stored_object.doc_name, doc_rev=stored_object.doc_rev) + if int(file.custom_metadata["len"]) != stored_object.len: + raise RuntimeError(f"File length changed unexpectedly for {store}:{name}") + if file.custom_metadata["sha384"] != stored_object.sha384: + raise RuntimeError(f"SHA-384 hash changed unexpectedly for {store}:{name}") + return file + + +@contextmanager +def maybe_log_timing(enabled, op, **kwargs): + """If enabled, log elapsed time and additional data from kwargs + + Emits log even if an exception occurs + """ + before = timezone.now() + exception = None + try: + yield + except Exception as err: + exception = err + raise + finally: + if enabled: + dt = timezone.now() - before + log( + json.dumps( + { + "log": "S3Storage_timing", + "seconds": dt.total_seconds(), + "op": op, + "exception": "" if exception is None else repr(exception), + **kwargs, + } + ) + ) + + +class MetadataS3Storage(S3Storage): + def get_default_settings(self): + # add a default for the ietf_log_blob_timing boolean + return super().get_default_settings() | {"ietf_log_blob_timing": False} + + def _save(self, name, content: File): + with maybe_log_timing( + self.ietf_log_blob_timing, "_save", bucket_name=self.bucket_name, name=name + ): + return super()._save(name, content) + + def _open(self, name, mode="rb"): + with maybe_log_timing( + 
self.ietf_log_blob_timing, + "_open", + bucket_name=self.bucket_name, + name=name, + mode=mode, + ): + return super()._open(name, mode) + + def delete(self, name): + with maybe_log_timing( + self.ietf_log_blob_timing, "delete", bucket_name=self.bucket_name, name=name + ): + super().delete(name) + + def _get_write_parameters(self, name, content=None): + # debug.show('f"getting write parameters for {name}"') + params = super()._get_write_parameters(name, content) + # If we have a non-empty explicit content type, use it + content_type = getattr(content, "content_type", "").strip() + if content_type != "": + params["ContentType"] = content_type + if "Metadata" not in params: + params["Metadata"] = {} + if hasattr(content, "custom_metadata"): + params["Metadata"].update(content.custom_metadata) + return params + + +class StoredObjectBlobdbStorage(BlobdbStorage): + warn_if_missing = True # TODO-BLOBSTORE make this configurable (or remove it) + + def _save_stored_object(self, name, content) -> StoredObject: + now = timezone.now() + record, created = StoredObject.objects.get_or_create( + store=self.bucket_name, + name=name, + defaults=dict( + sha384=content.custom_metadata["sha384"], + len=int(content.custom_metadata["len"]), + store_created=now, + created=now, + modified=now, + doc_name=getattr( + content, + "doc_name", # Note that these are assumed to be invariant + None, # should be blank? + ), + doc_rev=getattr( + content, + "doc_rev", # for a given name + None, # should be blank? + ), + ), + ) + if not created and ( + record.sha384 != content.custom_metadata["sha384"] + or record.len != int(content.custom_metadata["len"]) + or record.deleted is not None + ): + record.sha384 = content.custom_metadata["sha384"] + record.len = int(content.custom_metadata["len"]) + record.modified = now + record.deleted = None + record.save() + return record + + def _delete_stored_object(self, name) -> Optional[StoredObject]: + existing_record = StoredObject.objects.filter(store=self.bucket_name, name=name) + if not existing_record.exists() and self.warn_if_missing: + complaint = ( + f"WARNING: Asked to delete {name} from {self.bucket_name} storage, " + f"but there was no matching StoredObject" + ) + log(complaint) + debug.show("complaint") + else: + now = timezone.now() + # Note that existing_record is a queryset that will have one matching object + existing_record.exclude_deleted().update(deleted=now) + return existing_record.first() + + def _save(self, name, content): + """Perform the save operation + + In principle the name could change on save to the blob store. As of now, BlobdbStorage + will not change it, but allow for that possibility. Callers should be prepared for this. 
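maybe_log_timing() above is meant to wrap individual storage calls so that one JSON log line is emitted with the elapsed time and, if the wrapped call raised, the repr of the exception. A self-contained sketch of the same pattern, printing instead of calling ietf.utils.log.log; the operation and bucket names here are illustrative.

import json
import time
from contextlib import contextmanager


@contextmanager
def timed(enabled: bool, op: str, **kwargs):
    start = time.monotonic()
    exception = None
    try:
        yield
    except Exception as err:
        exception = err
        raise
    finally:
        # log even when the wrapped call raised
        if enabled:
            print(json.dumps({
                "log": "timing",
                "seconds": round(time.monotonic() - start, 6),
                "op": op,
                "exception": "" if exception is None else repr(exception),
                **kwargs,
            }))


with timed(True, "read", bucket_name="staging", name="rfc9999.txt"):
    content = b"..."  # stand-in for the actual blob read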
+ """ + saved_name = super()._save(name, content) + self._save_stored_object(saved_name, content) + return saved_name + + def delete(self, name): + self._delete_stored_object(name) + super().delete(name) diff --git a/ietf/doc/storage_utils.py b/ietf/doc/storage_utils.py new file mode 100644 index 0000000000..c7cc6989cd --- /dev/null +++ b/ietf/doc/storage_utils.py @@ -0,0 +1,203 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +import datetime +from io import BufferedReader +from typing import Optional, Union + +import debug # pyflakes ignore + +from django.conf import settings +from django.core.files.base import ContentFile, File +from django.core.files.storage import storages, Storage + +from ietf.utils.log import log +from ietf.utils.text import decode_document_content + + +class StorageUtilsError(Exception): + pass + + +class AlreadyExistsError(StorageUtilsError): + pass + + +def _get_storage(kind: str) -> Storage: + if kind in settings.ARTIFACT_STORAGE_NAMES: + return storages[kind] + else: + debug.say(f"Got into not-implemented looking for {kind}") + raise NotImplementedError(f"Don't know how to store {kind}") + + +def exists_in_storage(kind: str, name: str) -> bool: + if settings.ENABLE_BLOBSTORAGE: + try: + store = _get_storage(kind) + with store.open(name): + return True + except FileNotFoundError: + return False + except Exception as err: + log(f"Blobstore Error: Failed to test existence of {kind}:{name}: {repr(err)}") + if settings.SERVER_MODE == "development": + raise + return False + + +def remove_from_storage(kind: str, name: str, warn_if_missing: bool = True) -> None: + if settings.ENABLE_BLOBSTORAGE: + try: + if exists_in_storage(kind, name): + _get_storage(kind).delete(name) + elif warn_if_missing: + complaint = ( + f"WARNING: Asked to delete non-existent {name} from {kind} storage" + ) + debug.show("complaint") + log(complaint) + except Exception as err: + log(f"Blobstore Error: Failed to remove {kind}:{name}: {repr(err)}") + if settings.SERVER_MODE == "development": + raise + return None + + +def store_file( + kind: str, + name: str, + file: Union[File, BufferedReader], + allow_overwrite: bool = False, + doc_name: Optional[str] = None, + doc_rev: Optional[str] = None, + content_type: str="", + mtime: Optional[datetime.datetime]=None, +) -> None: + from .storage import StoredObjectFile # avoid circular import + if settings.ENABLE_BLOBSTORAGE: + try: + is_new = not exists_in_storage(kind, name) + # debug.show('f"Asked to store {name} in {kind}: is_new={is_new}, allow_overwrite={allow_overwrite}"') + if not allow_overwrite and not is_new: + debug.show('f"Failed to save {kind}:{name} - name already exists in store"') + raise AlreadyExistsError(f"Failed to save {kind}:{name} - name already exists in store") + new_name = _get_storage(kind).save( + name, + StoredObjectFile( + file=file, + name=name, + doc_name=doc_name, + doc_rev=doc_rev, + mtime=mtime, + content_type=content_type, + ), + ) + if new_name != name: + complaint = f"Error encountered saving '{name}' - results stored in '{new_name}' instead." 
+ debug.show("complaint") + raise StorageUtilsError(complaint) + except Exception as err: + log(f"Blobstore Error: Failed to store file {kind}:{name}: {repr(err)}") + if settings.SERVER_MODE == "development": + raise # TODO-BLOBSTORE eventually make this an error for all modes + return None + + +def store_bytes( + kind: str, + name: str, + content: bytes, + allow_overwrite: bool = False, + doc_name: Optional[str] = None, + doc_rev: Optional[str] = None, + content_type: str = "", + mtime: Optional[datetime.datetime] = None, +) -> None: + if settings.ENABLE_BLOBSTORAGE: + try: + store_file( + kind, + name, + ContentFile(content), + allow_overwrite, + doc_name, + doc_rev, + content_type, + mtime, + ) + except Exception as err: + # n.b., not likely to get an exception here because store_file or store_bytes will catch it + log(f"Blobstore Error: Failed to store bytes to {kind}:{name}: {repr(err)}") + if settings.SERVER_MODE == "development": + raise # TODO-BLOBSTORE eventually make this an error for all modes + return None + + +def store_str( + kind: str, + name: str, + content: str, + allow_overwrite: bool = False, + doc_name: Optional[str] = None, + doc_rev: Optional[str] = None, + content_type: str = "", + mtime: Optional[datetime.datetime] = None, +) -> None: + if settings.ENABLE_BLOBSTORAGE: + try: + content_bytes = content.encode("utf-8") + store_bytes( + kind, + name, + content_bytes, + allow_overwrite, + doc_name, + doc_rev, + content_type, + mtime, + ) + except Exception as err: + # n.b., not likely to get an exception here because store_file or store_bytes will catch it + log(f"Blobstore Error: Failed to store string to {kind}:{name}: {repr(err)}") + if settings.SERVER_MODE == "development": + raise # TODO-BLOBSTORE eventually make this an error for all modes + return None + + +def retrieve_bytes(kind: str, name: str) -> bytes: + from ietf.doc.storage import maybe_log_timing + if not settings.ENABLE_BLOBSTORAGE: + return b"" + try: + store = _get_storage(kind) + with store.open(name) as f: + with maybe_log_timing( + hasattr(store, "ietf_log_blob_timing") and store.ietf_log_blob_timing, + "read", + bucket_name=store.bucket_name if hasattr(store, "bucket_name") else "", + name=name, + ): + content = f.read() + except Exception as err: + log(f"Blobstore Error: Failed to read bytes from {kind}:{name}: {repr(err)}") + raise + return content + + +def retrieve_str(kind: str, name: str) -> str: + if not settings.ENABLE_BLOBSTORAGE: + return "" + try: + content = decode_document_content(retrieve_bytes(kind, name)) + except Exception as err: + log(f"Blobstore Error: Failed to read string from {kind}:{name}: {repr(err)}") + raise + return content + + +def force_replication(kind: str, name: str): + if not settings.ENABLE_BLOBSTORAGE: + return + storage = _get_storage(kind) + from ietf.blobdb.storage import BlobdbStorage + if isinstance(storage, BlobdbStorage): + storage.force_replication(name) diff --git a/ietf/doc/tasks.py b/ietf/doc/tasks.py new file mode 100644 index 0000000000..273242e35f --- /dev/null +++ b/ietf/doc/tasks.py @@ -0,0 +1,222 @@ +# Copyright The IETF Trust 2024-2026, All Rights Reserved +# +# Celery task definitions +# +import datetime + +import debug # pyflakes:ignore + +from celery import shared_task +from celery.exceptions import MaxRetriesExceededError +from pathlib import Path + +from django.conf import settings +from django.utils import timezone + +from ietf.doc.utils_r2 import rfcs_are_in_r2 +from ietf.doc.utils_red import trigger_red_precomputer +from ietf.utils import 
log, searchindex +from ietf.utils.timezone import datetime_today + +from .expire import ( + in_draft_expire_freeze, + get_expired_drafts, + expirable_drafts, + send_expire_notice_for_draft, + expire_draft, + clean_up_draft_files, + get_soon_to_expire_drafts, + send_expire_warning_for_draft, +) +from .lastcall import get_expired_last_calls, expire_last_call +from .models import Document, NewRevisionDocEvent +from .utils import ( + generate_idnits2_rfc_status, + generate_idnits2_rfcs_obsoleted, + rebuild_reference_relations, + update_or_create_draft_bibxml_file, + ensure_draft_bibxml_path_exists, + investigate_fragment, +) +from .utils_bofreq import fixup_bofreq_timestamps +from .utils_errata import signal_update_rfc_metadata + + +@shared_task +def expire_ids_task(): + try: + if not in_draft_expire_freeze(): + log.log("Expiring drafts ...") + for doc in get_expired_drafts(): + # verify expirability -- it might have changed after get_expired_drafts() was run + # (this whole loop took about 2 minutes on 04 Jan 2018) + # N.B., re-running expirable_drafts() repeatedly is fairly expensive. Where possible, + # it's much faster to run it once on a superset query of the objects you are going + # to test and keep its results. That's not desirable here because it would defeat + # the purpose of double-checking that a document is still expirable when it is actually + # being marked as expired. + if expirable_drafts( + Document.objects.filter(pk=doc.pk) + ).exists() and doc.expires < datetime_today() + datetime.timedelta(1): + send_expire_notice_for_draft(doc) + expire_draft(doc) + log.log(f" Expired draft {doc.name}-{doc.rev}") + + log.log("Cleaning up draft files") + clean_up_draft_files() + except Exception as e: + log.log("Exception in expire-ids: %s" % e) + raise + + +@shared_task +def notify_expirations_task(notify_days=14): + for doc in get_soon_to_expire_drafts(notify_days): + send_expire_warning_for_draft(doc) + + +@shared_task +def expire_last_calls_task(): + for doc in get_expired_last_calls(): + try: + expire_last_call(doc) + except Exception: + log.log( + f"ERROR: Failed to expire last call for {doc.file_tag()} (id={doc.pk})" + ) + else: + log.log(f"Expired last call for {doc.file_tag()} (id={doc.pk})") + + +@shared_task +def generate_idnits2_rfc_status_task(): + outpath = Path(settings.DERIVED_DIR) / "idnits2-rfc-status" + blob = generate_idnits2_rfc_status() + try: + outpath.write_text(blob, encoding="utf8") # TODO-BLOBSTORE + except Exception as e: + log.log(f"failed to write idnits2-rfc-status: {e}") + + +@shared_task +def generate_idnits2_rfcs_obsoleted_task(): + outpath = Path(settings.DERIVED_DIR) / "idnits2-rfcs-obsoleted" + blob = generate_idnits2_rfcs_obsoleted() + try: + outpath.write_text(blob, encoding="utf8") # TODO-BLOBSTORE + except Exception as e: + log.log(f"failed to write idnits2-rfcs-obsoleted: {e}") + + +@shared_task +def generate_draft_bibxml_files_task(days=7, process_all=False): + """Generate bibxml files for recently updated docs + + If process_all is False (the default), processes only docs with new revisions + in the last specified number of days. 
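The helpers in storage_utils.py form a small round-trip API. A hedged usage sketch: the "draft" kind and the blob name are illustrative, and the calls only do real work when settings.ENABLE_BLOBSTORAGE is True and the kind appears in settings.ARTIFACT_STORAGE_NAMES.

from ietf.doc.storage_utils import (
    exists_in_storage,
    remove_from_storage,
    retrieve_str,
    store_str,
)

name = "draft-example-illustrative-00.txt"  # hypothetical blob name

store_str(
    "draft",                     # hypothetical kind; must be a configured store
    name,
    "Hello, blob store",
    allow_overwrite=True,
    doc_name="draft-example-illustrative",
    doc_rev="00",
    content_type="text/plain; charset=utf-8",
)
if exists_in_storage("draft", name):
    text = retrieve_str("draft", name)
    remove_from_storage("draft", name)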
+ """ + if not process_all and days < 1: + raise ValueError("Must call with days >= 1 or process_all=True") + ensure_draft_bibxml_path_exists() + doc_events = NewRevisionDocEvent.objects.filter( + type="new_revision", + doc__type_id="draft", + ).order_by("time") + if not process_all: + doc_events = doc_events.filter( + time__gte=timezone.now() - datetime.timedelta(days=days) + ) + for event in doc_events: + try: + update_or_create_draft_bibxml_file(event.doc, event.rev) + except Exception as err: + log.log(f"Error generating bibxml for {event.doc.name}-{event.rev}: {err}") + + +@shared_task(ignore_result=False) +def investigate_fragment_task(name_fragment: str): + return { + "name_fragment": name_fragment, + "results": investigate_fragment(name_fragment), + } + + +@shared_task +def rebuild_reference_relations_task(doc_names: list[str]): + log.log(f"Task: Rebuilding reference relations for {doc_names}") + for doc in Document.objects.filter(name__in=doc_names, type__in=["rfc", "draft"]): + filenames = dict() + base = ( + settings.RFC_PATH + if doc.type_id == "rfc" + else settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR + ) + stem = doc.name if doc.type_id == "rfc" else f"{doc.name}-{doc.rev}" + for ext in ["xml", "txt"]: + path = Path(base) / f"{stem}.{ext}" + if path.is_file(): + filenames[ext] = str(path) + if len(filenames) > 0: + rebuild_reference_relations(doc, filenames) + else: + log.log(f"Found no content for {stem}") + + +@shared_task +def fixup_bofreq_timestamps_task(): # pragma: nocover + fixup_bofreq_timestamps() + + +@shared_task +def signal_update_rfc_metadata_task(rfc_number_list=()): + signal_update_rfc_metadata(rfc_number_list) + + +@shared_task(bind=True) +def trigger_red_precomputer_task(self, rfc_number_list=()): + if not rfcs_are_in_r2(rfc_number_list): + log.log(f"Objects are not yet in R2 for RFCs {rfc_number_list}") + try: + countdown = getattr(settings, "RED_PRECOMPUTER_TRIGGER_RETRY_DELAY", 10) + max_retries = getattr(settings, "RED_PRECOMPUTER_TRIGGER_MAX_RETRIES", 12) + self.retry(countdown=countdown, max_retries=max_retries) + except MaxRetriesExceededError: + log.log(f"Gave up waiting for objects in R2 for RFCs {rfc_number_list}") + else: + trigger_red_precomputer(rfc_number_list) + + +@shared_task(bind=True) +def update_rfc_searchindex_task(self, rfc_number: int): + """Update the search index for one RFC""" + if not searchindex.enabled(): + log.log("Search indexing is not enabled, skipping") + return + + rfc = Document.objects.filter(type_id="rfc", rfc_number=rfc_number).first() + if rfc is None: + log.log( + f"ERROR: Document for rfc{rfc_number} not found, not updating search index" + ) + return + try: + searchindex.update_or_create_rfc_entry(rfc) + except Exception as err: + log.log(f"Search index update for {rfc.name} failed ({err})") + if isinstance(err, searchindex.RETRYABLE_ERROR_CLASSES): + searchindex_settings = searchindex.get_settings() + self.retry( + countdown=searchindex_settings["TASK_RETRY_DELAY"], + max_retries=searchindex_settings["TASK_MAX_RETRIES"], + ) + + +@shared_task +def rebuild_searchindex_task(*, batchsize=40, drop_collection=False): + if drop_collection: + searchindex.delete_collection() + searchindex.create_collection() + searchindex.update_or_create_rfc_entries( + Document.objects.filter(type_id="rfc").order_by("-rfc_number"), + batchsize=batchsize, + ) diff --git a/ietf/doc/templatetags/active_groups_menu.py b/ietf/doc/templatetags/active_groups_menu.py index af8f268ebd..c60d6dcd1a 100644 --- a/ietf/doc/templatetags/active_groups_menu.py 
+++ b/ietf/doc/templatetags/active_groups_menu.py @@ -11,7 +11,7 @@ @register.simple_tag def active_groups_menu(flavor): - parents = GroupTypeName.objects.filter(slug__in=["ag", "area", "rag", "team", "dir", "program"]) + parents = GroupTypeName.objects.filter(slug__in=["ag", "area", "rag", "team", "dir", "program", "iabworkshop"]) others = [] for group in Group.objects.filter(acronym__in=("rsoc",), state_id="active"): group.menu_url = reverse("ietf.group.views.group_home", kwargs=dict(acronym=group.acronym)) # type: ignore @@ -23,4 +23,4 @@ def active_groups_menu(flavor): return render_to_string( "base/menu_active_groups.html", {"parents": parents, "others": others, "flavor": flavor}, - ) \ No newline at end of file + ) diff --git a/ietf/doc/templatetags/ballot_icon.py b/ietf/doc/templatetags/ballot_icon.py index ebcc605cd5..07a6c7f926 100644 --- a/ietf/doc/templatetags/ballot_icon.py +++ b/ietf/doc/templatetags/ballot_icon.py @@ -96,9 +96,14 @@ def sort_key(t): positions = list(ballot.active_balloter_positions().items()) positions.sort(key=sort_key) + request = context.get("request") + ballot_edit_return_point_param = f"ballot_edit_return_point={request.path}" + right_click_string = '' if has_role(user, "Area Director"): - right_click_string = 'oncontextmenu="window.location.href=\'%s\';return false;"' % urlreverse('ietf.doc.views_ballot.edit_position', kwargs=dict(name=doc.name, ballot_id=ballot.pk)) + right_click_string = 'oncontextmenu="window.location.href=\'{}?{}\';return false;"'.format( + urlreverse('ietf.doc.views_ballot.edit_position', kwargs=dict(name=doc.name, ballot_id=ballot.pk)), + ballot_edit_return_point_param) my_blocking = False for i, (balloter, pos) in enumerate(positions): @@ -113,10 +118,14 @@ def sort_key(t): typename = "RSAB" else: typename = "IESG" + + modal_url = "{}?{}".format( + urlreverse("ietf.doc.views_doc.ballot_popup", kwargs=dict(name=doc.name, ballot_id=ballot.pk)), + ballot_edit_return_point_param) res = ['{match[0]}' - cname = doc_canonical_name(name) + cname = doc_name(name) if not cname: return match[0] if name == cname: @@ -221,12 +224,11 @@ def link_non_charter_doc_match(match): def link_other_doc_match(match): doc = match[2].strip().lower() rev = match[3] - if not doc_canonical_name(doc + rev): + if not doc_name(doc + rev): return match[0] url = urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc + rev)) return f'{match[1]}' - @register.filter(name="urlize_ietf_docs", is_safe=True, needs_autoescape=True) def urlize_ietf_docs(string, autoescape=None): """ @@ -255,6 +257,7 @@ def urlize_ietf_docs(string, autoescape=None): string, flags=re.IGNORECASE | re.ASCII, ) + return mark_safe(string) @@ -267,7 +270,7 @@ def urlize_related_source_list(related, document_html=False): names = set() titles = set() for rel in related: - name=rel.source.canonical_name() + name=rel.source.name title = rel.source.title if name in names and title in titles: continue @@ -282,14 +285,14 @@ def urlize_related_source_list(related, document_html=False): url=url) )) return links - + @register.filter(name='urlize_related_target_list', is_safe=True, document_html=False) def urlize_related_target_list(related, document_html=False): """Convert a list of RelatedDocuments into list of links using the target document's canonical name""" links = [] for rel in related: - name=rel.target.document.canonical_name() - title = rel.target.document.title + name=rel.target.name + title = rel.target.title url = urlreverse('ietf.doc.views_doc.document_main' if document_html is 
False else 'ietf.doc.views_doc.document_html', kwargs=dict(name=name)) name = escape(name) title = escape(title) @@ -299,7 +302,7 @@ def urlize_related_target_list(related, document_html=False): url=url) )) return links - + @register.filter(name='dashify') def dashify(string): """ @@ -444,16 +447,16 @@ def ad_area(user): @register.filter def format_history_text(text, trunc_words=25): """Run history text through some cleaning and add ellipsis if it's too long.""" - full = mark_safe(bleach_cleaner.clean(text)) - full = bleach_linker.linkify(urlize_ietf_docs(full)) + full = mark_safe(clean_html(text)) + full = linkify(urlize_ietf_docs(full)) return format_snippet(full, trunc_words) @register.filter def format_snippet(text, trunc_words=25): # urlize if there aren't already links present - text = bleach_linker.linkify(text) - full = keep_spacing(collapsebr(linebreaksbr(mark_safe(sanitize_fragment(text))))) + text = linkify(text) + full = keep_spacing(collapsebr(linebreaksbr(mark_safe(clean_html(text))))) snippet = truncatewords_html(full, trunc_words) if snippet != full: return mark_safe('
%s
%s
' % (snippet, full)) @@ -477,6 +480,19 @@ def state(doc, slug): slug = "%s-stream-%s" % (doc.type_id, doc.stream_id) return doc.get_state(slug) + +@register.filter +def is_unexpected_wg_state(doc): + """Returns a flag indicating whether the document has an unexpected wg state.""" + if not doc.type_id == "draft": + return False + + draft_iesg_state = doc.get_state("draft-iesg") + draft_stream_state = doc.get_state("draft-stream-ietf") + + return draft_iesg_state.slug != "idexists" and draft_stream_state is not None and draft_stream_state.slug != "sub-pub" + + @register.filter def statehelp(state): "Output help icon with tooltip for state." @@ -505,10 +521,52 @@ def plural(text, seq, arg='s'): else: return text + pluralize(len(seq), arg) + +# Translation table to escape ICS characters. The {} | {} construction builds up a dict +# mapping characters to arbitrary-length strings or None. Values in later dicts override +# earlier ones prior to conversion to a translation table, so excluding a char and then +# mapping it to an escape sequence results in its being escaped, not dropped. +rfc5545_text_escapes = str.maketrans( + # text = *(TSAFE-CHAR / ":" / DQUOTE / ESCAPED-CHAR) + # TSAFE-CHAR = WSP / %x21 / %x23-2B / %x2D-39 / %x3C-5B / + # %x5D-7E / NON-US-ASCII + {chr(c): None for c in range(0x00, 0x20)} # strip 0x00-0x20 + | { + # ESCAPED-CHAR = ("\\" / "\;" / "\," / "\N" / "\n") + "\n": r"\n", + ";": r"\;", + ",": r"\,", + "\\": r"\\", # rhs is two backslashes! + "\t": "\t", # htab ok (0x09) + " ": " ", # space ok (0x20) + } +) + + @register.filter def ics_esc(text): - text = re.sub(r"([\n,;\\])", r"\\\1", text) - return text + """Escape a string to use in an iCalendar text context + + >>> ics_esc('simple') + 'simple' + + For the next tests, it helps to know: + chr(0x09) = "\t" + chr(0x0a) = "\n" + chr(0x0d) = "\r" + chr(0x5c) = "\\" + + >>> ics_esc(f'strips{chr(0x0d)}out{chr(0x0d)}LFs') + 'stripsoutLFs' + + + >>> ics_esc(f'escapes;and,and{chr(0x5c)}and{chr(0x0a)}') + 'escapes\\\\;and\\\\,and\\\\\\\\and\\\\n' + + >>> ics_esc(f"keeps spaces : and{chr(0x09)}tabs") + 'keeps spaces : and\\ttabs' + """ + return text.translate(rfc5545_text_escapes) @register.simple_tag @@ -530,15 +588,22 @@ def ics_date_time(dt, tzname): >>> ics_date_time(datetime.datetime(2022,1,2,3,4,5), 'UTC') ':20220102T030405Z' + >>> ics_date_time(datetime.datetime(2022,1,2,3,4,5), 'GmT') + ':20220102T030405Z' + >>> ics_date_time(datetime.datetime(2022,1,2,3,4,5), 'America/Los_Angeles') ';TZID=America/Los_Angeles:20220102T030405' """ timestamp = dt.strftime('%Y%m%dT%H%M%S') - if tzname.lower() == 'utc': + if tzname.lower() in ('gmt', 'utc'): return f':{timestamp}Z' else: return f';TZID={ics_esc(tzname)}:{timestamp}' +@register.filter +def next_day(value): + return value + datetime.timedelta(days=1) + @register.filter def consensus(doc): @@ -556,7 +621,7 @@ def consensus(doc): @register.filter def std_level_to_label_format(doc): """Returns valid Bootstrap classes to label a status level badge.""" - if doc.is_rfc(): + if doc.type_id == "rfc": if doc.related_that("obs"): return "obs" else: @@ -653,7 +718,7 @@ def rfcbis(s): @stringfilter def urlize(value): raise RuntimeError("Use linkify from textfilters instead of urlize") - + @register.filter @stringfilter def charter_major_rev(rev): @@ -850,10 +915,10 @@ def badgeify(blob): Add an appropriate bootstrap badge around "text", based on its contents. 
""" config = [ - (r"rejected|not ready", "danger", "x-lg"), + (r"rejected|not ready|serious issues", "danger", "x-lg"), (r"complete|accepted|ready", "success", ""), (r"has nits|almost ready", "info", "info-lg"), - (r"has issues", "warning", "exclamation-lg"), + (r"has issues|on the right track", "warning", "exclamation-lg"), (r"assigned", "info", "person-plus-fill"), (r"will not review|overtaken by events|withdrawn", "secondary", "dash-lg"), (r"no response", "warning", "question-lg"), @@ -876,3 +941,137 @@ def badgeify(blob): ) return text + +@register.filter +def simple_history_delta_changes(history): + """Returns diff between given history and previous entry.""" + prev = history.prev_record + if prev: + delta = history.diff_against(prev) + return delta.changes + return [] + +@register.filter +def simple_history_delta_change_cnt(history): + """Returns number of changes between given history and previous entry.""" + prev = history.prev_record + if prev: + delta = history.diff_against(prev) + return len(delta.changes) + return 0 + +@register.filter +def mtime(path): + """Returns a datetime object representing mtime given a pathlib Path object""" + return datetime.datetime.fromtimestamp(path.stat().st_mtime).astimezone(ZoneInfo(settings.TIME_ZONE)) + +@register.filter +def mtime_is_epoch(path): + return path.stat().st_mtime == 0 + +@register.filter +def url_for_path(path): + """Consructs a 'best' URL for web access to the given pathlib Path object. + + Assumes that the path is into the Internet-Draft archive or the proceedings. + """ + if Path(settings.AGENDA_PATH) in path.parents: + return ( + f"https://www.ietf.org/proceedings/{path.relative_to(settings.AGENDA_PATH)}" + ) + elif any( + [ + pathdir in path.parents + for pathdir in [ + Path(settings.INTERNET_DRAFT_PATH), + Path(settings.INTERNET_DRAFT_ARCHIVE_DIR).parent, + Path(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR), + ] + ] + ): + return f"{settings.IETF_ID_ARCHIVE_URL}{path.name}" + else: + return "#" + + +@register.filter +def is_in_stream(doc): + """ + Check if the doc is in one of the states in it stream that + indicate that is actually adopted, i.e., part of the stream. + (There are various "candidate" states that necessitate this + filter.) 
+ """ + if not doc.stream: + return False + stream = doc.stream.slug + state = doc.get_state_slug(f"draft-stream-{doc.stream.slug}") + if not state: + return True + if stream == "ietf": + return state not in ["wg-cand", "c-adopt"] + elif stream == "irtf": + return state != "candidat" + elif stream == "iab": + return state not in ["candidat", "diff-org"] + elif stream == "editorial": + return True + return False + + +@register.filter +def is_doc_ietf_adoptable(doc): + return doc.stream_id is None or all( + [ + doc.stream_id == "ietf", + doc.get_state_slug("draft-stream-ietf") + not in [ + "c-adopt", + "adopt-wg", + "info", + "wg-doc", + "parked", + "dead", + "wg-lc", + "waiting-for-implementation", + "chair-w", + "writeupw", + "sub-pub", + ], + doc.get_state_slug("draft") != "rfc", + doc.became_rfc() is None, + ] + ) + + +@register.filter +def can_issue_ietf_wg_lc(doc): + return all( + [ + doc.stream_id == "ietf", + doc.get_state_slug("draft-stream-ietf") + not in ["wg-cand", "c-adopt", "wg-lc"], + doc.get_state_slug("draft") != "rfc", + doc.became_rfc() is None, + ] + ) + + +@register.filter +def can_submit_to_iesg(doc): + return all( + [ + doc.stream_id == "ietf", + doc.get_state_slug("draft-iesg") == "idexists", + doc.get_state_slug("draft-stream-ietf") not in ["wg-cand", "c-adopt"], + ] + ) + + +@register.filter +def has_had_ietf_wg_lc(doc): + return ( + doc.stream_id == "ietf" + and doc.docevent_set.filter(statedocevent__state__slug="wg-lc").exists() + ) + diff --git a/ietf/doc/templatetags/tests_ietf_filters.py b/ietf/doc/templatetags/tests_ietf_filters.py index f791d61530..b5130849ea 100644 --- a/ietf/doc/templatetags/tests_ietf_filters.py +++ b/ietf/doc/templatetags/tests_ietf_filters.py @@ -3,13 +3,26 @@ from django.conf import settings from ietf.doc.factories import ( - WgDraftFactory, + WgRfcFactory, IndividualDraftFactory, CharterFactory, NewRevisionDocEventFactory, + StatusChangeFactory, + RgDraftFactory, + EditorialDraftFactory, + WgDraftFactory, + ConflictReviewFactory, + BofreqFactory, + StatementFactory, + RfcFactory, +) +from ietf.doc.models import DocEvent +from ietf.doc.templatetags.ietf_filters import ( + urlize_ietf_docs, + is_valid_url, + is_in_stream, + is_unexpected_wg_state, ) -from ietf.doc.models import State, DocEvent, DocAlias -from ietf.doc.templatetags.ietf_filters import urlize_ietf_docs, is_valid_url from ietf.person.models import Person from ietf.utils.test_utils import TestCase @@ -19,29 +32,42 @@ class IetfFiltersTests(TestCase): + def test_is_in_stream(self): + for draft in [ + IndividualDraftFactory(), + CharterFactory(), + StatusChangeFactory(), + ConflictReviewFactory(), + StatementFactory(), + BofreqFactory(), + ]: + self.assertFalse(is_in_stream(draft)) + for draft in [RgDraftFactory(), WgDraftFactory(), EditorialDraftFactory()]: + self.assertTrue(is_in_stream(draft)) + for stream in ["iab", "ietf", "irtf", "ise", "editorial"]: + self.assertTrue(is_in_stream(IndividualDraftFactory(stream_id=stream))) + def test_is_valid_url(self): cases = [(settings.IDTRACKER_BASE_URL, True), ("not valid", False)] for url, result in cases: self.assertEqual(is_valid_url(url), result) def test_urlize_ietf_docs(self): - wg_id = WgDraftFactory() - wg_id.set_state(State.objects.get(type="draft", slug="rfc")) - wg_id.std_level_id = "bcp" - wg_id.save_with_history( + rfc = WgRfcFactory(rfc_number=123456, std_level_id="bcp") + rfc.save_with_history( [ DocEvent.objects.create( - doc=wg_id, - rev=wg_id.rev, + doc=rfc, + rev=rfc.rev, type="published_rfc", 
by=Person.objects.get(name="(System)"), ) ] ) - DocAlias.objects.create(name="rfc123456").docs.add(wg_id) - DocAlias.objects.create(name="bcp123456").docs.add(wg_id) - DocAlias.objects.create(name="std123456").docs.add(wg_id) - DocAlias.objects.create(name="fyi123456").docs.add(wg_id) + # TODO - bring these into existance when subseries are well modeled + # DocAlias.objects.create(name="bcp123456").docs.add(rfc) + # DocAlias.objects.create(name="std123456").docs.add(rfc) + # DocAlias.objects.create(name="fyi123456").docs.add(rfc) id = IndividualDraftFactory(name="draft-me-rfc123456bis") id_num = IndividualDraftFactory(name="draft-rosen-rfcefdp-update-2026") @@ -59,15 +85,16 @@ def test_urlize_ietf_docs(self): cases = [ ("no change", "no change"), - ("bCp123456", 'bCp123456'), - ("Std 00123456", 'Std 00123456'), - ( - "FyI 0123456 changes std 00123456", - 'FyI 0123456 changes std 00123456', - ), + # TODO: rework subseries when we add them + # ("bCp123456", 'bCp123456'), + # ("Std 00123456", 'Std 00123456'), + # ( + # "FyI 0123456 changes std 00123456", + # 'FyI 0123456 changes std 00123456', + # ), ("rfc123456", 'rfc123456'), ("Rfc 0123456", 'Rfc 0123456'), - (wg_id.name, f'{wg_id.name}'), + (rfc.name, f'{rfc.name}'), ( f"{id.name}-{id.rev}.txt", f'{id.name}-{id.rev}.txt', @@ -149,3 +176,17 @@ def test_urlize_ietf_docs(self): for input, output in cases: # debug.show("(input, urlize_ietf_docs(input), output)") self.assertEqual(urlize_ietf_docs(input), output) + + def test_is_unexpected_wg_state(self): + """ + Test that the unexpected_wg_state function works correctly + """ + # test documents with expected wg states + self.assertFalse(is_unexpected_wg_state(RfcFactory())) + self.assertFalse(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'sub-pub')]))) + self.assertFalse(is_unexpected_wg_state(WgDraftFactory (states=[('draft-iesg', 'idexists')]))) + self.assertFalse(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'wg-cand'), ('draft-iesg','idexists')]))) + + # test documents with unexpected wg states due to invalid combination of states + self.assertTrue(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'wg-cand'), ('draft-iesg','lc-req')]))) + self.assertTrue(is_unexpected_wg_state(WgDraftFactory (states=[('draft-stream-ietf', 'chair-w'), ('draft-iesg','pub-req')]))) diff --git a/ietf/doc/templatetags/wg_menu.py b/ietf/doc/templatetags/wg_menu.py index 76bf7eb4d0..3e8d209448 100644 --- a/ietf/doc/templatetags/wg_menu.py +++ b/ietf/doc/templatetags/wg_menu.py @@ -62,8 +62,6 @@ @register.simple_tag def wg_menu(flavor): - global parents - for p in parents: p.short_name = parent_short_names.get(p.acronym) or p.name if p.short_name.endswith(" Area"): diff --git a/ietf/doc/tests.py b/ietf/doc/tests.py index 106ac17821..f92c9648e6 100644 --- a/ietf/doc/tests.py +++ b/ietf/doc/tests.py @@ -1,13 +1,16 @@ -# Copyright The IETF Trust 2012-2020, All Rights Reserved +# Copyright The IETF Trust 2012-2024, All Rights Reserved # -*- coding: utf-8 -*- import os import datetime import io +from hashlib import sha384 + +from django.http import HttpRequest import lxml import bibtexparser -import mock +from unittest import mock import json import copy import random @@ -16,11 +19,9 @@ from pathlib import Path from pyquery import PyQuery from urllib.parse import urlparse, parse_qs -from tempfile import NamedTemporaryFile from collections import defaultdict from zoneinfo import ZoneInfo -from django.core.management import call_command from django.urls 
import reverse as urlreverse from django.conf import settings from django.forms import Form @@ -31,20 +32,34 @@ from tastypie.test import ResourceTestCaseMixin +from weasyprint.urls import URLFetchingError + import debug # pyflakes:ignore -from ietf.doc.models import ( Document, DocAlias, DocRelationshipName, RelatedDocument, State, +from ietf.doc.models import ( Document, DocRelationshipName, RelatedDocument, State, DocEvent, BallotPositionDocEvent, LastCallDocEvent, WriteupDocEvent, NewRevisionDocEvent, BallotType, - EditedAuthorsDocEvent ) -from ietf.doc.factories import ( DocumentFactory, DocEventFactory, CharterFactory, - ConflictReviewFactory, WgDraftFactory, IndividualDraftFactory, WgRfcFactory, - IndividualRfcFactory, StateDocEventFactory, BallotPositionDocEventFactory, - BallotDocEventFactory, DocumentAuthorFactory, NewRevisionDocEventFactory, - StatusChangeFactory, BofreqFactory, DocExtResourceFactory, RgDraftFactory) + EditedAuthorsDocEvent, StateType) +from ietf.doc.factories import (DocumentFactory, DocEventFactory, CharterFactory, + ConflictReviewFactory, WgDraftFactory, + IndividualDraftFactory, WgRfcFactory, + IndividualRfcFactory, StateDocEventFactory, + BallotPositionDocEventFactory, + BallotDocEventFactory, DocumentAuthorFactory, + NewRevisionDocEventFactory, + StatusChangeFactory, DocExtResourceFactory, + RgDraftFactory, BcpFactory, RfcAuthorFactory) from ietf.doc.forms import NotifyForm from ietf.doc.fields import SearchableDocumentsField -from ietf.doc.utils import create_ballot_if_not_open, uppercase_std_abbreviated_name -from ietf.doc.views_search import ad_dashboard_group, ad_dashboard_group_type, shorten_group_name # TODO: red flag that we're importing from views in tests. Move these to utils. +from ietf.doc.utils import ( + create_ballot_if_not_open, + investigate_fragment, + uppercase_std_abbreviated_name, + DraftAliasGenerator, + generate_idnits2_rfc_status, + generate_idnits2_rfcs_obsoleted, + get_doc_email_aliases, +) +from ietf.doc.views_doc import get_diff_revisions from ietf.group.models import Group, Role from ietf.group.factories import GroupFactory, RoleFactory from ietf.ipr.factories import HolderIprDisclosureFactory @@ -52,20 +67,22 @@ from ietf.meeting.factories import ( MeetingFactory, SessionFactory, SessionPresentationFactory, ProceedingsMaterialFactory ) -from ietf.name.models import SessionStatusName, BallotPositionName, DocTypeName +from ietf.name.models import SessionStatusName, BallotPositionName, DocTypeName, RoleName from ietf.person.models import Person from ietf.person.factories import PersonFactory, EmailFactory -from ietf.utils.mail import outbox, empty_outbox +from ietf.utils.mail import get_payload_text, outbox, empty_outbox from ietf.utils.test_utils import login_testing_unauthorized, unicontent from ietf.utils.test_utils import TestCase from ietf.utils.text import normalize_text from ietf.utils.timezone import date_today, datetime_today, DEADLINE_TZINFO, RPC_TZINFO +from ietf.doc.utils_search import AD_WORKLOAD class SearchTests(TestCase): def test_search(self): draft = WgDraftFactory(name='draft-ietf-mars-test',group=GroupFactory(acronym='mars',parent=Group.objects.get(acronym='farfut')),authors=[PersonFactory()],ad=PersonFactory()) + rfc = WgRfcFactory() draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="pub-req")) old_draft = IndividualDraftFactory(name='draft-foo-mars-test',authors=[PersonFactory()],title="Optimizing Martian Network Topologies") old_draft.set_state(State.objects.get(used=True, type="draft", 
slug="expired")) @@ -97,11 +114,12 @@ def test_search(self): self.assertEqual(r.status_code, 200) self.assertContains(r, "draft-foo-mars-test") - # find by rfc/active/inactive - draft.set_state(State.objects.get(type="draft", slug="rfc")) - r = self.client.get(base_url + "?rfcs=on&name=%s" % draft.name) + # find by RFC + r = self.client.get(base_url + "?rfcs=on&name=%s" % rfc.name) self.assertEqual(r.status_code, 200) - self.assertContains(r, draft.title) + self.assertContains(r, rfc.title) + + # find by active/inactive draft.set_state(State.objects.get(type="draft", slug="active")) r = self.client.get(base_url + "?activedrafts=on&name=%s" % draft.name) @@ -154,6 +172,23 @@ def test_search(self): self.assertEqual(r.status_code, 200) self.assertContains(r, draft.title) + def test_search_became_rfc(self): + draft = WgDraftFactory() + rfc = WgRfcFactory() + draft.set_state(State.objects.get(type="draft", slug="rfc")) + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) + base_url = urlreverse('ietf.doc.views_search.search') + + # find by RFC + r = self.client.get(base_url + f"?rfcs=on&name={rfc.name}") + self.assertEqual(r.status_code, 200) + self.assertContains(r, rfc.title) + + # find by draft + r = self.client.get(base_url + f"?activedrafts=on&rfcs=on&name={draft.name}") + self.assertEqual(r.status_code, 200) + self.assertContains(r, rfc.title) + def test_search_for_name(self): draft = WgDraftFactory(name='draft-ietf-mars-test',group=GroupFactory(acronym='mars',parent=Group.objects.get(acronym='farfut')),authors=[PersonFactory()],ad=PersonFactory()) draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="pub-req")) @@ -279,52 +314,68 @@ def test_frontpage(self): self.assertContains(r, "Document Search") def test_ad_workload(self): - Role.objects.filter(name_id='ad').delete() - ad = RoleFactory(name_id='ad',group__type_id='area',group__state_id='active',person__name='Example Areadirector').person - doc_type_names = ['bofreq', 'charter', 'conflrev', 'draft', 'statchg'] - expected = defaultdict(lambda :0) - for doc_type_name in doc_type_names: - if doc_type_name=='draft': - states = State.objects.filter(type='draft-iesg', used=True).values_list('slug', flat=True) - else: - states = State.objects.filter(type=doc_type_name, used=True).values_list('slug', flat=True) - - for state in states: - target_num = random.randint(0,2) + Role.objects.filter(name_id="ad").delete() + ad = RoleFactory( + name_id="ad", + group__type_id="area", + group__state_id="active", + person__name="Example Areadirector", + ).person + expected = defaultdict(lambda: 0) + for doc_type_slug in AD_WORKLOAD: + for state in AD_WORKLOAD[doc_type_slug]: + target_num = random.randint(0, 2) for _ in range(target_num): - if doc_type_name == 'draft': - doc = IndividualDraftFactory(ad=ad,states=[('draft-iesg', state),('draft','rfc' if state=='pub' else 'active')]) - elif doc_type_name == 'charter': - doc = CharterFactory(ad=ad, states=[(doc_type_name, state)]) - elif doc_type_name == 'bofreq': - # Note that the view currently doesn't handle bofreqs - doc = BofreqFactory(states=[(doc_type_name, state)], bofreqresponsibledocevent__responsible=[ad]) - elif doc_type_name == 'conflrev': - doc = ConflictReviewFactory(ad=ad, states=State.objects.filter(type_id=doc_type_name, slug=state)) - elif doc_type_name == 'statchg': - doc = StatusChangeFactory(ad=ad, states=State.objects.filter(type_id=doc_type_name, slug=state)) - else: - # Currently unreachable - doc = DocumentFactory(type_id=doc_type_name, ad=ad, 
states=[(doc_type_name, state)]) - - if not slugify(ad_dashboard_group_type(doc)) in ('document', 'none'): - expected[(slugify(ad_dashboard_group_type(doc)), slugify(ad.full_name_as_key()), slugify(shorten_group_name(ad_dashboard_group(doc))))] += 1 - - url = urlreverse('ietf.doc.views_search.ad_workload') + if ( + doc_type_slug == "draft" + or doc_type_slug == "rfc" + and state == "rfcqueue" + ): + IndividualDraftFactory( + ad=ad, + states=[ + ("draft-iesg", state), + ("draft", "rfc" if state == "pub" else "active"), + ], + ) + elif doc_type_slug == "rfc": + WgRfcFactory.create( + states=[("draft", "rfc"), ("draft-iesg", "pub")] + ) + + elif doc_type_slug == "charter": + CharterFactory(ad=ad, states=[(doc_type_slug, state)]) + elif doc_type_slug == "conflrev": + ConflictReviewFactory( + ad=ad, + states=State.objects.filter( + type_id=doc_type_slug, slug=state + ), + ) + elif doc_type_slug == "statchg": + StatusChangeFactory( + ad=ad, + states=State.objects.filter( + type_id=doc_type_slug, slug=state + ), + ) + self.client.login(username="ad", password="ad+password") + url = urlreverse("ietf.doc.views_search.ad_workload") r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) for group_type, ad, group in expected: - self.assertEqual(int(q(f'#{group_type}-{ad}-{group}').text()),expected[(group_type, ad, group)]) + self.assertEqual( + int(q(f"#{group_type}-{ad}-{group}").text()), + expected[(group_type, ad, group)], + ) def test_docs_for_ad(self): ad = RoleFactory(name_id='ad',group__type_id='area',group__state_id='active').person draft = IndividualDraftFactory(ad=ad) draft.action_holders.set([PersonFactory()]) draft.set_state(State.objects.get(type='draft-iesg', slug='lc')) - rfc = IndividualDraftFactory(ad=ad) - rfc.set_state(State.objects.get(type='draft', slug='rfc')) - DocAlias.objects.create(name='rfc6666').docs.add(rfc) + rfc = IndividualRfcFactory(ad=ad) conflrev = DocumentFactory(type_id='conflrev',ad=ad) conflrev.set_state(State.objects.get(type='conflrev', slug='iesgeval')) statchg = DocumentFactory(type_id='statchg',ad=ad) @@ -348,7 +399,7 @@ def test_docs_for_ad(self): self.assertEqual(r.status_code, 200) self.assertContains(r, draft.name) self.assertContains(r, escape(draft.action_holders.first().name)) - self.assertContains(r, rfc.canonical_name()) + self.assertContains(r, rfc.name) self.assertContains(r, conflrev.name) self.assertContains(r, statchg.name) self.assertContains(r, charter.name) @@ -356,6 +407,30 @@ def test_docs_for_ad(self): self.assertContains(r, discuss_other.doc.name) self.assertContains(r, block_other.doc.name) + def test_docs_for_iesg(self): + ad1 = RoleFactory(name_id='ad',group__type_id='area',group__state_id='active').person + ad2 = RoleFactory(name_id='ad',group__type_id='area',group__state_id='active').person + + draft = IndividualDraftFactory(ad=ad1) + draft.action_holders.set([PersonFactory()]) + draft.set_state(State.objects.get(type='draft-iesg', slug='lc')) + rfc = IndividualRfcFactory(ad=ad2) + conflrev = DocumentFactory(type_id='conflrev',ad=ad1) + conflrev.set_state(State.objects.get(type='conflrev', slug='iesgeval')) + statchg = DocumentFactory(type_id='statchg',ad=ad2) + statchg.set_state(State.objects.get(type='statchg', slug='iesgeval')) + charter = CharterFactory(name='charter-ietf-ames',ad=ad1) + charter.set_state(State.objects.get(type='charter', slug='iesgrev')) + + r = self.client.get(urlreverse('ietf.doc.views_search.docs_for_iesg')) + self.assertEqual(r.status_code, 200) + self.assertContains(r, 
draft.name) + self.assertContains(r, escape(draft.action_holders.first().name)) + self.assertNotContains(r, rfc.name) + self.assertContains(r, conflrev.name) + self.assertContains(r, statchg.name) + self.assertContains(r, charter.name) + def test_auth48_doc_for_ad(self): """Docs in AUTH48 state should have a decoration""" ad = RoleFactory(name_id='ad', group__type_id='area', group__state_id='active').person @@ -378,17 +453,6 @@ def test_drafts_in_last_call(self): self.assertContains(r, draft.title) self.assertContains(r, escape(draft.action_holders.first().name)) - def test_in_iesg_process(self): - doc_in_process = IndividualDraftFactory() - doc_in_process.action_holders.set([PersonFactory()]) - doc_in_process.set_state(State.objects.get(type='draft-iesg', slug='lc')) - doc_not_in_process = IndividualDraftFactory() - r = self.client.get(urlreverse('ietf.doc.views_search.drafts_in_iesg_process')) - self.assertEqual(r.status_code, 200) - self.assertContains(r, doc_in_process.title) - self.assertContains(r, escape(doc_in_process.action_holders.first().name)) - self.assertNotContains(r, doc_not_in_process.title) - def test_indexes(self): draft = IndividualDraftFactory() rfc = WgRfcFactory() @@ -396,16 +460,17 @@ def test_indexes(self): r = self.client.get(urlreverse('ietf.doc.views_search.index_all_drafts')) self.assertEqual(r.status_code, 200) self.assertContains(r, draft.name) - self.assertContains(r, rfc.canonical_name().upper()) + self.assertContains(r, rfc.name.upper()) r = self.client.get(urlreverse('ietf.doc.views_search.index_active_drafts')) self.assertEqual(r.status_code, 200) self.assertContains(r, draft.title) def test_ajax_search_docs(self): - draft = IndividualDraftFactory() + draft = IndividualDraftFactory(name="draft-ietf-rfc1234bis") + rfc = IndividualRfcFactory(rfc_number=1234) + bcp = IndividualRfcFactory(name="bcp12345", type_id="bcp") - # Document url = urlreverse('ietf.doc.views_search.ajax_select2_search_docs', kwargs={ "model_name": "document", "doc_type": "draft", @@ -415,18 +480,27 @@ def test_ajax_search_docs(self): data = r.json() self.assertEqual(data[0]["id"], draft.pk) - # DocAlias - doc_alias = draft.docalias.first() - url = urlreverse('ietf.doc.views_search.ajax_select2_search_docs', kwargs={ - "model_name": "docalias", - "doc_type": "draft", + "model_name": "document", + "doc_type": "rfc", }) + r = self.client.get(url, dict(q=rfc.name)) + self.assertEqual(r.status_code, 200) + data = r.json() + self.assertEqual(data[0]["id"], rfc.pk) - r = self.client.get(url, dict(q=doc_alias.name)) + url = urlreverse('ietf.doc.views_search.ajax_select2_search_docs', kwargs={ + "model_name": "document", + "doc_type": "all", + }) + r = self.client.get(url, dict(q="1234")) self.assertEqual(r.status_code, 200) data = r.json() - self.assertEqual(data[0]["id"], doc_alias.pk) + self.assertEqual(len(data), 3) + pks = set([data[i]["id"] for i in range(3)]) + self.assertEqual(pks, set([bcp.pk, rfc.pk, draft.pk])) + + def test_recent_drafts(self): # Three drafts to show with various warnings @@ -630,23 +704,22 @@ def setUp(self): def test_document_draft(self): draft = WgDraftFactory(name='draft-ietf-mars-test',rev='01', create_revisions=range(0,2)) - HolderIprDisclosureFactory(docs=[draft]) # Docs for testing relationships. Does not test 'possibly-replaces'. The 'replaced_by' direction # is tested separately below. 
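Several of the tests above (and test_search_became_rfc earlier) lean on the post-DocAlias data model: a draft that has been published points at the resulting RFC through a RelatedDocument with relationship_id="became_rfc". A small sketch of that shape in the same factory style the suite uses; it assumes a test database, like the tests themselves.

from ietf.doc.factories import WgDraftFactory, WgRfcFactory
from ietf.doc.models import State

draft = WgDraftFactory()
rfc = WgRfcFactory()
draft.set_state(State.objects.get(type="draft", slug="rfc"))
draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc)

# Document.became_rfc(), used by the template filters above, resolves the link
assert draft.became_rfc() == rfc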
replaced = IndividualDraftFactory() - draft.relateddocument_set.create(relationship_id='replaces',source=draft,target=replaced.docalias.first()) + draft.relateddocument_set.create(relationship_id='replaces',source=draft,target=replaced) obsoleted = IndividualDraftFactory() - draft.relateddocument_set.create(relationship_id='obs',source=draft,target=obsoleted.docalias.first()) + draft.relateddocument_set.create(relationship_id='obs',source=draft,target=obsoleted) obsoleted_by = IndividualDraftFactory() - obsoleted_by.relateddocument_set.create(relationship_id='obs',source=obsoleted_by,target=draft.docalias.first()) + obsoleted_by.relateddocument_set.create(relationship_id='obs',source=obsoleted_by,target=draft) updated = IndividualDraftFactory() - draft.relateddocument_set.create(relationship_id='updates',source=draft,target=updated.docalias.first()) + draft.relateddocument_set.create(relationship_id='updates',source=draft,target=updated) updated_by = IndividualDraftFactory() - updated_by.relateddocument_set.create(relationship_id='updates',source=obsoleted_by,target=draft.docalias.first()) + updated_by.relateddocument_set.create(relationship_id='updates',source=obsoleted_by,target=draft) - external_resource = DocExtResourceFactory(doc=draft) + DocExtResourceFactory(doc=draft) # these tests aren't testing all attributes yet, feel free to # expand them @@ -657,69 +730,32 @@ def test_document_draft(self): if settings.USER_PREFERENCE_DEFAULTS['full_draft'] == 'off': self.assertContains(r, "Show full document") self.assertNotContains(r, "Deimos street") - self.assertContains(r, replaced.canonical_name()) + self.assertContains(r, replaced.name) self.assertContains(r, replaced.title) - # obs/updates not included until draft is RFC - self.assertNotContains(r, obsoleted.canonical_name()) - self.assertNotContains(r, obsoleted.title) - self.assertNotContains(r, obsoleted_by.canonical_name()) - self.assertNotContains(r, obsoleted_by.title) - self.assertNotContains(r, updated.canonical_name()) - self.assertNotContains(r, updated.title) - self.assertNotContains(r, updated_by.canonical_name()) - self.assertNotContains(r, updated_by.title) - self.assertContains(r, external_resource.value) r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)) + "?include_text=0") self.assertEqual(r.status_code, 200) self.assertContains(r, "Active Internet-Draft") self.assertContains(r, "Show full document") self.assertNotContains(r, "Deimos street") - self.assertContains(r, replaced.canonical_name()) + self.assertContains(r, replaced.name) self.assertContains(r, replaced.title) - # obs/updates not included until draft is RFC - self.assertNotContains(r, obsoleted.canonical_name()) - self.assertNotContains(r, obsoleted.title) - self.assertNotContains(r, obsoleted_by.canonical_name()) - self.assertNotContains(r, obsoleted_by.title) - self.assertNotContains(r, updated.canonical_name()) - self.assertNotContains(r, updated.title) - self.assertNotContains(r, updated_by.canonical_name()) - self.assertNotContains(r, updated_by.title) r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)) + "?include_text=foo") self.assertEqual(r.status_code, 200) self.assertContains(r, "Active Internet-Draft") self.assertNotContains(r, "Show full document") self.assertContains(r, "Deimos street") - self.assertContains(r, replaced.canonical_name()) + self.assertContains(r, replaced.name) self.assertContains(r, replaced.title) - # obs/updates not included until draft 
is RFC - self.assertNotContains(r, obsoleted.canonical_name()) - self.assertNotContains(r, obsoleted.title) - self.assertNotContains(r, obsoleted_by.canonical_name()) - self.assertNotContains(r, obsoleted_by.title) - self.assertNotContains(r, updated.canonical_name()) - self.assertNotContains(r, updated.title) - self.assertNotContains(r, updated_by.canonical_name()) - self.assertNotContains(r, updated_by.title) r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)) + "?include_text=1") self.assertEqual(r.status_code, 200) self.assertContains(r, "Active Internet-Draft") self.assertNotContains(r, "Show full document") self.assertContains(r, "Deimos street") - self.assertContains(r, replaced.canonical_name()) + self.assertContains(r, replaced.name) self.assertContains(r, replaced.title) - # obs/updates not included until draft is RFC - self.assertNotContains(r, obsoleted.canonical_name()) - self.assertNotContains(r, obsoleted.title) - self.assertNotContains(r, obsoleted_by.canonical_name()) - self.assertNotContains(r, obsoleted_by.title) - self.assertNotContains(r, updated.canonical_name()) - self.assertNotContains(r, updated.title) - self.assertNotContains(r, updated_by.canonical_name()) - self.assertNotContains(r, updated_by.title) self.client.cookies = SimpleCookie({str('full_draft'): str('on')}) r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name))) @@ -727,17 +763,8 @@ def test_document_draft(self): self.assertContains(r, "Active Internet-Draft") self.assertNotContains(r, "Show full document") self.assertContains(r, "Deimos street") - self.assertContains(r, replaced.canonical_name()) + self.assertContains(r, replaced.name) self.assertContains(r, replaced.title) - # obs/updates not included until draft is RFC - self.assertNotContains(r, obsoleted.canonical_name()) - self.assertNotContains(r, obsoleted.title) - self.assertNotContains(r, obsoleted_by.canonical_name()) - self.assertNotContains(r, obsoleted_by.title) - self.assertNotContains(r, updated.canonical_name()) - self.assertNotContains(r, updated.title) - self.assertNotContains(r, updated_by.canonical_name()) - self.assertNotContains(r, updated_by.title) self.client.cookies = SimpleCookie({str('full_draft'): str('off')}) r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name))) @@ -745,17 +772,8 @@ def test_document_draft(self): self.assertContains(r, "Active Internet-Draft") self.assertContains(r, "Show full document") self.assertNotContains(r, "Deimos street") - self.assertContains(r, replaced.canonical_name()) + self.assertContains(r, replaced.name) self.assertContains(r, replaced.title) - # obs/updates not included until draft is RFC - self.assertNotContains(r, obsoleted.canonical_name()) - self.assertNotContains(r, obsoleted.title) - self.assertNotContains(r, obsoleted_by.canonical_name()) - self.assertNotContains(r, obsoleted_by.title) - self.assertNotContains(r, updated.canonical_name()) - self.assertNotContains(r, updated.title) - self.assertNotContains(r, updated_by.canonical_name()) - self.assertNotContains(r, updated_by.title) self.client.cookies = SimpleCookie({str('full_draft'): str('foo')}) r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name))) @@ -764,17 +782,8 @@ def test_document_draft(self): if settings.USER_PREFERENCE_DEFAULTS['full_draft'] == 'off': self.assertContains(r, "Show full document") self.assertNotContains(r, "Deimos street") - 
self.assertContains(r, replaced.canonical_name()) + self.assertContains(r, replaced.name) self.assertContains(r, replaced.title) - # obs/updates not included until draft is RFC - self.assertNotContains(r, obsoleted.canonical_name()) - self.assertNotContains(r, obsoleted.title) - self.assertNotContains(r, obsoleted_by.canonical_name()) - self.assertNotContains(r, obsoleted_by.title) - self.assertNotContains(r, updated.canonical_name()) - self.assertNotContains(r, updated.title) - self.assertNotContains(r, updated_by.canonical_name()) - self.assertNotContains(r, updated_by.title) r = self.client.get(urlreverse("ietf.doc.views_doc.document_html", kwargs=dict(name=draft.name))) self.assertEqual(r.status_code, 200) @@ -800,16 +809,16 @@ def test_document_draft(self): rfc = WgRfcFactory() rfc.save_with_history([DocEventFactory(doc=rfc)]) (Path(settings.RFC_PATH) / rfc.get_base_name()).touch() - r = self.client.get(urlreverse("ietf.doc.views_doc.document_html", kwargs=dict(name=rfc.canonical_name()))) + r = self.client.get(urlreverse("ietf.doc.views_doc.document_html", kwargs=dict(name=rfc.name))) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) - self.assertEqual(q('title').text(), f'RFC {rfc.rfc_number()} - {rfc.title}') + self.assertEqual(q('title').text(), f'RFC {rfc.rfc_number} - {rfc.title}') # synonyms for the rfc should be redirected to its canonical view - r = self.client.get(urlreverse("ietf.doc.views_doc.document_html", kwargs=dict(name=rfc.rfc_number()))) - self.assertRedirects(r, urlreverse("ietf.doc.views_doc.document_html", kwargs=dict(name=rfc.canonical_name()))) - r = self.client.get(urlreverse("ietf.doc.views_doc.document_html", kwargs=dict(name=f'RFC {rfc.rfc_number()}'))) - self.assertRedirects(r, urlreverse("ietf.doc.views_doc.document_html", kwargs=dict(name=rfc.canonical_name()))) + r = self.client.get(urlreverse("ietf.doc.views_doc.document_html", kwargs=dict(name=rfc.rfc_number))) + self.assertRedirects(r, urlreverse("ietf.doc.views_doc.document_html", kwargs=dict(name=rfc.name))) + r = self.client.get(urlreverse("ietf.doc.views_doc.document_html", kwargs=dict(name=f'RFC {rfc.rfc_number}'))) + self.assertRedirects(r, urlreverse("ietf.doc.views_doc.document_html", kwargs=dict(name=rfc.name))) # expired draft draft.set_state(State.objects.get(type="draft", slug="expired")) @@ -830,46 +839,53 @@ def test_document_draft(self): shepherd_id=draft.shepherd_id, ad_id=draft.ad_id, expires=draft.expires, notify=draft.notify) rel = RelatedDocument.objects.create(source=replacement, - target=draft.docalias.get(name__startswith="draft"), + target=draft, relationship_id="replaces") r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name))) self.assertEqual(r.status_code, 200) self.assertContains(r, "Replaced Internet-Draft") - self.assertContains(r, replacement.canonical_name()) + self.assertContains(r, replacement.name) self.assertContains(r, replacement.title) rel.delete() # draft published as RFC draft.set_state(State.objects.get(type="draft", slug="rfc")) - draft.std_level_id = "bcp" - draft.save_with_history([DocEvent.objects.create(doc=draft, rev=draft.rev, type="published_rfc", by=Person.objects.get(name="(System)"))]) + draft.std_level_id = "ps" + + rfc = WgRfcFactory(group=draft.group, name="rfc123456") + rfc.save_with_history([DocEvent.objects.create(doc=rfc, rev=None, type="published_rfc", by=Person.objects.get(name="(System)"))]) + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) - rfc_alias = 
DocAlias.objects.create(name="rfc123456") - rfc_alias.docs.add(draft) - bcp_alias = DocAlias.objects.create(name="bcp123456") - bcp_alias.docs.add(draft) + obsoleted = IndividualRfcFactory() + rfc.relateddocument_set.create(relationship_id='obs',target=obsoleted) + obsoleted_by = IndividualRfcFactory() + obsoleted_by.relateddocument_set.create(relationship_id='obs',target=rfc) + updated = IndividualRfcFactory() + rfc.relateddocument_set.create(relationship_id='updates',target=updated) + updated_by = IndividualRfcFactory() + updated_by.relateddocument_set.create(relationship_id='updates',target=rfc) + + r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name, rev=draft.rev))) + self.assertEqual(r.status_code, 200) + self.assertContains(r, "This is an older version of an Internet-Draft that was ultimately published as") r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name))) self.assertEqual(r.status_code, 302) - r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=bcp_alias.name))) - self.assertEqual(r.status_code, 302) - r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=rfc_alias.name))) + r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=rfc.name))) self.assertEqual(r.status_code, 200) self.assertContains(r, "RFC 123456") self.assertContains(r, draft.name) - self.assertContains(r, replaced.canonical_name()) - self.assertContains(r, replaced.title) # obs/updates included with RFC - self.assertContains(r, obsoleted.canonical_name()) + self.assertContains(r, obsoleted.name) self.assertContains(r, obsoleted.title) - self.assertContains(r, obsoleted_by.canonical_name()) + self.assertContains(r, obsoleted_by.name) self.assertContains(r, obsoleted_by.title) - self.assertContains(r, updated.canonical_name()) + self.assertContains(r, updated.name) self.assertContains(r, updated.title) - self.assertContains(r, updated_by.canonical_name()) + self.assertContains(r, updated_by.name) self.assertContains(r, updated_by.title) # naked RFC - also weird that we test a PS from the ISE @@ -902,7 +918,7 @@ def test_draft_status_changes(self): draft = WgRfcFactory() status_change_doc = StatusChangeFactory( group=draft.group, - changes_status_of=[('tops', draft.docalias.first())], + changes_status_of=[('tops', draft)], ) status_change_url = urlreverse( 'ietf.doc.views_doc.document_main', @@ -910,7 +926,7 @@ def test_draft_status_changes(self): ) proposed_status_change_doc = StatusChangeFactory( group=draft.group, - changes_status_of=[('tobcp', draft.docalias.first())], + changes_status_of=[('tobcp', draft)], states=[State.objects.get(slug='needshep', type='statchg')], ) proposed_status_change_url = urlreverse( @@ -921,7 +937,7 @@ def test_draft_status_changes(self): r = self.client.get( urlreverse( 'ietf.doc.views_doc.document_main', - kwargs={'name': draft.canonical_name()}, + kwargs={'name': draft.name}, ) ) self.assertEqual(r.status_code, 200) @@ -967,7 +983,7 @@ def test_edit_authors_permissions(self): # Relevant users not authorized to edit authors unauthorized_usernames = [ 'plain', - *[author.user.username for author in draft.authors()], + *[author.user.username for author in draft.author_persons()], draft.group.get_chair().person.user.username, 'ad' ] @@ -982,7 +998,7 @@ def test_edit_authors_permissions(self): self.client.logout() # Try to add an author via POST - still only the secretary should be able to do this. 
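# Illustrative sketch: the tests above now express "this draft became an RFC" with a
# RelatedDocument pointing at a Document rather than via a DocAlias. The factories and
# the "became_rfc" relationship are taken from the changes above; the helper name
# published_rfc_for is hypothetical and only for illustration.
from ietf.doc.factories import WgDraftFactory, WgRfcFactory

def published_rfc_for(draft):
    """Return the RFC Document this draft became, or None (illustration only)."""
    rel = draft.relateddocument_set.filter(relationship_id="became_rfc").first()
    return rel.target if rel else None  # target is now a Document, not a DocAlias

draft = WgDraftFactory()
rfc = WgRfcFactory(group=draft.group)
draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc)
assert published_rfc_for(draft) == rfc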
- orig_authors = draft.authors() + orig_authors = draft.author_persons() post_data = self.make_edit_authors_post_data( basis='permission test', authors=draft.documentauthor_set.all(), @@ -1000,12 +1016,12 @@ def test_edit_authors_permissions(self): for username in unauthorized_usernames: login_testing_unauthorized(self, username, url, method='post', request_kwargs=dict(data=post_data)) draft = Document.objects.get(pk=draft.pk) - self.assertEqual(draft.authors(), orig_authors) # ensure draft author list was not modified + self.assertEqual(draft.author_persons(), orig_authors) # ensure draft author list was not modified login_testing_unauthorized(self, 'secretary', url, method='post', request_kwargs=dict(data=post_data)) r = self.client.post(url, post_data) self.assertEqual(r.status_code, 302) draft = Document.objects.get(pk=draft.pk) - self.assertEqual(draft.authors(), orig_authors + [new_auth_person]) + self.assertEqual(draft.author_persons(), orig_authors + [new_auth_person]) def make_edit_authors_post_data(self, basis, authors): """Helper to generate edit_authors POST data for a set of authors""" @@ -1353,8 +1369,8 @@ def test_edit_authors_edit_fields(self): basis=change_reason ) - old_address = draft.authors()[0].email() - new_email = EmailFactory(person=draft.authors()[0], address=f'changed-{old_address}') + old_address = draft.author_persons()[0].email() + new_email = EmailFactory(person=draft.author_persons()[0], address=f'changed-{old_address}') post_data['author-0-email'] = new_email.address post_data['author-1-affiliation'] = 'University of Nowhere' post_data['author-2-country'] = 'Chile' @@ -1387,17 +1403,17 @@ def test_edit_authors_edit_fields(self): country_event = change_events.filter(desc__icontains='changed country').first() self.assertIsNotNone(email_event) - self.assertIn(draft.authors()[0].name, email_event.desc) + self.assertIn(draft.author_persons()[0].name, email_event.desc) self.assertIn(before[0]['email'], email_event.desc) self.assertIn(after[0]['email'], email_event.desc) self.assertIsNotNone(affiliation_event) - self.assertIn(draft.authors()[1].name, affiliation_event.desc) + self.assertIn(draft.author_persons()[1].name, affiliation_event.desc) self.assertIn(before[1]['affiliation'], affiliation_event.desc) self.assertIn(after[1]['affiliation'], affiliation_event.desc) self.assertIsNotNone(country_event) - self.assertIn(draft.authors()[2].name, country_event.desc) + self.assertIn(draft.author_persons()[2].name, country_event.desc) self.assertIn(before[2]['country'], country_event.desc) self.assertIn(after[2]['country'], country_event.desc) @@ -1455,6 +1471,14 @@ def test_document_draft_action_holders_buttons(self, mock_method): """Buttons for action holders should be shown when AD or secretary""" draft = WgDraftFactory() draft.action_holders.set([PersonFactory()]) + other_group = GroupFactory(type_id=draft.group.type_id) + + # create a test RoleName and put it in the docman_roles for the document group + RoleName.objects.create(slug="wrangler", name="Wrangler", used=True) + draft.group.features.docman_roles.append("wrangler") + draft.group.features.save() + wrangler = RoleFactory(group=draft.group, name_id="wrangler").person + wrangler_of_other_group = RoleFactory(group=other_group, name_id="wrangler").person url = urlreverse('ietf.doc.views_doc.document_main', kwargs=dict(name=draft.name)) edit_ah_url = urlreverse('ietf.doc.views_doc.edit_action_holders', kwargs=dict(name=draft.name)) @@ -1487,6 +1511,8 @@ def _run_test(username=None, expect_buttons=False): 
_run_test(None, False) _run_test('plain', False) + _run_test(wrangler_of_other_group.user.username, False) + _run_test(wrangler.user.username, True) _run_test('ad', True) _run_test('secretary', True) @@ -1501,11 +1527,11 @@ def test_draft_group_link(self): self.assertEqual(r.status_code, 200) self.assert_correct_wg_group_link(r, group) - rfc = WgRfcFactory(name='draft-rfc-document-%s' % group_type_id, group=group) + rfc = WgRfcFactory(group=group) + draft = WgDraftFactory(group=group) + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) DocEventFactory.create(doc=rfc, type='published_rfc', time=event_datetime) - # get the rfc name to avoid a redirect - rfc_name = rfc.docalias.filter(name__startswith='rfc').first().name - r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=rfc_name))) + r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=rfc.name))) self.assertEqual(r.status_code, 200) self.assert_correct_wg_group_link(r, group) @@ -1516,14 +1542,33 @@ def test_draft_group_link(self): self.assertEqual(r.status_code, 200) self.assert_correct_non_wg_group_link(r, group) - rfc = WgRfcFactory(name='draft-rfc-document-%s' % group_type_id, group=group) + rfc = WgRfcFactory(group=group) + draft = WgDraftFactory(name='draft-rfc-document-%s'% group_type_id, group=group) + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) DocEventFactory.create(doc=rfc, type='published_rfc', time=event_datetime) - # get the rfc name to avoid a redirect - rfc_name = rfc.docalias.filter(name__startswith='rfc').first().name - r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=rfc_name))) + r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=rfc.name))) self.assertEqual(r.status_code, 200) self.assert_correct_non_wg_group_link(r, group) + def test_document_email_authors_button(self): + # rfc not from draft + rfc = WgRfcFactory() + DocEventFactory.create(doc=rfc, type='published_rfc') + url = urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=rfc.name)) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertEqual(len(q('a:contains("Email authors")')), 0, 'Did not expect "Email authors" button') + + # rfc from draft + draft = WgDraftFactory(group=rfc.group) + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) + draft.set_state(State.objects.get(used=True, type="draft", slug="rfc")) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertEqual(len(q('a:contains("Email authors")')), 1, 'Expected "Email authors" button') + def test_document_primary_and_history_views(self): IndividualDraftFactory(name='draft-imaginary-independent-submission') ConflictReviewFactory(name='conflict-review-imaginary-irtf-submission') @@ -1621,8 +1666,8 @@ def test_status_change(self): statchg = StatusChangeFactory() r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=statchg.name))) self.assertEqual(r.status_code, 200) - r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=statchg.relateddocument_set.first().target.document))) - self.assertEqual(r.status_code, 302) + r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=statchg.relateddocument_set.first().target))) + self.assertEqual(r.status_code, 200) def test_document_charter(self): 
CharterFactory(name='charter-ietf-mars') @@ -1669,6 +1714,17 @@ def test_document_material(self): r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name))) self.assertEqual(r.status_code, 200) + self.assertNotContains(r, "The session for this document was cancelled.") + + SchedulingEvent.objects.create( + session=session, + status_id='canceled', + by = Person.objects.get(user__username="marschairman"), + ) + + r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name))) + self.assertEqual(r.status_code, 200) + self.assertContains(r, "The session for this document was cancelled.") def test_document_ballot(self): doc = IndividualDraftFactory() @@ -1786,8 +1842,8 @@ def test_document_ballot_needed_positions(self): self.assertNotContains(r, 'more YES or NO') # status change - DocAlias.objects.create(name='rfc9998').docs.add(IndividualDraftFactory()) - DocAlias.objects.create(name='rfc9999').docs.add(IndividualDraftFactory()) + Document.objects.create(name='rfc9998') + Document.objects.create(name='rfc9999') doc = DocumentFactory(type_id='statchg',name='status-change-imaginary-mid-review') iesgeval_pk = str(State.objects.get(slug='iesgeval',type__slug='statchg').pk) empty_outbox() @@ -1800,24 +1856,74 @@ def test_document_ballot_needed_positions(self): self.assertIn('iesg-secretary',outbox[0]['To']) self.assertIn('drafts-eval',outbox[1]['To']) - doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9998'),relationship_id='tohist') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9998'),relationship_id='tohist') r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name))) self.assertNotContains(r, 'Needs a YES') self.assertNotContains(r, 'more YES or NO') - doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9999'),relationship_id='tois') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9999'),relationship_id='tois') r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name))) self.assertContains(r, 'more YES or NO') def test_document_json(self): doc = IndividualDraftFactory() - + author = DocumentAuthorFactory(document=doc) + r = self.client.get(urlreverse("ietf.doc.views_doc.document_json", kwargs=dict(name=doc.name))) self.assertEqual(r.status_code, 200) data = r.json() - self.assertEqual(doc.name, data['name']) - self.assertEqual(doc.pages,data['pages']) + self.assertEqual(data["name"], doc.name) + self.assertEqual(data["pages"], doc.pages) + self.assertEqual( + data["authors"], + [ + { + "name": author.person.name, + "email": author.email.address, + "affiliation": author.affiliation, + } + ] + ) + def test_document_json_rfc(self): + doc = IndividualRfcFactory() + old_style_author = DocumentAuthorFactory(document=doc) + url = urlreverse("ietf.doc.views_doc.document_json", kwargs=dict(name=doc.name)) + + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + data = r.json() + self.assertEqual(data["name"], doc.name) + self.assertEqual(data["pages"], doc.pages) + self.assertEqual( + data["authors"], + [ + { + "name": old_style_author.person.name, + "email": old_style_author.email.address, + "affiliation": old_style_author.affiliation, + } + ] + ) + + new_style_author = RfcAuthorFactory(document=doc) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + data = r.json() + self.assertEqual(data["name"], doc.name) + self.assertEqual(data["pages"], doc.pages) 
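# Illustrative sketch: the shape of the document_json payload that the assertions above
# and below rely on. Only the dict keys (name, pages, and authors entries with
# name/email/affiliation) come from the tests; the fetch URL and helper below are
# assumptions for illustration.
def author_emails(doc_json):
    # Each author entry carries "name", "email" and "affiliation" per the assertions.
    return [a["email"] for a in doc_json.get("authors", [])]

# Hypothetical usage against a running datatracker instance:
# import json, urllib.request
# with urllib.request.urlopen("https://dt.example.org/doc/draft-foo-bar/doc.json") as resp:
#     data = json.load(resp)
# print(data["name"], data["pages"], author_emails(data))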
+ self.assertEqual( + data["authors"], + [ + { + "name": new_style_author.titlepage_name, + "email": new_style_author.email.address, + "affiliation": new_style_author.affiliation, + } + ] + ) + + def test_writeup(self): doc = IndividualDraftFactory(states = [('draft','active'),('draft-iesg','iesg-eva')],) @@ -1852,6 +1958,18 @@ def test_writeup(self): self.assertContains(r, notes.text) self.assertContains(r, rfced_note.text) + def test_diff_revisions(self): + ind_doc = IndividualDraftFactory(create_revisions=range(2)) + wg_doc = WgDraftFactory( + relations=[("replaces", ind_doc)], create_revisions=range(2) + ) + diff_revisions = get_diff_revisions(HttpRequest(), wg_doc.name, wg_doc) + self.assertEqual(len(diff_revisions), 4) + self.assertEqual( + [t[3] for t in diff_revisions], + [f"{n}-{v:02d}" for n in [wg_doc.name, ind_doc.name] for v in [1, 0]], + ) + def test_history(self): doc = IndividualDraftFactory() @@ -1868,15 +1986,14 @@ def test_history(self): self.assertContains(r, e.desc) def test_history_bis_00(self): - rfcname='rfc9090' - rfc = WgRfcFactory(alias2=rfcname) - bis_draft = WgDraftFactory(name='draft-ietf-{}-{}bis'.format(rfc.group.acronym,rfcname)) + rfc = WgRfcFactory(rfc_number=9090) + bis_draft = WgDraftFactory(name='draft-ietf-{}-{}bis'.format(rfc.group.acronym,rfc.name)) url = urlreverse('ietf.doc.views_doc.document_history', kwargs=dict(name=bis_draft.name)) r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(unicontent(r)) - attr1='value="{}"'.format(rfcname) + attr1='value="{}"'.format(rfc.name) self.assertEqual(len(q('option['+attr1+'][selected="selected"]')), 1) @@ -1926,7 +2043,7 @@ def test_last_call_feed(self): self.assertContains(r, doc.name) def test_rfc_feed(self): - rfc = WgRfcFactory(alias2__name="rfc9000") + rfc = WgRfcFactory(rfc_number=9000) DocEventFactory(doc=rfc, type="published_rfc") r = self.client.get("/feed/rfc/") self.assertTrue(r.status_code, 200) @@ -1983,76 +2100,91 @@ def _parse_bibtex_response(self, response) -> dict: @override_settings(RFC_EDITOR_INFO_BASE_URL='https://www.rfc-editor.ietf.org/info/') def test_document_bibtex(self): + + for factory in [CharterFactory, BcpFactory, StatusChangeFactory, ConflictReviewFactory]: # Should be extended to all other doc types + doc = factory() + url = urlreverse("ietf.doc.views_doc.document_bibtex", kwargs=dict(name=doc.name)) + r = self.client.get(url) + self.assertEqual(r.status_code, 404) rfc = WgRfcFactory.create( - #other_aliases = ['rfc6020',], - states = [('draft','rfc'),('draft-iesg','pub')], - std_level_id = 'ps', - time = datetime.datetime(2010, 10, 10, tzinfo=ZoneInfo(settings.TIME_ZONE)), - ) - num = rfc.rfc_number() + time=datetime.datetime(2010, 10, 10, tzinfo=ZoneInfo(settings.TIME_ZONE)) + ) + num = rfc.rfc_number DocEventFactory.create( doc=rfc, - type='published_rfc', + type="published_rfc", time=datetime.datetime(2010, 10, 10, tzinfo=RPC_TZINFO), ) # - url = urlreverse('ietf.doc.views_doc.document_bibtex', kwargs=dict(name=rfc.name)) + url = urlreverse("ietf.doc.views_doc.document_bibtex", kwargs=dict(name=rfc.name)) r = self.client.get(url) - entry = self._parse_bibtex_response(r)["rfc%s"%num] - self.assertEqual(entry['series'], 'Request for Comments') - self.assertEqual(entry['number'], num) - self.assertEqual(entry['doi'], '10.17487/RFC%s'%num) - self.assertEqual(entry['year'], '2010') - self.assertEqual(entry['month'].lower()[0:3], 'oct') - self.assertEqual(entry['url'], f'https://www.rfc-editor.ietf.org/info/rfc{num}') + entry = 
self._parse_bibtex_response(r)["rfc%s" % num] + self.assertEqual(entry["series"], "Request for Comments") + self.assertEqual(int(entry["number"]), num) + self.assertEqual(entry["doi"], "10.17487/RFC%s" % num) + self.assertEqual(entry["year"], "2010") + self.assertEqual(entry["month"].lower()[0:3], "oct") + self.assertEqual(entry["url"], f"https://www.rfc-editor.ietf.org/info/rfc{num}") # - self.assertNotIn('day', entry) - + self.assertNotIn("day", entry) + # test for incorrect case - revision for RFC rfc = WgRfcFactory(name="rfc0000") - url = urlreverse('ietf.doc.views_doc.document_bibtex', kwargs=dict(name=rfc.name, rev='00')) + url = urlreverse( + "ietf.doc.views_doc.document_bibtex", kwargs=dict(name=rfc.name, rev="00") + ) r = self.client.get(url) self.assertEqual(r.status_code, 404) - + april1 = IndividualRfcFactory.create( - stream_id = 'ise', - states = [('draft','rfc'),('draft-iesg','pub')], - std_level_id = 'inf', - time = datetime.datetime(1990, 4, 1, tzinfo=ZoneInfo(settings.TIME_ZONE)), - ) - num = april1.rfc_number() + stream_id="ise", + std_level_id="inf", + time=datetime.datetime(1990, 4, 1, tzinfo=ZoneInfo(settings.TIME_ZONE)), + ) + num = april1.rfc_number DocEventFactory.create( doc=april1, - type='published_rfc', + type="published_rfc", time=datetime.datetime(1990, 4, 1, tzinfo=RPC_TZINFO), ) # - url = urlreverse('ietf.doc.views_doc.document_bibtex', kwargs=dict(name=april1.name)) + url = urlreverse( + "ietf.doc.views_doc.document_bibtex", kwargs=dict(name=april1.name) + ) r = self.client.get(url) - self.assertEqual(r.get('Content-Type'), 'text/plain; charset=utf-8') - entry = self._parse_bibtex_response(r)["rfc%s"%num] - self.assertEqual(entry['series'], 'Request for Comments') - self.assertEqual(entry['number'], num) - self.assertEqual(entry['doi'], '10.17487/RFC%s'%num) - self.assertEqual(entry['year'], '1990') - self.assertEqual(entry['month'].lower()[0:3], 'apr') - self.assertEqual(entry['day'], '1') - self.assertEqual(entry['url'], f'https://www.rfc-editor.ietf.org/info/rfc{num}') - + self.assertEqual(r.get("Content-Type"), "text/plain; charset=utf-8") + entry = self._parse_bibtex_response(r)["rfc%s" % num] + self.assertEqual(entry["series"], "Request for Comments") + self.assertEqual(int(entry["number"]), num) + self.assertEqual(entry["doi"], "10.17487/RFC%s" % num) + self.assertEqual(entry["year"], "1990") + self.assertEqual(entry["month"].lower()[0:3], "apr") + self.assertEqual(entry["day"], "1") + self.assertEqual(entry["url"], f"https://www.rfc-editor.ietf.org/info/rfc{num}") + draft = IndividualDraftFactory.create() - docname = '%s-%s' % (draft.name, draft.rev) - bibname = docname[6:] # drop the 'draft-' prefix - url = urlreverse('ietf.doc.views_doc.document_bibtex', kwargs=dict(name=draft.name)) + docname = "%s-%s" % (draft.name, draft.rev) + bibname = docname[6:] # drop the 'draft-' prefix + url = urlreverse("ietf.doc.views_doc.document_bibtex", kwargs=dict(name=draft.name)) r = self.client.get(url) entry = self._parse_bibtex_response(r)[bibname] - self.assertEqual(entry['note'], 'Work in Progress') - self.assertEqual(entry['number'], docname) - self.assertEqual(entry['year'], str(draft.pub_date().year)) - self.assertEqual(entry['month'].lower()[0:3], draft.pub_date().strftime('%b').lower()) - self.assertEqual(entry['day'], str(draft.pub_date().day)) - self.assertEqual(entry['url'], settings.IDTRACKER_BASE_URL + urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name, rev=draft.rev))) + self.assertEqual(entry["note"], "Work in Progress") 
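# Illustrative summary of the BibTeX fields asserted above for an RFC entry; the
# concrete values are placeholders keyed off a hypothetical rfc_number=1234, not
# output copied from the view.
expected_rfc_entry = {
    "series": "Request for Comments",
    "number": "1234",                # compared via int(entry["number"]) above
    "doi": "10.17487/RFC1234",
    "year": "2010",
    "month": "oct",                  # only the first three letters are compared, case-insensitively
    "url": "https://www.rfc-editor.ietf.org/info/rfc1234",
}
# Draft entries instead carry note="Work in Progress", a day, and a datatracker URL,
# and omit the DOI, as the assertions that follow check.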
+ self.assertEqual(entry["number"], docname) + self.assertEqual(entry["year"], str(draft.pub_date().year)) + self.assertEqual( + entry["month"].lower()[0:3], draft.pub_date().strftime("%b").lower() + ) + self.assertEqual(entry["day"], str(draft.pub_date().day)) + self.assertEqual( + entry["url"], + settings.IDTRACKER_BASE_URL + + urlreverse( + "ietf.doc.views_doc.document_main", + kwargs=dict(name=draft.name, rev=draft.rev), + ), + ) # - self.assertNotIn('doi', entry) + self.assertNotIn("doi", entry) def test_document_bibxml(self): draft = IndividualDraftFactory.create() @@ -2083,20 +2215,19 @@ def test_trailing_hypen_digit_name_bibxml(self): class AddCommentTestCase(TestCase): def test_add_comment(self): - draft = WgDraftFactory(name='draft-ietf-mars-test',group__acronym='mars') - url = urlreverse('ietf.doc.views_doc.add_comment', kwargs=dict(name=draft.name)) + draft = WgDraftFactory(name="draft-ietf-mars-test", group__acronym="mars") + url = urlreverse("ietf.doc.views_doc.add_comment", kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) # normal get r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(unicontent(r)) - self.assertEqual(len(q('form textarea[name=comment]')), 1) + self.assertEqual(len(q("form textarea[name=comment]")), 1) - # request resurrect events_before = draft.docevent_set.count() mailbox_before = len(outbox) - + r = self.client.post(url, dict(comment="This is a test.")) self.assertEqual(r.status_code, 302) @@ -2104,9 +2235,9 @@ def test_add_comment(self): self.assertEqual("This is a test.", draft.latest_event().desc) self.assertEqual("added_comment", draft.latest_event().type) self.assertEqual(len(outbox), mailbox_before + 1) - self.assertIn("Comment added", outbox[-1]['Subject']) - self.assertIn(draft.name, outbox[-1]['Subject']) - self.assertIn('draft-ietf-mars-test@', outbox[-1]['To']) + self.assertIn("Comment added", outbox[-1]["Subject"]) + self.assertIn(draft.name, outbox[-1]["Subject"]) + self.assertIn("draft-ietf-mars-test@", outbox[-1]["To"]) # Make sure we can also do it as IANA self.client.login(username="iana", password="iana+password") @@ -2115,7 +2246,22 @@ def test_add_comment(self): r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(unicontent(r)) - self.assertEqual(len(q('form textarea[name=comment]')), 1) + self.assertEqual(len(q("form textarea[name=comment]")), 1) + + empty_outbox() + rfc = WgRfcFactory() + self.client.login(username="rfc", password="rfc+password") + url = urlreverse("ietf.doc.views_doc.add_comment", kwargs=dict(name=rfc.name)) + r = self.client.post( + url, dict(comment="This is an RFC Editor comment on an RFC.") + ) + self.assertEqual(r.status_code, 302) + + self.assertEqual( + "This is an RFC Editor comment on an RFC.", rfc.latest_event().desc + ) + self.assertEqual(len(outbox), 1) + self.assertIn("This is an RFC Editor comment on an RFC.", get_payload_text(outbox[0])) class TemplateTagTest(TestCase): @@ -2129,7 +2275,7 @@ class ReferencesTest(TestCase): def test_references(self): doc1 = WgDraftFactory(name='draft-ietf-mars-test') - doc2 = IndividualDraftFactory(name='draft-imaginary-independent-submission').docalias.first() + doc2 = IndividualDraftFactory(name='draft-imaginary-independent-submission') RelatedDocument.objects.get_or_create(source=doc1,target=doc2,relationship=DocRelationshipName.objects.get(slug='refnorm')) url = urlreverse('ietf.doc.views_doc.document_references', kwargs=dict(name=doc1.name)) r = self.client.get(url) @@ -2141,124 +2287,169 
@@ def test_references(self): self.assertContains(r, doc1.name) class GenerateDraftAliasesTests(TestCase): - def setUp(self): - super().setUp() - self.doc_aliases_file = NamedTemporaryFile(delete=False, mode='w+') - self.doc_aliases_file.close() - self.doc_virtual_file = NamedTemporaryFile(delete=False, mode='w+') - self.doc_virtual_file.close() - self.saved_draft_aliases_path = settings.DRAFT_ALIASES_PATH - self.saved_draft_virtual_path = settings.DRAFT_VIRTUAL_PATH - settings.DRAFT_ALIASES_PATH = self.doc_aliases_file.name - settings.DRAFT_VIRTUAL_PATH = self.doc_virtual_file.name - - def tearDown(self): - settings.DRAFT_ALIASES_PATH = self.saved_draft_aliases_path - settings.DRAFT_VIRTUAL_PATH = self.saved_draft_virtual_path - os.unlink(self.doc_aliases_file.name) - os.unlink(self.doc_virtual_file.name) - super().tearDown() - - def testManagementCommand(self): - a_month_ago = (timezone.now() - datetime.timedelta(30)).astimezone(RPC_TZINFO) - a_month_ago = a_month_ago.replace(hour=0, minute=0, second=0, microsecond=0) - ad = RoleFactory(name_id='ad', group__type_id='area', group__state_id='active').person - shepherd = PersonFactory() - author1 = PersonFactory() - author2 = PersonFactory() - author3 = PersonFactory() - author4 = PersonFactory() - author5 = PersonFactory() - author6 = PersonFactory() - mars = GroupFactory(type_id='wg', acronym='mars') - marschairman = PersonFactory(user__username='marschairman') - mars.role_set.create(name_id='chair', person=marschairman, email=marschairman.email()) - doc1 = IndividualDraftFactory(authors=[author1], shepherd=shepherd.email(), ad=ad) - doc2 = WgDraftFactory(name='draft-ietf-mars-test', group__acronym='mars', authors=[author2], ad=ad) - doc3 = WgRfcFactory.create(name='draft-ietf-mars-finished', group__acronym='mars', authors=[author3], ad=ad, std_level_id='ps', states=[('draft','rfc'),('draft-iesg','pub')], time=a_month_ago) - DocEventFactory.create(doc=doc3, type='published_rfc', time=a_month_ago) - doc4 = WgRfcFactory.create(authors=[author4,author5], ad=ad, std_level_id='ps', states=[('draft','rfc'),('draft-iesg','pub')], time=datetime.datetime(2010,10,10, tzinfo=ZoneInfo(settings.TIME_ZONE))) - DocEventFactory.create(doc=doc4, type='published_rfc', time=datetime.datetime(2010, 10, 10, tzinfo=RPC_TZINFO)) - doc5 = IndividualDraftFactory(authors=[author6]) - - args = [ ] - kwargs = { } - out = io.StringIO() - call_command("generate_draft_aliases", *args, **kwargs, stdout=out, stderr=out) - self.assertFalse(out.getvalue()) - - with open(settings.DRAFT_ALIASES_PATH) as afile: - acontent = afile.read() - self.assertTrue(all([x in acontent for x in [ - 'xfilter-' + doc1.name, - 'xfilter-' + doc1.name + '.ad', - 'xfilter-' + doc1.name + '.authors', - 'xfilter-' + doc1.name + '.shepherd', - 'xfilter-' + doc1.name + '.all', - 'xfilter-' + doc2.name, - 'xfilter-' + doc2.name + '.ad', - 'xfilter-' + doc2.name + '.authors', - 'xfilter-' + doc2.name + '.chairs', - 'xfilter-' + doc2.name + '.all', - 'xfilter-' + doc3.name, - 'xfilter-' + doc3.name + '.ad', - 'xfilter-' + doc3.name + '.authors', - 'xfilter-' + doc3.name + '.chairs', - 'xfilter-' + doc5.name, - 'xfilter-' + doc5.name + '.authors', - 'xfilter-' + doc5.name + '.all', - ]])) - self.assertFalse(all([x in acontent for x in [ - 'xfilter-' + doc1.name + '.chairs', - 'xfilter-' + doc2.name + '.shepherd', - 'xfilter-' + doc3.name + '.shepherd', - 'xfilter-' + doc4.name, - 'xfilter-' + doc5.name + '.shepherd', - 'xfilter-' + doc5.name + '.ad', - ]])) - - with open(settings.DRAFT_VIRTUAL_PATH) as 
vfile: - vcontent = vfile.read() - self.assertTrue(all([x in vcontent for x in [ - ad.email_address(), - shepherd.email_address(), - marschairman.email_address(), - author1.email_address(), - author2.email_address(), - author3.email_address(), - author6.email_address(), - ]])) - self.assertFalse(all([x in vcontent for x in [ - author4.email_address(), - author5.email_address(), - ]])) - self.assertTrue(all([x in vcontent for x in [ - 'xfilter-' + doc1.name, - 'xfilter-' + doc1.name + '.ad', - 'xfilter-' + doc1.name + '.authors', - 'xfilter-' + doc1.name + '.shepherd', - 'xfilter-' + doc1.name + '.all', - 'xfilter-' + doc2.name, - 'xfilter-' + doc2.name + '.ad', - 'xfilter-' + doc2.name + '.authors', - 'xfilter-' + doc2.name + '.chairs', - 'xfilter-' + doc2.name + '.all', - 'xfilter-' + doc3.name, - 'xfilter-' + doc3.name + '.ad', - 'xfilter-' + doc3.name + '.authors', - 'xfilter-' + doc3.name + '.chairs', - 'xfilter-' + doc5.name, - 'xfilter-' + doc5.name + '.authors', - 'xfilter-' + doc5.name + '.all', - ]])) - self.assertFalse(all([x in vcontent for x in [ - 'xfilter-' + doc1.name + '.chairs', - 'xfilter-' + doc2.name + '.shepherd', - 'xfilter-' + doc3.name + '.shepherd', - 'xfilter-' + doc4.name, - 'xfilter-' + doc5.name + '.shepherd', - 'xfilter-' + doc5.name + '.ad', - ]])) + @override_settings(TOOLS_SERVER="tools.example.org", DRAFT_ALIAS_DOMAIN="draft.example.org") + def test_generator_class(self): + """The DraftAliasGenerator should generate the same lists as the old mgmt cmd""" + a_month_ago = (timezone.now() - datetime.timedelta(30)).astimezone(RPC_TZINFO) + a_month_ago = a_month_ago.replace(hour=0, minute=0, second=0, microsecond=0) + ad = RoleFactory( + name_id="ad", group__type_id="area", group__state_id="active" + ).person + shepherd = PersonFactory() + author1 = PersonFactory() + author2 = PersonFactory() + author3 = PersonFactory() + author4 = PersonFactory() + author5 = PersonFactory() + author6 = PersonFactory() + mars = GroupFactory(type_id="wg", acronym="mars") + marschairman = PersonFactory(user__username="marschairman") + mars.role_set.create( + name_id="chair", person=marschairman, email=marschairman.email() + ) + doc1 = IndividualDraftFactory(authors=[author1], shepherd=shepherd.email(), ad=ad) + doc2 = WgDraftFactory( + name="draft-ietf-mars-test", group__acronym="mars", authors=[author2], ad=ad + ) + doc2.notify = f"{doc2.name}.ad@draft.example.org" + doc2.save() + doc3 = WgDraftFactory.create( + name="draft-ietf-mars-finished", + group__acronym="mars", + authors=[author3], + ad=ad, + std_level_id="ps", + states=[("draft", "rfc"), ("draft-iesg", "pub")], + time=a_month_ago, + ) + rfc3 = WgRfcFactory() + DocEventFactory.create(doc=rfc3, type="published_rfc", time=a_month_ago) + doc3.relateddocument_set.create(relationship_id="became_rfc", target=rfc3) + doc4 = WgDraftFactory.create( + authors=[author4, author5], + ad=ad, + std_level_id="ps", + states=[("draft", "rfc"), ("draft-iesg", "pub")], + time=datetime.datetime(2010, 10, 10, tzinfo=ZoneInfo(settings.TIME_ZONE)), + ) + rfc4 = WgRfcFactory() + DocEventFactory.create( + doc=rfc4, + type="published_rfc", + time=datetime.datetime(2010, 10, 10, tzinfo=RPC_TZINFO), + ) + doc4.relateddocument_set.create(relationship_id="became_rfc", target=rfc4) + doc5 = IndividualDraftFactory(authors=[author6]) + + output = [(alias, alist) for alias, alist in DraftAliasGenerator()] + alias_dict = dict(output) + self.assertEqual(len(alias_dict), len(output)) # no duplicate aliases + expected_dict = { + doc1.name: 
[author1.email_address()], + doc1.name + ".ad": [ad.email_address()], + doc1.name + ".authors": [author1.email_address()], + doc1.name + ".shepherd": [shepherd.email_address()], + doc1.name + + ".all": [ + author1.email_address(), + ad.email_address(), + shepherd.email_address(), + ], + doc2.name: [author2.email_address()], + doc2.name + ".ad": [ad.email_address()], + doc2.name + ".authors": [author2.email_address()], + doc2.name + ".chairs": [marschairman.email_address()], + doc2.name + ".notify": [ad.email_address()], + doc2.name + + ".all": [ + author2.email_address(), + ad.email_address(), + marschairman.email_address(), + ], + doc3.name: [author3.email_address()], + doc3.name + ".ad": [ad.email_address()], + doc3.name + ".authors": [author3.email_address()], + doc3.name + ".chairs": [marschairman.email_address()], + doc3.name + + ".all": [ + author3.email_address(), + ad.email_address(), + marschairman.email_address(), + ], + doc5.name: [author6.email_address()], + doc5.name + ".authors": [author6.email_address()], + doc5.name + ".all": [author6.email_address()], + } + # Sort lists for comparison + self.assertEqual( + {k: sorted(v) for k, v in alias_dict.items()}, + {k: sorted(v) for k, v in expected_dict.items()}, + ) + + # check single name + output = [(alias, alist) for alias, alist in DraftAliasGenerator(Document.objects.filter(name=doc1.name))] + alias_dict = dict(output) + self.assertEqual(len(alias_dict), len(output)) # no duplicate aliases + expected_dict = { + doc1.name: [author1.email_address()], + doc1.name + ".ad": [ad.email_address()], + doc1.name + ".authors": [author1.email_address()], + doc1.name + ".shepherd": [shepherd.email_address()], + doc1.name + + ".all": [ + author1.email_address(), + ad.email_address(), + shepherd.email_address(), + ], + } + # Sort lists for comparison + self.assertEqual( + {k: sorted(v) for k, v in alias_dict.items()}, + {k: sorted(v) for k, v in expected_dict.items()}, + ) + + @override_settings(TOOLS_SERVER="tools.example.org", DRAFT_ALIAS_DOMAIN="draft.example.org") + def test_get_draft_notify_emails(self): + ad = PersonFactory() + shepherd = PersonFactory() + author = PersonFactory() + doc = DocumentFactory(authors=[author], shepherd=shepherd.email(), ad=ad) + generator = DraftAliasGenerator() + + doc.notify = f"{doc.name}@draft.example.org" + doc.save() + self.assertCountEqual(generator.get_draft_notify_emails(doc), [author.email_address()]) + + doc.notify = f"{doc.name}.ad@draft.example.org" + doc.save() + self.assertCountEqual(generator.get_draft_notify_emails(doc), [ad.email_address()]) + + doc.notify = f"{doc.name}.shepherd@draft.example.org" + doc.save() + self.assertCountEqual(generator.get_draft_notify_emails(doc), [shepherd.email_address()]) + + doc.notify = f"{doc.name}.all@draft.example.org" + doc.save() + self.assertCountEqual( + generator.get_draft_notify_emails(doc), + [ad.email_address(), author.email_address(), shepherd.email_address()] + ) + + doc.notify = f"{doc.name}.notify@draft.example.org" + doc.save() + self.assertCountEqual(generator.get_draft_notify_emails(doc), []) + + doc.notify = f"{doc.name}.ad@somewhere.example.com" + doc.save() + self.assertCountEqual(generator.get_draft_notify_emails(doc), [f"{doc.name}.ad@somewhere.example.com"]) + + doc.notify = f"somebody@example.com, nobody@example.com, {doc.name}.ad@tools.example.org" + doc.save() + self.assertCountEqual( + generator.get_draft_notify_emails(doc), + ["somebody@example.com", "nobody@example.com", ad.email_address()] + ) + class 
EmailAliasesTests(TestCase): @@ -2267,37 +2458,20 @@ def setUp(self): WgDraftFactory(name='draft-ietf-mars-test',group__acronym='mars') WgDraftFactory(name='draft-ietf-ames-test',group__acronym='ames') RoleFactory(group__type_id='review', group__acronym='yangdoctors', name_id='secr') - self.doc_alias_file = NamedTemporaryFile(delete=False, mode='w+') - self.doc_alias_file.write("""# Generated by hand at 2015-02-12_16:26:45 -virtual.ietf.org anything -draft-ietf-mars-test@ietf.org xfilter-draft-ietf-mars-test -expand-draft-ietf-mars-test@virtual.ietf.org mars-author@example.com, mars-collaborator@example.com -draft-ietf-mars-test.authors@ietf.org xfilter-draft-ietf-mars-test.authors -expand-draft-ietf-mars-test.authors@virtual.ietf.org mars-author@example.mars, mars-collaborator@example.mars -draft-ietf-mars-test.chairs@ietf.org xfilter-draft-ietf-mars-test.chairs -expand-draft-ietf-mars-test.chairs@virtual.ietf.org mars-chair@example.mars -draft-ietf-mars-test.all@ietf.org xfilter-draft-ietf-mars-test.all -expand-draft-ietf-mars-test.all@virtual.ietf.org mars-author@example.mars, mars-collaborator@example.mars, mars-chair@example.mars -draft-ietf-ames-test@ietf.org xfilter-draft-ietf-ames-test -expand-draft-ietf-ames-test@virtual.ietf.org ames-author@example.com, ames-collaborator@example.com -draft-ietf-ames-test.authors@ietf.org xfilter-draft-ietf-ames-test.authors -expand-draft-ietf-ames-test.authors@virtual.ietf.org ames-author@example.ames, ames-collaborator@example.ames -draft-ietf-ames-test.chairs@ietf.org xfilter-draft-ietf-ames-test.chairs -expand-draft-ietf-ames-test.chairs@virtual.ietf.org ames-chair@example.ames -draft-ietf-ames-test.all@ietf.org xfilter-draft-ietf-ames-test.all -expand-draft-ietf-ames-test.all@virtual.ietf.org ames-author@example.ames, ames-collaborator@example.ames, ames-chair@example.ames - -""") - self.doc_alias_file.close() - self.saved_draft_virtual_path = settings.DRAFT_VIRTUAL_PATH - settings.DRAFT_VIRTUAL_PATH = self.doc_alias_file.name - - def tearDown(self): - settings.DRAFT_VIRTUAL_PATH = self.saved_draft_virtual_path - os.unlink(self.doc_alias_file.name) - super().tearDown() - - def testAliases(self): + + + @mock.patch("ietf.doc.views_doc.get_doc_email_aliases") + def testAliases(self, mock_get_aliases): + mock_get_aliases.return_value = [ + {"doc_name": "draft-ietf-mars-test", "alias_type": "", "expansion": "mars-author@example.mars, mars-collaborator@example.mars"}, + {"doc_name": "draft-ietf-mars-test", "alias_type": ".authors", "expansion": "mars-author@example.mars, mars-collaborator@example.mars"}, + {"doc_name": "draft-ietf-mars-test", "alias_type": ".chairs", "expansion": "mars-chair@example.mars"}, + {"doc_name": "draft-ietf-mars-test", "alias_type": ".all", "expansion": "mars-author@example.mars, mars-collaborator@example.mars, mars-chair@example.mars"}, + {"doc_name": "draft-ietf-ames-test", "alias_type": "", "expansion": "ames-author@example.ames, ames-collaborator@example.ames"}, + {"doc_name": "draft-ietf-ames-test", "alias_type": ".authors", "expansion": "ames-author@example.ames, ames-collaborator@example.ames"}, + {"doc_name": "draft-ietf-ames-test", "alias_type": ".chairs", "expansion": "ames-chair@example.ames"}, + {"doc_name": "draft-ietf-ames-test", "alias_type": ".all", "expansion": "ames-author@example.ames, ames-collaborator@example.ames, ames-chair@example.ames"}, + ] PersonFactory(user__username='plain') url = urlreverse('ietf.doc.urls.redirect.document_email', kwargs=dict(name="draft-ietf-mars-test")) r = 
self.client.get(url) @@ -2307,16 +2481,70 @@ def testAliases(self): login_testing_unauthorized(self, "plain", url) r = self.client.get(url) self.assertEqual(r.status_code, 200) + self.assertEqual(mock_get_aliases.call_args, mock.call()) self.assertTrue(all([x in unicontent(r) for x in ['mars-test@','mars-test.authors@','mars-test.chairs@']])) self.assertTrue(all([x in unicontent(r) for x in ['ames-test@','ames-test.authors@','ames-test.chairs@']])) - def testExpansions(self): + + @mock.patch("ietf.doc.views_doc.get_doc_email_aliases") + def testExpansions(self, mock_get_aliases): + mock_get_aliases.return_value = [ + {"doc_name": "draft-ietf-mars-test", "alias_type": "", "expansion": "mars-author@example.mars, mars-collaborator@example.mars"}, + {"doc_name": "draft-ietf-mars-test", "alias_type": ".authors", "expansion": "mars-author@example.mars, mars-collaborator@example.mars"}, + {"doc_name": "draft-ietf-mars-test", "alias_type": ".chairs", "expansion": "mars-chair@example.mars"}, + {"doc_name": "draft-ietf-mars-test", "alias_type": ".all", "expansion": "mars-author@example.mars, mars-collaborator@example.mars, mars-chair@example.mars"}, + ] url = urlreverse('ietf.doc.views_doc.document_email', kwargs=dict(name="draft-ietf-mars-test")) r = self.client.get(url) + self.assertEqual(mock_get_aliases.call_args, mock.call("draft-ietf-mars-test")) self.assertEqual(r.status_code, 200) self.assertContains(r, 'draft-ietf-mars-test.all@ietf.org') self.assertContains(r, 'iesg_ballot_saved') + + @mock.patch("ietf.doc.utils.DraftAliasGenerator") + def test_get_doc_email_aliases(self, mock_alias_gen_cls): + mock_alias_gen_cls.return_value = [ + ("draft-something-or-other.some-type", ["somebody@example.com"]), + ("draft-something-or-other", ["somebody@example.com"]), + ("draft-nothing-at-all", ["nobody@example.com"]), + ("draft-nothing-at-all.some-type", ["nobody@example.com"]), + ] + # order is important in the response - should be sorted by doc name and otherwise left + # in order + self.assertEqual( + get_doc_email_aliases(), + [ + { + "doc_name": "draft-nothing-at-all", + "alias_type": "", + "expansion": "nobody@example.com", + }, + { + "doc_name": "draft-nothing-at-all", + "alias_type": ".some-type", + "expansion": "nobody@example.com", + }, + { + "doc_name": "draft-something-or-other", + "alias_type": ".some-type", + "expansion": "somebody@example.com", + }, + { + "doc_name": "draft-something-or-other", + "alias_type": "", + "expansion": "somebody@example.com", + }, + ], + ) + self.assertEqual(mock_alias_gen_cls.call_args, mock.call(None)) + # Repeat with a name, no need to re-test that the alias list is actually passed through, just + # check that the DraftAliasGenerator is called correctly + draft = WgDraftFactory() + get_doc_email_aliases(draft.name) + self.assertQuerySetEqual(mock_alias_gen_cls.call_args[0][0], Document.objects.filter(pk=draft.pk)) + + class DocumentMeetingTests(TestCase): def setUp(self): @@ -2339,8 +2567,8 @@ def setUp(self): def test_view_document_meetings(self): doc = IndividualDraftFactory.create() - doc.sessionpresentation_set.create(session=self.inprog,rev=None) - doc.sessionpresentation_set.create(session=self.interim,rev=None) + doc.presentations.create(session=self.inprog,rev=None) + doc.presentations.create(session=self.interim,rev=None) url = urlreverse('ietf.doc.views_doc.all_presentations', kwargs=dict(name=doc.name)) response = self.client.get(url) @@ -2351,8 +2579,8 @@ def test_view_document_meetings(self): self.assertFalse(q('#addsessionsbutton')) 
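# Illustrative sketch of the alias contract exercised in the alias tests above:
# DraftAliasGenerator yields (alias, [addresses]) pairs, optionally restricted to a
# queryset, and get_doc_email_aliases() flattens them into sorted dicts with doc_name,
# alias_type and expansion keys. The import paths are assumptions; the call signatures
# come from the tests.
from ietf.doc.models import Document
from ietf.doc.utils import DraftAliasGenerator, get_doc_email_aliases  # path assumed

def aliases_for(doc_name):
    # Restrict generation to a single document, as in the "check single name" case above.
    return dict(DraftAliasGenerator(Document.objects.filter(name=doc_name)))

# get_doc_email_aliases("draft-ietf-mars-test") would return entries like:
# {"doc_name": "draft-ietf-mars-test", "alias_type": ".authors", "expansion": "a@example.mars, b@example.mars"}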
self.assertFalse(q("a.btn:contains('Remove document')")) - doc.sessionpresentation_set.create(session=self.past_cutoff,rev=None) - doc.sessionpresentation_set.create(session=self.past,rev=None) + doc.presentations.create(session=self.past_cutoff,rev=None) + doc.presentations.create(session=self.past,rev=None) self.client.login(username="secretary", password="secretary+password") response = self.client.get(url) @@ -2385,41 +2613,72 @@ def test_view_document_meetings(self): self.assertFalse(q("#futuremeets a.btn:contains('Remove document')")) self.assertFalse(q("#pastmeets a.btn:contains('Remove document')")) - def test_edit_document_session(self): + @override_settings(MEETECHO_API_CONFIG="fake settings") + @mock.patch("ietf.doc.views_doc.SlidesManager") + def test_edit_document_session(self, mock_slides_manager_cls): doc = IndividualDraftFactory.create() - sp = doc.sessionpresentation_set.create(session=self.future,rev=None) + sp = doc.presentations.create(session=self.future,rev=None) url = urlreverse('ietf.doc.views_doc.edit_sessionpresentation',kwargs=dict(name='no-such-doc',session_id=sp.session_id)) response = self.client.get(url) self.assertEqual(response.status_code, 404) + self.assertFalse(mock_slides_manager_cls.called) url = urlreverse('ietf.doc.views_doc.edit_sessionpresentation',kwargs=dict(name=doc.name,session_id=0)) response = self.client.get(url) self.assertEqual(response.status_code, 404) + self.assertFalse(mock_slides_manager_cls.called) url = urlreverse('ietf.doc.views_doc.edit_sessionpresentation',kwargs=dict(name=doc.name,session_id=sp.session_id)) response = self.client.get(url) self.assertEqual(response.status_code, 404) + self.assertFalse(mock_slides_manager_cls.called) self.client.login(username=self.other_chair.user.username,password='%s+password'%self.other_chair.user.username) response = self.client.get(url) self.assertEqual(response.status_code, 404) - + self.assertFalse(mock_slides_manager_cls.called) + self.client.login(username=self.group_chair.user.username,password='%s+password'%self.group_chair.user.username) response = self.client.get(url) self.assertEqual(response.status_code, 200) q = PyQuery(response.content) self.assertEqual(2,len(q('select#id_version option'))) + self.assertFalse(mock_slides_manager_cls.called) + # edit draft self.assertEqual(1,doc.docevent_set.count()) response = self.client.post(url,{'version':'00','save':''}) self.assertEqual(response.status_code, 302) - self.assertEqual(doc.sessionpresentation_set.get(pk=sp.pk).rev,'00') + self.assertEqual(doc.presentations.get(pk=sp.pk).rev,'00') self.assertEqual(2,doc.docevent_set.count()) + self.assertFalse(mock_slides_manager_cls.called) + + # editing slides should call Meetecho API + slides = SessionPresentationFactory( + session=self.future, + document__type_id="slides", + document__rev="00", + rev=None, + order=1, + ).document + url = urlreverse( + "ietf.doc.views_doc.edit_sessionpresentation", + kwargs={"name": slides.name, "session_id": self.future.pk}, + ) + response = self.client.post(url, {"version": "00", "save": ""}) + self.assertEqual(response.status_code, 302) + self.assertEqual(mock_slides_manager_cls.call_count, 1) + self.assertEqual(mock_slides_manager_cls.call_args, mock.call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.send_update.call_count, 1) + self.assertEqual( + mock_slides_manager_cls.return_value.send_update.call_args, + mock.call(self.future), + ) def test_edit_document_session_after_proceedings_closed(self): doc = 
IndividualDraftFactory.create() - sp = doc.sessionpresentation_set.create(session=self.past_cutoff,rev=None) + sp = doc.presentations.create(session=self.past_cutoff,rev=None) url = urlreverse('ietf.doc.views_doc.edit_sessionpresentation',kwargs=dict(name=doc.name,session_id=sp.session_id)) self.client.login(username=self.group_chair.user.username,password='%s+password'%self.group_chair.user.username) @@ -2432,39 +2691,64 @@ def test_edit_document_session_after_proceedings_closed(self): q=PyQuery(response.content) self.assertEqual(1,len(q(".alert-warning:contains('may affect published proceedings')"))) - def test_remove_document_session(self): + @override_settings(MEETECHO_API_CONFIG="fake settings") + @mock.patch("ietf.doc.views_doc.SlidesManager") + def test_remove_document_session(self, mock_slides_manager_cls): doc = IndividualDraftFactory.create() - sp = doc.sessionpresentation_set.create(session=self.future,rev=None) + sp = doc.presentations.create(session=self.future,rev=None) url = urlreverse('ietf.doc.views_doc.remove_sessionpresentation',kwargs=dict(name='no-such-doc',session_id=sp.session_id)) response = self.client.get(url) self.assertEqual(response.status_code, 404) + self.assertFalse(mock_slides_manager_cls.called) url = urlreverse('ietf.doc.views_doc.remove_sessionpresentation',kwargs=dict(name=doc.name,session_id=0)) response = self.client.get(url) self.assertEqual(response.status_code, 404) + self.assertFalse(mock_slides_manager_cls.called) url = urlreverse('ietf.doc.views_doc.remove_sessionpresentation',kwargs=dict(name=doc.name,session_id=sp.session_id)) response = self.client.get(url) self.assertEqual(response.status_code, 404) + self.assertFalse(mock_slides_manager_cls.called) self.client.login(username=self.other_chair.user.username,password='%s+password'%self.other_chair.user.username) response = self.client.get(url) self.assertEqual(response.status_code, 404) - + self.assertFalse(mock_slides_manager_cls.called) + self.client.login(username=self.group_chair.user.username,password='%s+password'%self.group_chair.user.username) response = self.client.get(url) self.assertEqual(response.status_code, 200) + self.assertFalse(mock_slides_manager_cls.called) + # removing a draft self.assertEqual(1,doc.docevent_set.count()) response = self.client.post(url,{'remove_session':''}) self.assertEqual(response.status_code, 302) - self.assertFalse(doc.sessionpresentation_set.filter(pk=sp.pk).exists()) + self.assertFalse(doc.presentations.filter(pk=sp.pk).exists()) self.assertEqual(2,doc.docevent_set.count()) + self.assertFalse(mock_slides_manager_cls.called) + + # removing slides should call Meetecho API + slides = SessionPresentationFactory(session=self.future, document__type_id="slides", order=1).document + url = urlreverse( + "ietf.doc.views_doc.remove_sessionpresentation", + kwargs={"name": slides.name, "session_id": self.future.pk}, + ) + response = self.client.post(url, {"remove_session": ""}) + self.assertEqual(response.status_code, 302) + self.assertEqual(mock_slides_manager_cls.call_count, 1) + self.assertEqual(mock_slides_manager_cls.call_args, mock.call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.delete.call_count, 1) + self.assertEqual( + mock_slides_manager_cls.return_value.delete.call_args, + mock.call(self.future, slides), + ) def test_remove_document_session_after_proceedings_closed(self): doc = IndividualDraftFactory.create() - sp = doc.sessionpresentation_set.create(session=self.past_cutoff,rev=None) + sp = 
doc.presentations.create(session=self.past_cutoff,rev=None) url = urlreverse('ietf.doc.views_doc.remove_sessionpresentation',kwargs=dict(name=doc.name,session_id=sp.session_id)) self.client.login(username=self.group_chair.user.username,password='%s+password'%self.group_chair.user.username) @@ -2477,28 +2761,49 @@ def test_remove_document_session_after_proceedings_closed(self): q=PyQuery(response.content) self.assertEqual(1,len(q(".alert-warning:contains('may affect published proceedings')"))) - def test_add_document_session(self): + @override_settings(MEETECHO_API_CONFIG="fake settings") + @mock.patch("ietf.doc.views_doc.SlidesManager") + def test_add_document_session(self, mock_slides_manager_cls): doc = IndividualDraftFactory.create() url = urlreverse('ietf.doc.views_doc.add_sessionpresentation',kwargs=dict(name=doc.name)) login_testing_unauthorized(self,self.group_chair.user.username,url) response = self.client.get(url) self.assertEqual(response.status_code,200) - + self.assertFalse(mock_slides_manager_cls.called) + response = self.client.post(url,{'session':0,'version':'current'}) self.assertEqual(response.status_code,200) q=PyQuery(response.content) self.assertTrue(q('.form-select.is-invalid')) + self.assertFalse(mock_slides_manager_cls.called) response = self.client.post(url,{'session':self.future.pk,'version':'bogus version'}) self.assertEqual(response.status_code,200) q=PyQuery(response.content) self.assertTrue(q('.form-select.is-invalid')) + self.assertFalse(mock_slides_manager_cls.called) + # adding a draft self.assertEqual(1,doc.docevent_set.count()) response = self.client.post(url,{'session':self.future.pk,'version':'current'}) self.assertEqual(response.status_code,302) self.assertEqual(2,doc.docevent_set.count()) + self.assertEqual(doc.presentations.get(session__pk=self.future.pk).order, 0) + self.assertFalse(mock_slides_manager_cls.called) + + # adding slides should set order / call Meetecho API + slides = DocumentFactory(type_id="slides") + url = urlreverse("ietf.doc.views_doc.add_sessionpresentation", kwargs=dict(name=slides.name)) + response = self.client.post(url, {"session": self.future.pk, "version": "current"}) + self.assertEqual(response.status_code,302) + self.assertEqual(slides.presentations.get(session__pk=self.future.pk).order, 1) + self.assertEqual(mock_slides_manager_cls.call_args, mock.call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 1) + self.assertEqual( + mock_slides_manager_cls.return_value.add.call_args, + mock.call(self.future, slides, order=1), + ) def test_get_related_meeting(self): """Should be able to retrieve related meeting""" @@ -2531,60 +2836,6 @@ def test_get_related_meeting(self): self.assertIsNone(doc.get_related_meeting(), f'{doc.type.slug} should not be related to meeting') class ChartTests(ResourceTestCaseMixin, TestCase): - def test_search_chart_conf(self): - doc = IndividualDraftFactory() - - conf_url = urlreverse('ietf.doc.views_stats.chart_conf_newrevisiondocevent') - - # No qurey arguments; expect an empty json object - r = self.client.get(conf_url) - self.assertValidJSONResponse(r) - self.assertEqual(unicontent(r), '{}') - - # No match - r = self.client.get(conf_url + '?activedrafts=on&name=thisisnotadocumentname') - self.assertValidJSONResponse(r) - d = r.json() - self.assertEqual(d['chart']['type'], settings.CHART_TYPE_COLUMN_OPTIONS['chart']['type']) - - r = self.client.get(conf_url + '?activedrafts=on&name=%s'%doc.name[6:12]) - self.assertValidJSONResponse(r) - d = r.json() - 
self.assertEqual(d['chart']['type'], settings.CHART_TYPE_COLUMN_OPTIONS['chart']['type']) - self.assertEqual(len(d['series'][0]['data']), 0) - - def test_search_chart_data(self): - doc = IndividualDraftFactory() - - data_url = urlreverse('ietf.doc.views_stats.chart_data_newrevisiondocevent') - - # No qurey arguments; expect an empty json list - r = self.client.get(data_url) - self.assertValidJSONResponse(r) - self.assertEqual(unicontent(r), '[]') - - # No match - r = self.client.get(data_url + '?activedrafts=on&name=thisisnotadocumentname') - self.assertValidJSONResponse(r) - d = r.json() - self.assertEqual(unicontent(r), '[]') - - r = self.client.get(data_url + '?activedrafts=on&name=%s'%doc.name[6:12]) - self.assertValidJSONResponse(r) - d = r.json() - self.assertEqual(len(d), 1) - self.assertEqual(len(d[0]), 2) - - def test_search_chart(self): - doc = IndividualDraftFactory() - - chart_url = urlreverse('ietf.doc.views_stats.chart_newrevisiondocevent') - r = self.client.get(chart_url) - self.assertEqual(r.status_code, 200) - - r = self.client.get(chart_url + '?activedrafts=on&name=%s'%doc.name[6:12]) - self.assertEqual(r.status_code, 200) - def test_personal_chart(self): person = PersonFactory.create() IndividualDraftFactory.create( @@ -2691,49 +2942,59 @@ def test_markdown_and_text(self): class Idnits2SupportTests(TestCase): settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['DERIVED_DIR'] - def test_obsoleted(self): - rfc = WgRfcFactory(alias2__name='rfc1001') - WgRfcFactory(alias2__name='rfc1003',relations=[('obs',rfc)]) - rfc = WgRfcFactory(alias2__name='rfc1005') - WgRfcFactory(alias2__name='rfc1007',relations=[('obs',rfc)]) + def test_generate_idnits2_rfcs_obsoleted(self): + rfc = WgRfcFactory(rfc_number=1001) + WgRfcFactory(rfc_number=1003,relations=[('obs',rfc)]) + rfc = WgRfcFactory(rfc_number=1005) + WgRfcFactory(rfc_number=1007,relations=[('obs',rfc)]) + blob = generate_idnits2_rfcs_obsoleted() + self.assertEqual(blob, b'1001 1003\n1005 1007\n'.decode("utf8")) + def test_obsoleted(self): url = urlreverse('ietf.doc.views_doc.idnits2_rfcs_obsoleted') r = self.client.get(url) self.assertEqual(r.status_code, 404) - call_command('generate_idnits2_rfcs_obsoleted') + # value written is arbitrary, expect it to be passed through + (Path(settings.DERIVED_DIR) / "idnits2-rfcs-obsoleted").write_bytes(b'1001 1003\n1005 1007\n') url = urlreverse('ietf.doc.views_doc.idnits2_rfcs_obsoleted') r = self.client.get(url) self.assertEqual(r.status_code, 200) self.assertEqual(r.content, b'1001 1003\n1005 1007\n') - def test_rfc_status(self): + def test_generate_idnits2_rfc_status(self): for slug in ('bcp', 'ds', 'exp', 'hist', 'inf', 'std', 'ps', 'unkn'): WgRfcFactory(std_level_id=slug) + blob = generate_idnits2_rfc_status().replace("\n", "") + self.assertEqual(blob[6312-1], "O") + + def test_rfc_status(self): url = urlreverse('ietf.doc.views_doc.idnits2_rfc_status') r = self.client.get(url) self.assertEqual(r.status_code,404) - call_command('generate_idnits2_rfc_status') + # value written is arbitrary, expect it to be passed through + (Path(settings.DERIVED_DIR) / "idnits2-rfc-status").write_bytes(b'1001 1003\n1005 1007\n') r = self.client.get(url) self.assertEqual(r.status_code,200) - blob = unicontent(r).replace('\n','') - self.assertEqual(blob[6312-1],'O') + self.assertEqual(r.content, b'1001 1003\n1005 1007\n') def test_idnits2_state(self): rfc = WgRfcFactory() - url = urlreverse('ietf.doc.views_doc.idnits2_state', kwargs=dict(name=rfc.canonical_name())) + draft = 
WgDraftFactory() + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) + url = urlreverse('ietf.doc.views_doc.idnits2_state', kwargs=dict(name=rfc.name)) r = self.client.get(url) self.assertEqual(r.status_code, 200) self.assertContains(r,'rfcnum') draft = WgDraftFactory() - url = urlreverse('ietf.doc.views_doc.idnits2_state', kwargs=dict(name=draft.canonical_name())) + url = urlreverse('ietf.doc.views_doc.idnits2_state', kwargs=dict(name=draft.name)) r = self.client.get(url) self.assertEqual(r.status_code, 200) self.assertNotContains(r,'rfcnum') self.assertContains(r,'Unknown') draft = WgDraftFactory(intended_std_level_id='ps') - url = urlreverse('ietf.doc.views_doc.idnits2_state', kwargs=dict(name=draft.canonical_name())) + url = urlreverse('ietf.doc.views_doc.idnits2_state', kwargs=dict(name=draft.name)) r = self.client.get(url) self.assertEqual(r.status_code, 200) self.assertContains(r,'Proposed') @@ -2778,16 +3039,12 @@ def test_raw_id(self): self.should_succeed(dict(name=draft.name, rev='00',ext='txt')) self.should_404(dict(name=draft.name, rev='00',ext='html')) - def test_raw_id_rfc(self): - rfc = WgRfcFactory() - dir = settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR - (Path(dir) / f'{rfc.name}-{rfc.rev}.txt').touch() - self.should_succeed(dict(name=rfc.name)) - self.should_404(dict(name=rfc.canonical_name())) + # test_raw_id_rfc intentionally removed + # an rfc is no longer a pseudo-version of a draft. def test_non_draft(self): - charter = CharterFactory() - self.should_404(dict(name=charter.name)) + for doc in [CharterFactory(), WgRfcFactory()]: + self.should_404(dict(name=doc.name)) class PdfizedTests(TestCase): @@ -2806,24 +3063,40 @@ def should_404(self, argdict): r = self.client.get(url) self.assertEqual(r.status_code, 404) + # This takes a _long_ time (32s on a 2022 m1 macbook pro) - is it worth what it covers? 
def test_pdfized(self): - rfc = WgRfcFactory(create_revisions=range(0,2)) + rfc = WgRfcFactory() + draft = WgDraftFactory(create_revisions=range(0,2)) + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) dir = settings.RFC_PATH - with (Path(dir) / f'{rfc.canonical_name()}.txt').open('w') as f: + with (Path(dir) / f'{rfc.name}.txt').open('w') as f: f.write('text content') dir = settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR for r in range(0,2): - with (Path(dir) / f'{rfc.name}-{r:02d}.txt').open('w') as f: + with (Path(dir) / f'{draft.name}-{r:02d}.txt').open('w') as f: f.write('text content') - self.should_succeed(dict(name=rfc.canonical_name())) + self.assertTrue( + login_testing_unauthorized( + self, + PersonFactory().user.username, + urlreverse(self.view, kwargs={"name": draft.name}), + ) + ) self.should_succeed(dict(name=rfc.name)) + self.should_succeed(dict(name=draft.name)) for r in range(0,2): - self.should_succeed(dict(name=rfc.name,rev=f'{r:02d}')) + self.should_succeed(dict(name=draft.name,rev=f'{r:02d}')) for ext in ('pdf','txt','html','anythingatall'): - self.should_succeed(dict(name=rfc.name,rev=f'{r:02d}',ext=ext)) - self.should_404(dict(name=rfc.name,rev='02')) + self.should_succeed(dict(name=draft.name,rev=f'{r:02d}',ext=ext)) + self.should_404(dict(name=draft.name,rev='02')) + + with mock.patch('ietf.doc.models.DocumentInfo.pdfized', side_effect=URLFetchingError): + url = urlreverse(self.view, kwargs=dict(name=rfc.name)) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, "Error while rendering PDF") class NotifyValidationTests(TestCase): def test_notify_validation(self): @@ -2906,3 +3179,383 @@ def test_gets_request_conflict_review_action_button(self): r = self.client.get(url) self.assertContains(r, target_string) +class DocInfoMethodsTests(TestCase): + + def test_became_rfc(self): + draft = WgDraftFactory() + rfc = WgRfcFactory() + draft.relateddocument_set.create(relationship_id="became_rfc",target=rfc) + self.assertEqual(draft.became_rfc(), rfc) + self.assertEqual(rfc.came_from_draft(), draft) + + charter = CharterFactory() + self.assertIsNone(charter.became_rfc()) + self.assertIsNone(charter.came_from_draft()) + + def test_revisions(self): + draft = WgDraftFactory(rev="09",create_revisions=range(0,10)) + self.assertEqual(draft.revisions_by_dochistory(),[f"{i:02d}" for i in range(0,10)]) + self.assertEqual(draft.revisions_by_newrevisionevent(),[f"{i:02d}" for i in range(0,10)]) + rfc = WgRfcFactory() + self.assertEqual(rfc.revisions_by_newrevisionevent(),[]) + self.assertEqual(rfc.revisions_by_dochistory(),[]) + + draft.history_set.filter(rev__lt="08").delete() + draft.docevent_set.filter(newrevisiondocevent__rev="05").delete() + self.assertEqual(draft.revisions_by_dochistory(),[f"{i:02d}" for i in range(8,10)]) + self.assertEqual(draft.revisions_by_newrevisionevent(),[f"{i:02d}" for i in [*range(0,5), *range(6,10)]]) + + def test_referenced_by_rfcs(self): + # n.b., no significance to the ref* values in this test + referring_draft = WgDraftFactory() + (rfc, referring_rfc) = WgRfcFactory.create_batch(2) + rfc.targets_related.create(relationship_id="refnorm", source=referring_draft) + rfc.targets_related.create(relationship_id="refnorm", source=referring_rfc) + self.assertCountEqual( + rfc.referenced_by_rfcs(), + rfc.targets_related.filter(source=referring_rfc), + ) + + def test_referenced_by_rfcs_as_rfc_or_draft(self): + # n.b., no significance to the ref* values in this test + draft = WgDraftFactory() + rfc = 
WgRfcFactory() + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) + + # Draft referring to the rfc and the draft - should not be reported at all + draft_referring_to_both = WgDraftFactory() + draft_referring_to_both.relateddocument_set.create(relationship_id="refnorm", target=draft) + draft_referring_to_both.relateddocument_set.create(relationship_id="refnorm", target=rfc) + + # RFC referring only to the draft - should be reported for either the draft or the rfc + rfc_referring_to_draft = WgRfcFactory() + rfc_referring_to_draft.relateddocument_set.create(relationship_id="refinfo", target=draft) + + # RFC referring only to the rfc - should be reported only for the rfc + rfc_referring_to_rfc = WgRfcFactory() + rfc_referring_to_rfc.relateddocument_set.create(relationship_id="refinfo", target=rfc) + + # RFC referring to the rfc and the draft - should be reported for both + rfc_referring_to_both = WgRfcFactory() + rfc_referring_to_both.relateddocument_set.create(relationship_id="refnorm", target=draft) + rfc_referring_to_both.relateddocument_set.create(relationship_id="refnorm", target=rfc) + + self.assertCountEqual( + draft.referenced_by_rfcs_as_rfc_or_draft(), + draft.targets_related.filter(source__type="rfc"), + ) + + self.assertCountEqual( + rfc.referenced_by_rfcs_as_rfc_or_draft(), + draft.targets_related.filter(source__type="rfc") | rfc.targets_related.filter(source__type="rfc"), + ) + +class StateIndexTests(TestCase): + + def test_state_index(self): + url = urlreverse('ietf.doc.views_help.state_index') + r = self.client.get(url) + q = PyQuery(r.content) + content = [ e.text for e in q('#content table td a ') ] + names = StateType.objects.values_list('slug', flat=True) + # The following doesn't cover all doc types, only a selection + for name in names: + if not '-' in name: + self.assertIn(name, content) + +class InvestigateTests(TestCase): + settings_temp_path_overrides = TestCase.settings_temp_path_overrides + [ + "AGENDA_PATH", + # "INTERNET_DRAFT_PATH", + # "INTERNET_DRAFT_ARCHIVE_DIR", + # "INTERNET_ALL_DRAFTS_ARCHIVE_DIR", + ] + + def setUp(self): + super().setUp() + # Contort the draft archive dir temporary replacement + # to match the "collections" concept + archive_tmp_dir = Path(settings.INTERNET_DRAFT_ARCHIVE_DIR) + new_archive_dir = archive_tmp_dir / "draft-archive" + new_archive_dir.mkdir() + settings.INTERNET_DRAFT_ARCHIVE_DIR = str(new_archive_dir) + donated_personal_copy_dir = archive_tmp_dir / "donated-personal-copy" + donated_personal_copy_dir.mkdir() + meeting_dir = Path(settings.AGENDA_PATH) / "666" + meeting_dir.mkdir() + all_archive_dir = Path(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR) + repository_dir = Path(settings.INTERNET_DRAFT_PATH) + + for path in [repository_dir, all_archive_dir]: + (path / "draft-this-is-active-00.txt").touch() + for path in [new_archive_dir, all_archive_dir]: + (path / "draft-old-but-can-authenticate-00.txt").touch() + (path / "draft-has-mixed-provenance-01.txt").touch() + for path in [donated_personal_copy_dir, all_archive_dir]: + (path / "draft-donated-from-a-personal-collection-00.txt").touch() + (path / "draft-has-mixed-provenance-00.txt").touch() + (path / "draft-has-mixed-provenance-00.txt.Z").touch() + (all_archive_dir / "draft-this-should-not-be-possible-00.txt").touch() + (meeting_dir / 
"draft-this-predates-the-archive-00.txt").touch() + + def test_investigate_fragment(self): + + result = investigate_fragment("this-is-active") + self.assertEqual(len(result["can_verify"]), 1) + self.assertEqual(len(result["unverifiable_collections"]), 0) + self.assertEqual(len(result["unexpected"]), 0) + self.assertEqual( + list(result["can_verify"])[0].name, "draft-this-is-active-00.txt" + ) + + result = investigate_fragment("old-but-can") + self.assertEqual(len(result["can_verify"]), 1) + self.assertEqual(len(result["unverifiable_collections"]), 0) + self.assertEqual(len(result["unexpected"]), 0) + self.assertEqual( + list(result["can_verify"])[0].name, "draft-old-but-can-authenticate-00.txt" + ) + + result = investigate_fragment("predates") + self.assertEqual(len(result["can_verify"]), 1) + self.assertEqual(len(result["unverifiable_collections"]), 0) + self.assertEqual(len(result["unexpected"]), 0) + self.assertEqual( + list(result["can_verify"])[0].name, "draft-this-predates-the-archive-00.txt" + ) + + result = investigate_fragment("personal-collection") + self.assertEqual(len(result["can_verify"]), 0) + self.assertEqual(len(result["unverifiable_collections"]), 1) + self.assertEqual(len(result["unexpected"]), 0) + self.assertEqual( + list(result["unverifiable_collections"])[0].name, + "draft-donated-from-a-personal-collection-00.txt", + ) + + result = investigate_fragment("mixed-provenance") + self.assertEqual(len(result["can_verify"]), 1) + self.assertEqual(len(result["unverifiable_collections"]), 2) + self.assertEqual(len(result["unexpected"]), 0) + self.assertEqual( + list(result["can_verify"])[0].name, "draft-has-mixed-provenance-01.txt" + ) + self.assertEqual( + set([p.name for p in result["unverifiable_collections"]]), + set( + [ + "draft-has-mixed-provenance-00.txt", + "draft-has-mixed-provenance-00.txt.Z", + ] + ), + ) + + result = investigate_fragment("not-be-possible") + self.assertEqual(len(result["can_verify"]), 0) + self.assertEqual(len(result["unverifiable_collections"]), 0) + self.assertEqual(len(result["unexpected"]), 1) + self.assertEqual( + list(result["unexpected"])[0].name, + "draft-this-should-not-be-possible-00.txt", + ) + + @mock.patch("ietf.doc.utils.caches") + def test_investigate_fragment_cache(self, mock_caches): + """investigate_fragment should cache its result""" + mock_default_cache = mock_caches["default"] + mock_default_cache.get.return_value = None # disable cache + result = investigate_fragment("this-is-active") + self.assertEqual(len(result["can_verify"]), 1) + self.assertEqual(len(result["unverifiable_collections"]), 0) + self.assertEqual(len(result["unexpected"]), 0) + self.assertEqual( + list(result["can_verify"])[0].name, "draft-this-is-active-00.txt" + ) + self.assertTrue(mock_default_cache.get.called) + self.assertTrue(mock_default_cache.set.called) + expected_key = f"investigate_fragment:{sha384(b'this-is-active').hexdigest()}" + self.assertEqual(mock_default_cache.set.call_args.kwargs["key"], expected_key) + cached_value = mock_default_cache.set.call_args.kwargs["value"] # hang on to this + mock_default_cache.reset_mock() + + # Check that a cached value is used + mock_default_cache.get.return_value = cached_value + with mock.patch("ietf.doc.utils.Path") as mock_path: + result = investigate_fragment("this-is-active") + # Check that we got the same results + self.assertEqual(len(result["can_verify"]), 1) + self.assertEqual(len(result["unverifiable_collections"]), 0) + self.assertEqual(len(result["unexpected"]), 0) + self.assertEqual( + 
list(result["can_verify"])[0].name, "draft-this-is-active-00.txt" + ) + # And that we used the cache + self.assertFalse(mock_path.called) # a proxy for "did the method do any real work" + self.assertTrue(mock_default_cache.get.called) + self.assertEqual(mock_default_cache.get.call_args, mock.call(expected_key)) + + def test_investigate_get(self): + """GET with no querystring should retrieve the investigate UI""" + url = urlreverse("ietf.doc.views_doc.investigate") + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertEqual(len(q("form#investigate")), 1) + self.assertEqual(len(q("div#results")), 0) + + @mock.patch("ietf.doc.views_doc.AsyncResult") + def test_investgate_get_task_id(self, mock_asyncresult): + """GET with querystring should lookup task status""" + url = urlreverse("ietf.doc.views_doc.investigate") + login_testing_unauthorized(self, "secretary", url) + mock_asyncresult.return_value.ready.return_value = True + r = self.client.get(url + "?id=a-task-id") + self.assertEqual(r.status_code, 200) + self.assertEqual(r.json(), {"status": "ready"}) + self.assertTrue(mock_asyncresult.called) + self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id")) + mock_asyncresult.reset_mock() + + mock_asyncresult.return_value.ready.return_value = False + r = self.client.get(url + "?id=a-task-id") + self.assertEqual(r.status_code, 200) + self.assertEqual(r.json(), {"status": "notready"}) + self.assertTrue(mock_asyncresult.called) + self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id")) + + @mock.patch("ietf.doc.views_doc.investigate_fragment_task") + def test_investigate_post(self, mock_investigate_fragment_task): + """POST with a name_fragment and no task_id should start a celery task""" + url = urlreverse("ietf.doc.views_doc.investigate") + login_testing_unauthorized(self, "secretary", url) + + # test some invalid cases + r = self.client.post(url, {"name_fragment": "short"}) # limit is >= 8 characters + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertEqual(len(q("#id_name_fragment.is-invalid")), 1) + self.assertFalse(mock_investigate_fragment_task.delay.called) + for char in ["*", "%", "/", "\\"]: + r = self.client.post(url, {"name_fragment": f"bad{char}character"}) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertEqual(len(q("#id_name_fragment.is-invalid")), 1) + self.assertFalse(mock_investigate_fragment_task.delay.called) + + # now a valid one + mock_investigate_fragment_task.delay.return_value.id = "a-task-id" + r = self.client.post(url, {"name_fragment": "this-is-a-valid-fragment"}) + self.assertEqual(r.status_code, 200) + self.assertTrue(mock_investigate_fragment_task.delay.called) + self.assertEqual(mock_investigate_fragment_task.delay.call_args, mock.call("this-is-a-valid-fragment")) + self.assertEqual(r.json(), {"id": "a-task-id"}) + + @mock.patch("ietf.doc.views_doc.AsyncResult") + def test_investigate_post_task_id(self, mock_asyncresult): + """POST with name_fragment and task_id should retrieve results""" + url = urlreverse("ietf.doc.views_doc.investigate") + login_testing_unauthorized(self, "secretary", url) + + # First, test a non-successful result - this could be a failure or non-existent task id + mock_result = mock_asyncresult.return_value + mock_result.successful.return_value = False + r = self.client.post(url, {"name_fragment": "some-fragment", "task_id": "a-task-id"}) + self.assertContains(r, "The 
investigation task failed.", status_code=200) + self.assertTrue(mock_asyncresult.called) + self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id")) + self.assertFalse(mock_result.get.called) + mock_asyncresult.reset_mock() + q = PyQuery(r.content) + self.assertEqual(q("#id_name_fragment").val(), "some-fragment") + self.assertEqual(q("#id_task_id").val(), "a-task-id") + + # now the various successful result mixes + mock_result = mock_asyncresult.return_value + mock_result.successful.return_value = True + mock_result.get.return_value = { + "name_fragment": "different-fragment", + "results": { + "can_verify": set(), + "unverifiable_collections": set(), + "unexpected": set(), + } + } + r = self.client.post(url, {"name_fragment": "some-fragment", "task_id": "a-task-id"}) + self.assertEqual(r.status_code, 200) + self.assertTrue(mock_asyncresult.called) + self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id")) + mock_asyncresult.reset_mock() + q = PyQuery(r.content) + self.assertEqual(q("#id_name_fragment").val(), "different-fragment", "name_fragment should be reset") + self.assertEqual(q("#id_task_id").val(), "", "task_id should be cleared") + self.assertEqual(len(q("div#results")), 1) + self.assertEqual(len(q("table#authenticated")), 0) + self.assertEqual(len(q("table#unverifiable")), 0) + self.assertEqual(len(q("table#unexpected")), 0) + + # This file was created in setUp. It allows the view to render properly + # but its location / content don't matter for this test otherwise. + a_file_that_exists = Path(settings.INTERNET_DRAFT_PATH) / "draft-this-is-active-00.txt" + + mock_result.get.return_value = { + "name_fragment": "different-fragment", + "results": { + "can_verify": {a_file_that_exists}, + "unverifiable_collections": {a_file_that_exists}, + "unexpected": set(), + } + } + r = self.client.post(url, {"name_fragment": "some-fragment", "task_id": "a-task-id"}) + self.assertEqual(r.status_code, 200) + self.assertTrue(mock_asyncresult.called) + self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id")) + mock_asyncresult.reset_mock() + q = PyQuery(r.content) + self.assertEqual(q("#id_name_fragment").val(), "different-fragment", "name_fragment should be reset") + self.assertEqual(q("#id_task_id").val(), "", "task_id should be cleared") + self.assertEqual(len(q("div#results")), 1) + self.assertEqual(len(q("table#authenticated")), 1) + self.assertEqual(len(q("table#unverifiable")), 1) + self.assertEqual(len(q("table#unexpected")), 0) + + mock_result.get.return_value = { + "name_fragment": "different-fragment", + "results": { + "can_verify": set(), + "unverifiable_collections": set(), + "unexpected": {a_file_that_exists}, + } + } + r = self.client.post(url, {"name_fragment": "some-fragment", "task_id": "a-task-id"}) + self.assertEqual(r.status_code, 200) + self.assertTrue(mock_asyncresult.called) + self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id")) + mock_asyncresult.reset_mock() + q = PyQuery(r.content) + self.assertEqual(q("#id_name_fragment").val(), "different-fragment", "name_fragment should be reset") + self.assertEqual(q("#id_task_id").val(), "", "task_id should be cleared") + self.assertEqual(len(q("div#results")), 1) + self.assertEqual(len(q("table#authenticated")), 0) + self.assertEqual(len(q("table#unverifiable")), 0) + self.assertEqual(len(q("table#unexpected")), 1) + + +class LogIOErrorTests(TestCase): + + def test_doc_text_io_error(self): + + d = IndividualDraftFactory() + + with mock.patch("ietf.doc.models.Path") as path_cls_mock: 
+ with mock.patch("ietf.doc.models.log.log") as log_mock: + path_cls_mock.return_value.exists.return_value = True + path_cls_mock.return_value.open.return_value.__enter__.return_value.read.side_effect = IOError("Bad things happened") + text = d.text() + self.assertIsNone(text) + self.assertTrue(log_mock.called) + self.assertIn("Bad things happened", log_mock.call_args[0][0]) diff --git a/ietf/doc/tests_ballot.py b/ietf/doc/tests_ballot.py index 8a4717c748..8420e411e2 100644 --- a/ietf/doc/tests_ballot.py +++ b/ietf/doc/tests_ballot.py @@ -3,7 +3,7 @@ import datetime -import mock +from unittest import mock from pyquery import PyQuery @@ -17,28 +17,37 @@ from ietf.doc.models import (Document, State, DocEvent, BallotPositionDocEvent, LastCallDocEvent, WriteupDocEvent, TelechatDocEvent) from ietf.doc.factories import (DocumentFactory, IndividualDraftFactory, IndividualRfcFactory, WgDraftFactory, - BallotPositionDocEventFactory, BallotDocEventFactory, IRSGBallotDocEventFactory) + BallotPositionDocEventFactory, BallotDocEventFactory, IRSGBallotDocEventFactory, RgDraftFactory) from ietf.doc.templatetags.ietf_filters import can_defer from ietf.doc.utils import create_ballot_if_not_open +from ietf.doc.views_ballot import parse_ballot_edit_return_point from ietf.doc.views_doc import document_ballot_content from ietf.group.models import Group, Role from ietf.group.factories import GroupFactory, RoleFactory, ReviewTeamFactory from ietf.ipr.factories import HolderIprDisclosureFactory -from ietf.name.models import BallotPositionName from ietf.iesg.models import TelechatDate -from ietf.person.models import Person, PersonalApiKey -from ietf.person.factories import PersonFactory +from ietf.person.models import Person +from ietf.person.factories import PersonFactory, PersonalApiKeyFactory from ietf.person.utils import get_active_ads from ietf.utils.test_utils import TestCase, login_testing_unauthorized from ietf.utils.mail import outbox, empty_outbox, get_payload_text from ietf.utils.text import unwrap -from ietf.utils.timezone import date_today +from ietf.utils.timezone import date_today, datetime_today class EditPositionTests(TestCase): + + # N.B. This test needs to be rewritten to exercise all types of ballots (iesg, irsg, rsab) + # and test against the output of the mailtriggers instead of looking for hardcoded values + # in the To and CC results. See #7864 def test_edit_position(self): ad = Person.objects.get(user__username="ad") - draft = IndividualDraftFactory(ad=ad,stream_id='ietf') + draft = WgDraftFactory( + ad=ad, + stream_id="ietf", + notify="somebody@example.com", + group__acronym="mars", + ) ballot = create_ballot_if_not_open(None, draft, ad, 'approve') url = urlreverse('ietf.doc.views_ballot.edit_position', kwargs=dict(name=draft.name, ballot_id=ballot.pk)) @@ -54,11 +63,20 @@ def test_edit_position(self): self.assertEqual(len(q('form textarea[name=comment]')), 1) # vote + empty_outbox() events_before = draft.docevent_set.count() - - r = self.client.post(url, dict(position="discuss", - discuss=" This is a discussion test. \n ", - comment=" This is a test. \n ")) + + r = self.client.post( + url, + dict( + position="discuss", + discuss=" This is a discussion test. \n ", + comment=" This is a test. 
\n ", + additional_cc="test298347@example.com", + cc_choices=["doc_notify", "doc_group_chairs"], + send_mail=1, + ), + ) self.assertEqual(r.status_code, 302) pos = draft.latest_event(BallotPositionDocEvent, balloter=ad) @@ -69,6 +87,22 @@ def test_edit_position(self): self.assertTrue(pos.comment_time != None) self.assertTrue("New position" in pos.desc) self.assertEqual(draft.docevent_set.count(), events_before + 3) + self.assertEqual(len(outbox),1) + m = outbox[0] + self.assertTrue("COMMENT" in m['Subject']) + self.assertTrue("DISCUSS" in m['Subject']) + self.assertTrue(draft.name in m['Subject']) + self.assertTrue("This is a discussion test." in str(m)) + self.assertTrue("This is a test" in str(m)) + self.assertTrue("iesg@" in m['To']) + # cc_choice doc_group_chairs + self.assertTrue("mars-chairs@" in m['Cc']) + # cc_choice doc_notify + self.assertTrue("somebody@example.com" in m['Cc']) + # cc_choice doc_group_email_list was not selected + self.assertFalse(draft.group.list_email in m['Cc']) + # extra-cc + self.assertTrue("test298347@example.com" in m['Cc']) # recast vote events_before = draft.docevent_set.count() @@ -110,7 +144,7 @@ def test_api_set_position(self): create_ballot_if_not_open(None, draft, ad, 'approve') ad.user.last_login = timezone.now() ad.user.save() - apikey = PersonalApiKey.objects.create(endpoint=url, person=ad) + apikey = PersonalApiKeyFactory(endpoint=url, person=ad) # vote events_before = draft.docevent_set.count() @@ -229,61 +263,6 @@ def test_cannot_edit_position_as_pre_ad(self): r = self.client.post(url, dict(position="discuss", discuss="Test discuss text")) self.assertEqual(r.status_code, 403) - def test_send_ballot_comment(self): - ad = Person.objects.get(user__username="ad") - draft = WgDraftFactory(ad=ad,group__acronym='mars') - draft.notify = "somebody@example.com" - draft.save_with_history([DocEvent.objects.create(doc=draft, rev=draft.rev, type="changed_document", by=Person.objects.get(user__username="secretary"), desc="Test")]) - - ballot = create_ballot_if_not_open(None, draft, ad, 'approve') - - BallotPositionDocEvent.objects.create( - doc=draft, rev=draft.rev, type="changed_ballot_position", - by=ad, balloter=ad, ballot=ballot, pos=BallotPositionName.objects.get(slug="discuss"), - discuss="This draft seems to be lacking a clearer title?", - discuss_time=timezone.now(), - comment="Test!", - comment_time=timezone.now()) - - url = urlreverse('ietf.doc.views_ballot.send_ballot_comment', kwargs=dict(name=draft.name, - ballot_id=ballot.pk)) - login_testing_unauthorized(self, "ad", url) - - # normal get - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(len(q('form input[name="extra_cc"]')) > 0) - - # send - mailbox_before = len(outbox) - - r = self.client.post(url, dict(extra_cc="test298347@example.com", cc_choices=['doc_notify','doc_group_chairs'])) - self.assertEqual(r.status_code, 302) - - self.assertEqual(len(outbox), mailbox_before + 1) - m = outbox[-1] - self.assertTrue("COMMENT" in m['Subject']) - self.assertTrue("DISCUSS" in m['Subject']) - self.assertTrue(draft.name in m['Subject']) - self.assertTrue("clearer title" in str(m)) - self.assertTrue("Test!" 
in str(m)) - self.assertTrue("iesg@" in m['To']) - # cc_choice doc_group_chairs - self.assertTrue("mars-chairs@" in m['Cc']) - # cc_choice doc_notify - self.assertTrue("somebody@example.com" in m['Cc']) - # cc_choice doc_group_email_list was not selected - self.assertFalse(draft.group.list_email in m['Cc']) - # extra-cc - self.assertTrue("test298347@example.com" in m['Cc']) - - r = self.client.post(url, dict(cc="")) - self.assertEqual(r.status_code, 302) - self.assertEqual(len(outbox), mailbox_before + 2) - m = outbox[-1] - self.assertTrue("iesg@" in m['To']) - self.assertFalse(m['Cc'] and draft.group.list_email in m['Cc']) class BallotWriteupsTests(TestCase): @@ -356,7 +335,7 @@ def test_request_last_call(self): self.assertTrue('aread@' in outbox[-1]['Cc']) def test_edit_ballot_writeup(self): - draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','iesg-eva')]) + draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','iesg-eva')], stream_id='ietf') url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) @@ -386,8 +365,25 @@ def test_edit_ballot_writeup(self): self.assertTrue("This is a simple test" in d.latest_event(WriteupDocEvent, type="changed_ballot_writeup_text").text) self.assertTrue('iesg-eva' == d.get_state_slug('draft-iesg')) + def test_edit_ballot_writeup_unauthorized_stream(self): + # Test that accessing a document from unauthorized (irtf) stream returns a 404 error + draft = RgDraftFactory() + url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) + login_testing_unauthorized(self, "ad", url) + + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + + def test_edit_ballot_writeup_invalid_name(self): + # Test that accessing a non-existent document returns a 404 error + url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name="invalid_name")) + login_testing_unauthorized(self, "ad", url) + + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + def test_edit_ballot_writeup_already_approved(self): - draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','approved')]) + draft = IndividualDraftFactory(states=[('draft','active'),('draft-iesg','approved')], stream_id='ietf') url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) @@ -461,7 +457,7 @@ def test_edit_ballot_rfceditornote(self): def test_issue_ballot(self): ad = Person.objects.get(user__username="ad") for case in ('none','past','future'): - draft = IndividualDraftFactory(ad=ad) + draft = IndividualDraftFactory(ad=ad, stream_id='ietf') if case in ('past','future'): LastCallDocEvent.objects.create( by=Person.objects.get(name='(System)'), @@ -500,7 +496,7 @@ def test_issue_ballot(self): def test_issue_ballot_auto_state_change(self): ad = Person.objects.get(user__username="ad") - draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','writeupw')]) + draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','writeupw')], stream_id='ietf') url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) @@ -524,11 +520,12 @@ def test_issue_ballot_auto_state_change(self): def test_issue_ballot_warn_if_early(self): ad = Person.objects.get(user__username="ad") - draft = IndividualDraftFactory(ad=ad, 
states=[('draft','active'),('draft-iesg','lc')]) + draft = IndividualDraftFactory(ad=ad, states=[('draft','active'),('draft-iesg','lc')], stream_id='ietf') url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=draft.name)) login_testing_unauthorized(self, "secretary", url) # expect warning about issuing a ballot before IETF Last Call is done + # No last call has yet been issued r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) @@ -536,6 +533,38 @@ def test_issue_ballot_warn_if_early(self): self.assertTrue(q('[class=text-danger]:contains("not completed IETF Last Call")')) self.assertTrue(q('[type=submit]:contains("Save")')) + # Last call exists but hasn't expired + LastCallDocEvent.objects.create( + doc=draft, + expires=datetime_today()+datetime.timedelta(days=14), + by=Person.objects.get(name="(System)") + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('[class=text-danger]:contains("not completed IETF Last Call")')) + + # Last call exists and has expired + LastCallDocEvent.objects.filter(doc=draft).update(expires=datetime_today()-datetime.timedelta(days=2)) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertFalse(q('[class=text-danger]:contains("not completed IETF Last Call")')) + + for state_slug in ["lc", "ad-eval"]: + draft.set_state(State.objects.get(type="draft-iesg",slug=state_slug)) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('[class=text-danger]:contains("It would be unexpected to issue a ballot while in this state.")')) + + draft.set_state(State.objects.get(type="draft-iesg",slug="writeupw")) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertFalse(q('[class=text-danger]:contains("It would be unexpected to issue a ballot while in this state.")')) + + def test_edit_approval_text(self): ad = Person.objects.get(user__username="ad") draft = WgDraftFactory(ad=ad,states=[('draft','active'),('draft-iesg','iesg-eva')],intended_std_level_id='ps',group__parent=Group.objects.get(acronym='farfut')) @@ -773,7 +802,7 @@ def test_clear_ballot(self): ballot = create_ballot_if_not_open(None, draft, ad, 'approve') old_ballot_id = ballot.id draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="iesg-eva")) - url = urlreverse('ietf.doc.views_ballot.clear_ballot', kwargs=dict(name=draft.name,ballot_type_slug=draft.ballot_open('approve').ballot_type.slug)) + url = urlreverse('ietf.doc.views_ballot.clear_ballot', kwargs=dict(name=draft.name,ballot_type_slug="approve")) login_testing_unauthorized(self, "secretary", url) r = self.client.get(url) self.assertEqual(r.status_code, 200) @@ -783,6 +812,11 @@ def test_clear_ballot(self): self.assertIsNotNone(ballot) self.assertEqual(ballot.ballotpositiondocevent_set.count(),0) self.assertNotEqual(old_ballot_id, ballot.id) + # It's not valid to clear a ballot of a type where there's no matching state + url = urlreverse('ietf.doc.views_ballot.clear_ballot', kwargs=dict(name=draft.name,ballot_type_slug="statchg")) + r = self.client.post(url,{}) + self.assertEqual(r.status_code, 404) + def test_ballot_downref_approve(self): ad = Person.objects.get(name="Areað Irector") @@ -803,8 +837,8 @@ def test_ballot_downref_approve(self): desc='Last call announcement was changed', text='this is simple last call text.' 
) rfc = IndividualRfcFactory.create( + name = "rfc6666", stream_id='ise', - other_aliases=['rfc6666',], states=[('draft','rfc'),('draft-iesg','pub')], std_level_id='inf', ) @@ -821,7 +855,7 @@ def test_ballot_downref_approve(self): self.assertContains(r, "No downward references for") # Add a downref, the page should ask if it should be added to the registry - rel = draft.relateddocument_set.create(target=rfc.docalias.get(name='rfc6666'),relationship_id='refnorm') + rel = draft.relateddocument_set.create(target=rfc, relationship_id='refnorm') d = [rdoc for rdoc in draft.relateddocument_set.all() if rel.is_approved_downref()] original_len = len(d) r = self.client.get(url) @@ -1121,13 +1155,13 @@ def test_regenerate_last_call(self): self.assertFalse("contains these normative down" in lc_text) rfc = IndividualRfcFactory.create( + rfc_number=6666, stream_id='ise', - other_aliases=['rfc6666',], states=[('draft','rfc'),('draft-iesg','pub')], std_level_id='inf', ) - draft.relateddocument_set.create(target=rfc.docalias.get(name='rfc6666'),relationship_id='refnorm') + draft.relateddocument_set.create(target=rfc,relationship_id='refnorm') r = self.client.post(url, dict(regenerate_last_call_text="1")) self.assertEqual(r.status_code, 200) @@ -1137,7 +1171,7 @@ def test_regenerate_last_call(self): self.assertTrue("rfc6666" in lc_text) self.assertTrue("Independent Submission" in lc_text) - draft.relateddocument_set.create(target=rfc.docalias.get(name='rfc6666'), relationship_id='downref-approval') + draft.relateddocument_set.create(target=rfc, relationship_id='downref-approval') r = self.client.post(url, dict(regenerate_last_call_text="1")) self.assertEqual(r.status_code, 200) @@ -1413,3 +1447,28 @@ def test_document_ballot_content_without_send_email_values(self): self._assertBallotMessage(q, balloters[0], 'No discuss send log available') self._assertBallotMessage(q, balloters[1], 'No comment send log available') self._assertBallotMessage(q, old_balloter, 'No ballot position send log available') + +class ReturnToUrlTests(TestCase): + def test_invalid_return_to_url(self): + with self.assertRaises(ValueError): + parse_ballot_edit_return_point('/', 'draft-ietf-opsawg-ipfix-tcpo-v6eh', '998718') + + with self.assertRaises(ValueError): + parse_ballot_edit_return_point('/a-route-that-does-not-exist/', 'draft-ietf-opsawg-ipfix-tcpo-v6eh', '998718') + + with self.assertRaises(ValueError): + parse_ballot_edit_return_point('https://example.com/phishing', 'draft-ietf-opsawg-ipfix-tcpo-v6eh', '998718') + + def test_valid_default_return_to_url(self): + self.assertEqual(parse_ballot_edit_return_point( + None, + 'draft-ietf-opsawg-ipfix-tcpo-v6eh', + '998718' + ), '/doc/draft-ietf-opsawg-ipfix-tcpo-v6eh/ballot/998718/') + + def test_valid_return_to_url(self): + self.assertEqual(parse_ballot_edit_return_point( + '/doc/draft-ietf-opsawg-ipfix-tcpo-v6eh/ballot/998718/', + 'draft-ietf-opsawg-ipfix-tcpo-v6eh', + '998718' + ), '/doc/draft-ietf-opsawg-ipfix-tcpo-v6eh/ballot/998718/') diff --git a/ietf/doc/tests_bofreq.py b/ietf/doc/tests_bofreq.py index 2fdc8c282a..6b142149be 100644 --- a/ietf/doc/tests_bofreq.py +++ b/ietf/doc/tests_bofreq.py @@ -16,9 +16,10 @@ from django.template.loader import render_to_string from django.utils import timezone +from ietf.doc.storage_utils import retrieve_str from ietf.group.factories import RoleFactory from ietf.doc.factories import BofreqFactory, NewRevisionDocEventFactory -from ietf.doc.models import State, Document, DocAlias, NewRevisionDocEvent +from ietf.doc.models import State, 
Document, NewRevisionDocEvent from ietf.doc.utils_bofreq import bofreq_editors, bofreq_responsible from ietf.ietfauth.utils import has_role from ietf.person.factories import PersonFactory @@ -32,7 +33,7 @@ class BofreqTests(TestCase): settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['BOFREQ_PATH'] def write_bofreq_file(self, bofreq): - fname = Path(settings.BOFREQ_PATH) / ("%s-%s.md" % (bofreq.canonical_name(), bofreq.rev)) + fname = Path(settings.BOFREQ_PATH) / ("%s-%s.md" % (bofreq.name, bofreq.rev)) with fname.open("w") as f: f.write(f"""# This is a test bofreq. Version: {bofreq.rev} @@ -54,8 +55,8 @@ def test_show_bof_requests(self): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) for state in states: - self.assertEqual(len(q(f'#bofreqs-{state.slug}')), 1) - self.assertEqual(len(q(f'#bofreqs-{state.slug} tbody tr')), 3) + self.assertEqual(len(q(f'#bofreqs-{state.slug}')), 1 if state.slug!="spam" else 0) + self.assertEqual(len(q(f'#bofreqs-{state.slug} tbody tr')), 3 if state.slug!="spam" else 0) self.assertFalse(q('#start_button')) PersonFactory(user__username='nobody') self.client.login(username='nobody', password='nobody+password') @@ -63,6 +64,13 @@ def test_show_bof_requests(self): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertTrue(q('#start_button')) + self.client.logout() + self.client.login(username='secretary', password='secretary+password') + r = self.client.get(url) + q = PyQuery(r.content) + for state in states: + self.assertEqual(len(q(f'#bofreqs-{state.slug}')), 1) + self.assertEqual(len(q(f'#bofreqs-{state.slug} tbody tr')), 3) def test_bofreq_main_page(self): @@ -299,17 +307,20 @@ def test_submit(self): url = urlreverse('ietf.doc.views_bofreq.submit', kwargs=dict(name=doc.name)) rev = doc.rev + doc_time = doc.time r = self.client.post(url,{'bofreq_submission':'enter','bofreq_content':'# oiwefrase'}) self.assertEqual(r.status_code, 302) doc = reload_db_objects(doc) - self.assertEqual(rev, doc.rev) + self.assertEqual(doc.rev, rev) + self.assertEqual(doc.time, doc_time) nobody = PersonFactory() self.client.login(username=nobody.user.username, password=nobody.user.username+'+password') r = self.client.post(url,{'bofreq_submission':'enter','bofreq_content':'# oiwefrase'}) self.assertEqual(r.status_code, 403) doc = reload_db_objects(doc) - self.assertEqual(rev, doc.rev) + self.assertEqual(doc.rev, rev) + self.assertEqual(doc.time, doc_time) self.client.logout() editor = bofreq_editors(doc).first() @@ -331,11 +342,14 @@ def test_submit(self): r = self.client.post(url, postdict) self.assertEqual(r.status_code, 302) doc = reload_db_objects(doc) - self.assertEqual('%02d'%(int(rev)+1) ,doc.rev) - self.assertEqual(f'# {username}', doc.text()) - self.assertEqual(docevent_count+1, doc.docevent_set.count()) - self.assertEqual(1, len(outbox)) + self.assertEqual(doc.rev, '%02d'%(int(rev)+1)) + self.assertGreater(doc.time, doc_time) + self.assertEqual(doc.text(), f'# {username}') + self.assertEqual(retrieve_str('bofreq', doc.get_base_name()), f'# {username}') + self.assertEqual(doc.docevent_set.count(), docevent_count+1) + self.assertEqual(len(outbox), 1) rev = doc.rev + doc_time = doc.time finally: os.unlink(file.name) @@ -366,13 +380,13 @@ def test_start_new_bofreq(self): name = f"bofreq-{xslugify(nobody.last_name())[:64]}-{postdict['title']}".replace(' ','-') bofreq = Document.objects.filter(name=name,type_id='bofreq').first() self.assertIsNotNone(bofreq) - self.assertIsNotNone(DocAlias.objects.filter(name=name).first()) 
self.assertEqual(bofreq.title, postdict['title']) self.assertEqual(bofreq.rev, '00') self.assertEqual(bofreq.get_state_slug(), 'proposed') self.assertEqual(list(bofreq_editors(bofreq)), [nobody]) self.assertEqual(bofreq.latest_event(NewRevisionDocEvent).rev, '00') self.assertEqual(bofreq.text_or_error(), 'some stuff') + self.assertEqual(retrieve_str('bofreq',bofreq.get_base_name()), 'some stuff') self.assertEqual(len(outbox),1) finally: os.unlink(file.name) diff --git a/ietf/doc/tests_charter.py b/ietf/doc/tests_charter.py index 0350fc0221..62e49559e2 100644 --- a/ietf/doc/tests_charter.py +++ b/ietf/doc/tests_charter.py @@ -16,6 +16,7 @@ from ietf.doc.factories import CharterFactory, NewRevisionDocEventFactory, TelechatDocEventFactory from ietf.doc.models import ( Document, State, BallotDocEvent, BallotType, NewRevisionDocEvent, TelechatDocEvent, WriteupDocEvent ) +from ietf.doc.storage_utils import retrieve_str from ietf.doc.utils_charter import ( next_revision, default_review_text, default_action_text, charter_name_for_group ) from ietf.doc.utils import close_open_ballots @@ -87,11 +88,12 @@ def test_view_revisions(self): class EditCharterTests(TestCase): settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['CHARTER_PATH'] + def setUp(self): + super().setUp() + (Path(settings.FTP_DIR)/"charter").mkdir() + def write_charter_file(self, charter): - with (Path(settings.CHARTER_PATH) / - ("%s-%s.txt" % (charter.canonical_name(), charter.rev)) - ).open("w") as f: - f.write("This is a charter.") + (Path(settings.CHARTER_PATH) / f"{charter.name}-{charter.rev}.txt").write_text("This is a charter.") def test_startstop_process(self): CharterFactory(group__acronym='mars') @@ -509,8 +511,21 @@ def test_submit_charter(self): self.assertEqual(charter.rev, next_revision(prev_rev)) self.assertTrue("new_revision" in charter.latest_event().type) - with (Path(settings.CHARTER_PATH) / (charter.canonical_name() + "-" + charter.rev + ".txt")).open(encoding='utf-8') as f: - self.assertEqual(f.read(), "Windows line\nMac line\nUnix line\n" + utf_8_snippet.decode('utf-8')) + charter_path = Path(settings.CHARTER_PATH) / (charter.name + "-" + charter.rev + ".txt") + file_contents = (charter_path).read_text("utf-8") + self.assertEqual( + file_contents, + "Windows line\nMac line\nUnix line\n" + utf_8_snippet.decode("utf-8"), + ) + ftp_charter_path = Path(settings.FTP_DIR) / "charter" / charter_path.name + self.assertTrue(ftp_charter_path.exists()) + self.assertTrue(charter_path.samefile(ftp_charter_path)) + blobstore_contents = retrieve_str("charter", charter.get_base_name()) + self.assertEqual( + blobstore_contents, + "Windows line\nMac line\nUnix line\n" + utf_8_snippet.decode("utf-8"), + ) + def test_submit_initial_charter(self): group = GroupFactory(type_id='wg',acronym='mars',list_email='mars-wg@ietf.org') @@ -538,6 +553,24 @@ def test_submit_initial_charter(self): group = Group.objects.get(pk=group.pk) self.assertEqual(group.charter, charter) + def test_submit_charter_with_invalid_name(self): + self.client.login(username="secretary", password="secretary+password") + ietf_group = GroupFactory(type_id="wg") + for bad_name in ("charter-irtf-{}", "charter-randomjunk-{}", "charter-ietf-thisisnotagroup"): + url = urlreverse("ietf.doc.views_charter.submit", kwargs={"name": bad_name.format(ietf_group.acronym)}) + r = self.client.get(url) + self.assertEqual(r.status_code, 404, f"GET of charter named {bad_name} should 404") + r = self.client.post(url, {}) + self.assertEqual(r.status_code, 404, f"POST 
of charter named {bad_name} should 404") + + irtf_group = GroupFactory(type_id="rg") + for bad_name in ("charter-ietf-{}", "charter-whatisthis-{}", "charter-irtf-thisisnotagroup"): + url = urlreverse("ietf.doc.views_charter.submit", kwargs={"name": bad_name.format(irtf_group.acronym)}) + r = self.client.get(url) + self.assertEqual(r.status_code, 404, f"GET of charter named {bad_name} should 404") + r = self.client.post(url, {}) + self.assertEqual(r.status_code, 404, f"POST of charter named {bad_name} should 404") + def test_edit_review_announcement_text(self): area = GroupFactory(type_id='area') RoleFactory(name_id='ad',group=area,person=Person.objects.get(user__username='ad')) @@ -788,9 +821,11 @@ def test_approve(self): self.assertTrue(not charter.ballot_open("approve")) self.assertEqual(charter.rev, "01") - self.assertTrue( - (Path(settings.CHARTER_PATH) / ("charter-ietf-%s-%s.txt" % (group.acronym, charter.rev))).exists() - ) + charter_path = Path(settings.CHARTER_PATH) / ("charter-ietf-%s-%s.txt" % (group.acronym, charter.rev)) + charter_ftp_path = Path(settings.FTP_DIR) / "charter" / charter_path.name + self.assertTrue(charter_path.exists()) + self.assertTrue(charter_ftp_path.exists()) + self.assertTrue(charter_path.samefile(charter_ftp_path)) self.assertEqual(len(outbox), 2) # diff --git a/ietf/doc/tests_conflict_review.py b/ietf/doc/tests_conflict_review.py index 1b4b8eefa0..791db17f5a 100644 --- a/ietf/doc/tests_conflict_review.py +++ b/ietf/doc/tests_conflict_review.py @@ -4,6 +4,7 @@ import io import os +from pathlib import Path from pyquery import PyQuery from textwrap import wrap @@ -15,6 +16,7 @@ from ietf.doc.factories import IndividualDraftFactory, ConflictReviewFactory, RgDraftFactory from ietf.doc.models import Document, DocEvent, NewRevisionDocEvent, BallotPositionDocEvent, TelechatDocEvent, State, DocTagName +from ietf.doc.storage_utils import retrieve_str from ietf.doc.utils import create_ballot_if_not_open from ietf.doc.views_conflict_review import default_approval_text from ietf.group.models import Person @@ -70,12 +72,12 @@ def test_start_review_as_secretary(self): self.assertEqual(review_doc.ad.name,'Areað Irector') self.assertEqual(review_doc.notify,'ipu@ietf.org') doc = Document.objects.get(name='draft-imaginary-independent-submission') - self.assertTrue(doc in [x.target.document for x in review_doc.relateddocument_set.filter(relationship__slug='conflrev')]) + self.assertTrue(doc in [x.target for x in review_doc.relateddocument_set.filter(relationship__slug='conflrev')]) self.assertTrue(review_doc.latest_event(DocEvent,type="added_comment").desc.startswith("IETF conflict review requested")) self.assertTrue(doc.latest_event(DocEvent,type="added_comment").desc.startswith("IETF conflict review initiated")) self.assertTrue('Conflict Review requested' in outbox[-1]['Subject']) - + # verify you can't start a review when a review is already in progress r = self.client.post(url,dict(ad="Areað Irector",create_in_state="Needs Shepherd",notify='ipu@ietf.org')) self.assertEqual(r.status_code, 404) @@ -119,7 +121,7 @@ def test_start_review_as_stream_owner(self): self.assertEqual(review_doc.ad.name,'Ietf Chair') self.assertEqual(review_doc.notify,'ipu@ietf.org') doc = Document.objects.get(name='draft-imaginary-independent-submission') - self.assertTrue(doc in [x.target.document for x in review_doc.relateddocument_set.filter(relationship__slug='conflrev')]) + self.assertTrue(doc in [x.target for x in review_doc.relateddocument_set.filter(relationship__slug='conflrev')]) 
self.assertEqual(len(outbox), messages_before + 2) @@ -387,7 +389,7 @@ def setUp(self): class ConflictReviewSubmitTests(TestCase): - settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['CONFLICT_REVIEW_PATH'] + settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['CONFLICT_REVIEW_PATH','FTP_DIR'] def test_initial_submission(self): doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission') url = urlreverse('ietf.doc.views_conflict_review.submit',kwargs=dict(name=doc.name)) @@ -403,9 +405,15 @@ def test_initial_submission(self): # Right now, nothing to test - we let people put whatever the web browser will let them put into that textbox # sane post using textbox - path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev)) + basename = f"{doc.name}-{doc.rev}.txt" + path = Path(settings.CONFLICT_REVIEW_PATH) / basename + ftp_dir = Path(settings.FTP_DIR) / "conflict-reviews" + if not ftp_dir.exists(): + ftp_dir.mkdir() + ftp_path = ftp_dir / basename self.assertEqual(doc.rev,'00') - self.assertFalse(os.path.exists(path)) + self.assertFalse(path.exists()) + self.assertFalse(ftp_path.exists()) r = self.client.post(url,dict(content="Some initial review text\n",submit_response="1")) self.assertEqual(r.status_code,302) doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission') @@ -413,7 +421,9 @@ def test_initial_submission(self): with io.open(path) as f: self.assertEqual(f.read(),"Some initial review text\n") f.close() + self.assertTrue(ftp_path.exists()) self.assertTrue( "submission-00" in doc.latest_event(NewRevisionDocEvent).desc) + self.assertEqual(retrieve_str("conflrev",basename), "Some initial review text\n") def test_subsequent_submission(self): doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission') @@ -423,7 +433,7 @@ def test_subsequent_submission(self): # A little additional setup # doc.rev is u'00' per the test setup - double-checking that here - if it fails, the breakage is in setUp self.assertEqual(doc.rev,'00') - path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev)) + path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (doc.name, doc.rev)) with io.open(path,'w') as f: f.write('This is the old proposal.') f.close() @@ -450,7 +460,7 @@ def test_subsequent_submission(self): self.assertEqual(r.status_code, 302) doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission') self.assertEqual(doc.rev,'01') - path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev)) + path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (doc.name, doc.rev)) with io.open(path) as f: self.assertEqual(f.read(),"This is a new proposal.") f.close() diff --git a/ietf/doc/tests_downref.py b/ietf/doc/tests_downref.py index 258494e364..0222ad7942 100644 --- a/ietf/doc/tests_downref.py +++ b/ietf/doc/tests_downref.py @@ -19,12 +19,9 @@ def setUp(self): super().setUp() PersonFactory(name='Plain Man',user__username='plain') self.draft = WgDraftFactory(name='draft-ietf-mars-test') - self.draftalias = self.draft.docalias.get(name='draft-ietf-mars-test') self.doc = WgDraftFactory(name='draft-ietf-mars-approved-document',states=[('draft-iesg','rfcqueue')]) - self.docalias = self.doc.docalias.get(name='draft-ietf-mars-approved-document') - self.rfc = WgRfcFactory(alias2__name='rfc9998') - self.rfcalias = self.rfc.docalias.get(name='rfc9998') - 
RelatedDocument.objects.create(source=self.doc, target=self.rfcalias, relationship_id='downref-approval') + self.rfc = WgRfcFactory(rfc_number=9998) + RelatedDocument.objects.create(source=self.doc, target=self.rfc, relationship_id='downref-approval') def test_downref_registry(self): url = urlreverse('ietf.doc.views_downref.downref_registry') @@ -64,44 +61,44 @@ def test_downref_registry_add(self): self.assertContains(r, 'Save downref') # error - already in the downref registry - r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.docalias.pk, ))) + r = self.client.post(url, dict(rfc=self.rfc.pk, drafts=(self.doc.pk, ))) self.assertContains(r, 'Downref is already in the registry') # error - source is not in an approved state r = self.client.get(url) self.assertEqual(r.status_code, 200) - r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.draftalias.pk, ))) + r = self.client.post(url, dict(rfc=self.rfc.pk, drafts=(self.draft.pk, ))) self.assertContains(r, 'Draft is not yet approved') # error - the target is not a normative reference of the source self.draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="pub")) r = self.client.get(url) self.assertEqual(r.status_code, 200) - r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.draftalias.pk, ))) + r = self.client.post(url, dict(rfc=self.rfc.pk, drafts=(self.draft.pk, ))) self.assertContains(r, 'There does not seem to be a normative reference to RFC') self.assertContains(r, 'Save downref anyway') # normal - approve the document so the downref is now okay - RelatedDocument.objects.create(source=self.draft, target=self.rfcalias, relationship_id='refnorm') + RelatedDocument.objects.create(source=self.draft, target=self.rfc, relationship_id='refnorm') draft_de_count_before = self.draft.docevent_set.count() rfc_de_count_before = self.rfc.docevent_set.count() r = self.client.get(url) self.assertEqual(r.status_code, 200) - r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.draftalias.pk, ))) + r = self.client.post(url, dict(rfc=self.rfc.pk, drafts=(self.draft.pk, ))) self.assertEqual(r.status_code, 302) newurl = urlreverse('ietf.doc.views_downref.downref_registry') r = self.client.get(newurl) self.assertContains(r, 'tr>th:first-child").text() + self.assertNotIn("IESG", top_level_metadata_headings) + self.assertNotIn("IANA", top_level_metadata_headings) + +class IetfGroupActionHelperTests(TestCase): + def test_manage_adoption_routing(self): + draft = IndividualDraftFactory() + nobody = PersonFactory() + rgchair = RoleFactory(group__type_id="rg", name_id="chair").person + wgchair = RoleFactory(group__type_id="wg", name_id="chair").person + multichair = RoleFactory(group__type_id="rg", name_id="chair").person + RoleFactory(group__type_id="wg", person=multichair, name_id="chair") + ad = RoleFactory(group__type_id="area", name_id="ad").person + secretary = Role.objects.filter( + name_id="secr", group__acronym="secretariat" + ).first() + self.assertIsNotNone(secretary) + secretary = secretary.person + self.assertFalse( + has_role(rgchair.user, ["Secretariat", "Area Director", "WG Chair"]) + ) + url = urlreverse( + "ietf.doc.views_doc.document_main", kwargs={"name": draft.name} + ) + ask_about_ietf_link = urlreverse( + "ietf.doc.views_draft.ask_about_ietf_adoption_call", + kwargs={"name": draft.name}, + ) + non_ietf_adoption_link = urlreverse( + "ietf.doc.views_draft.adopt_draft", kwargs={"name": draft.name} + ) + for person in (None, nobody, rgchair, wgchair, multichair, ad, 
secretary): + if person is not None: + self.client.login( + username=person.user.username, + password=f"{person.user.username}+password", + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + has_ask_about_ietf_link = len(q(f'a[href="{ask_about_ietf_link}"]')) != 0 + has_non_ietf_adoption_link = ( + len(q(f'a[href="{non_ietf_adoption_link}"]')) != 0 + ) + ask_about_r = self.client.get(ask_about_ietf_link) + ask_about_link_return_code = ask_about_r.status_code + if person == rgchair: + self.assertFalse(has_ask_about_ietf_link) + self.assertTrue(has_non_ietf_adoption_link) + self.assertEqual(ask_about_link_return_code, 403) + elif person in (ad, nobody, None): + self.assertFalse(has_ask_about_ietf_link) + self.assertFalse(has_non_ietf_adoption_link) + self.assertEqual( + ask_about_link_return_code, 302 if person is None else 403 + ) + else: + self.assertTrue(has_ask_about_ietf_link) + self.assertFalse(has_non_ietf_adoption_link) + self.assertEqual(ask_about_link_return_code, 200) + self.client.logout() + + def test_ask_about_ietf_adoption_call(self): + # Basic permission tests above + doc = IndividualDraftFactory() + self.assertEqual(doc.docevent_set.count(), 1) + chair_role = RoleFactory(group__type_id="wg", name_id="chair") + chair = chair_role.person + group = chair_role.group + othergroup = GroupFactory(type_id="wg") + url = urlreverse( + "ietf.doc.views_draft.ask_about_ietf_adoption_call", + kwargs={"name": doc.name}, + ) + login_testing_unauthorized(self, chair.user.username, url) + r = self.client.post(url, {"group": othergroup.pk}) + self.assertEqual(r.status_code, 200) + r = self.client.post(url, {"group": group.pk}) + self.assertEqual(r.status_code, 302) + + def test_offer_wg_action_helpers(self): + def _assert_view_presents_buttons(testcase, response, expected): + q = PyQuery(response.content) + for id, expect in expected: + button = q(f"#{id}") + testcase.assertEqual( + len(button) != 0, + expect + ) + + # View rejects access + came_from_draft = WgDraftFactory(states=[("draft","rfc")]) + rfc = WgRfcFactory(group=came_from_draft.group) + came_from_draft.relateddocument_set.create(relationship_id="became_rfc",target=rfc) + rfc_chair = RoleFactory(name_id="chair", group=rfc.group).person + url = urlreverse("ietf.doc.views_draft.offer_wg_action_helpers", kwargs=dict(name=came_from_draft.name)) + login_testing_unauthorized(self, rfc_chair.user.username, url) + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + self.client.logout() + rg_draft = RgDraftFactory() + rg_chair = RoleFactory(group=rg_draft.group, name_id="chair").person + url = urlreverse("ietf.doc.views_draft.offer_wg_action_helpers", kwargs=dict(name=rg_draft.name)) + login_testing_unauthorized(self, rg_chair.user.username, url) + r = self.client.get(url) + self.assertEqual(r.status_code,404) + self.client.logout() + + # View offers access + draft = WgDraftFactory() + chair = RoleFactory(group=draft.group, name_id="chair").person + url = urlreverse("ietf.doc.views_draft.offer_wg_action_helpers", kwargs=dict(name=draft.name)) + login_testing_unauthorized(self, chair.user.username, url) + r = self.client.get(url) + self.assertEqual(r.status_code,200) + _assert_view_presents_buttons( + self, + r, + [ + ("id_wgadopt_button", False), + ("id_wglc_button", True), + ("id_pubreq_button", True), + ], + ) + draft.set_state(State.objects.get(type_id="draft-stream-ietf", slug="wg-cand")) + r = self.client.get(url) + self.assertEqual(r.status_code,200) + 
_assert_view_presents_buttons( + self, + r, + [ + ("id_wgadopt_button", True), + ("id_wglc_button", False), + ("id_pubreq_button", False), + ], + ) + draft.set_state(State.objects.get(type_id="draft-stream-ietf", slug="wg-lc")) + StateDocEventFactory( + doc=draft, + state_type_id="draft-stream-ietf", + state=("draft-stream-ietf", "wg-lc"), + ) + self.assertEqual(draft.docevent_set.count(), 2) + r = self.client.get(url) + self.assertEqual(r.status_code,200) + _assert_view_presents_buttons( + self, + r, + [ + ("id_wgadopt_button", False), + ("id_wglc_button", False), + ("id_pubreq_button", True), + ], + ) + draft.set_state(State.objects.get(type_id="draft-stream-ietf",slug="chair-w")) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + _assert_view_presents_buttons( + self, + r, + [ + ("id_wgadopt_button", False), + ("id_wglc_button", True), + ("id_pubreq_button", True), + ], + ) + self.assertContains(response=r,text="Issue Another Working Group Last Call", status_code=200) + other_draft = WgDraftFactory() + self.client.logout() + url = urlreverse("ietf.doc.views_draft.offer_wg_action_helpers", kwargs=dict(name=other_draft.name)) + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + _assert_view_presents_buttons( + self, + r, + [ + ("id_wgadopt_button", False), + ("id_wglc_button", True), + ("id_pubreq_button", True), + ], + ) + self.assertContains( + response=r, text="Issue Working Group Last Call", status_code=200 + ) + +class BallotEmailAjaxTests(TestCase): + def test_ajax_build_position_email(self): + def _post_json(self, url, json_to_post): + r = self.client.post( + url, json.dumps(json_to_post), content_type="application/json" + ) + self.assertEqual(r.status_code, 200) + return json.loads(r.content) + + doc = WgDraftFactory() + ad = RoleFactory( + name_id="ad", group=doc.group, person__name="Some Areadirector" + ).person + url = urlreverse("ietf.doc.views_ballot.ajax_build_position_email") + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + self.assertEqual(r.status_code, 405) + response = _post_json(self, url, {}) + self.assertFalse(response["success"]) + self.assertEqual(response["errors"], ["post_data not provided"]) + response = _post_json(self, url, {"dictis": "not empty"}) + self.assertFalse(response["success"]) + self.assertEqual(response["errors"], ["post_data not provided"]) + response = _post_json(self, url, {"post_data": {}}) + self.assertFalse(response["success"]) + self.assertEqual(len(response["errors"]), 7) + response = _post_json( + self, + url, + { + "post_data": { + "discuss": "aaaaaa", + "comment": "bbbbbb", + "position": "discuss", + "balloter": Person.objects.aggregate(maxpk=Max("pk") + 1)["maxpk"], + "docname": "this-draft-does-not-exist", + "cc_choices": ["doc_group_mail_list"], + "additional_cc": "foo@example.com", + } + }, + ) + self.assertFalse(response["success"]) + self.assertEqual( + response["errors"], + ["No person found matching balloter", "No document found matching docname"], + ) + response = _post_json( + self, + url, + { + "post_data": { + "discuss": "aaaaaa", + "comment": "bbbbbb", + "position": "discuss", + "balloter": ad.pk, + "docname": doc.name, + "cc_choices": ["doc_group_mail_list"], + "additional_cc": "foo@example.com", + } + }, + ) + self.assertTrue(response["success"]) + for snippet in [ + "aaaaaa", + "bbbbbb", + "DISCUSS", + ad.plain_name(), + doc.name, + doc.group.list_email, + "foo@example.com", + ]: + self.assertIn(snippet, 
response["text"]) + diff --git a/ietf/doc/tests_irsg_ballot.py b/ietf/doc/tests_irsg_ballot.py index 92752e48c4..d96cf9dbef 100644 --- a/ietf/doc/tests_irsg_ballot.py +++ b/ietf/doc/tests_irsg_ballot.py @@ -288,7 +288,7 @@ def test_edit_ballot_position_permissions(self): def test_iesg_ballot_no_irsg_actions(self): ad = Person.objects.get(user__username="ad") - wg_draft = IndividualDraftFactory(ad=ad) + wg_draft = IndividualDraftFactory(ad=ad, stream_id='ietf') irsgmember = get_active_irsg()[0] url = urlreverse('ietf.doc.views_ballot.ballot_writeupnotes', kwargs=dict(name=wg_draft.name)) @@ -355,28 +355,35 @@ def test_issue_ballot(self): def test_take_and_email_position(self): draft = RgDraftFactory() ballot = IRSGBallotDocEventFactory(doc=draft) - url = urlreverse('ietf.doc.views_ballot.edit_position', kwargs=dict(name=draft.name, ballot_id=ballot.pk)) + self.balloter + url = ( + urlreverse( + "ietf.doc.views_ballot.edit_position", + kwargs=dict(name=draft.name, ballot_id=ballot.pk), + ) + + self.balloter + ) empty_outbox() login_testing_unauthorized(self, self.username, url) r = self.client.get(url) self.assertEqual(r.status_code, 200) - r = self.client.post(url, dict(position='yes', comment='oib239sb', send_mail='Save and send email')) + empty_outbox() + r = self.client.post( + url, + dict( + position="yes", + comment="oib239sb", + send_mail="Save and send email", + cc_choices=["doc_authors", "doc_group_chairs", "doc_group_mail_list"], + ), + ) self.assertEqual(r.status_code, 302) e = draft.latest_event(BallotPositionDocEvent) - self.assertEqual(e.pos.slug,'yes') - self.assertEqual(e.comment, 'oib239sb') - - url = urlreverse('ietf.doc.views_ballot.send_ballot_comment', kwargs=dict(name=draft.name, ballot_id=ballot.pk)) + self.balloter - - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - - r = self.client.post(url, dict(cc_choices=['doc_authors','doc_group_chairs','doc_group_mail_list'], body="Stuff")) - self.assertEqual(r.status_code, 302) - self.assertEqual(len(outbox),1) - self.assertNotIn('discuss-criteria', get_payload_text(outbox[0])) + self.assertEqual(e.pos.slug, "yes") + self.assertEqual(e.comment, "oib239sb") + self.assertEqual(len(outbox), 1) + self.assertNotIn("discuss-criteria", get_payload_text(outbox[0])) def test_close_ballot(self): draft = RgDraftFactory() @@ -482,27 +489,31 @@ def test_cant_take_position_on_iesg_ballot(self): def test_take_and_email_position(self): draft = RgDraftFactory() ballot = IRSGBallotDocEventFactory(doc=draft) - url = urlreverse('ietf.doc.views_ballot.edit_position', kwargs=dict(name=draft.name, ballot_id=ballot.pk)) + url = urlreverse( + "ietf.doc.views_ballot.edit_position", + kwargs=dict(name=draft.name, ballot_id=ballot.pk), + ) empty_outbox() login_testing_unauthorized(self, self.username, url) r = self.client.get(url) self.assertEqual(r.status_code, 200) - r = self.client.post(url, dict(position='yes', comment='oib239sb', send_mail='Save and send email')) + r = self.client.post( + url, + dict( + position="yes", + comment="oib239sb", + send_mail="Save and send email", + cc_choices=["doc_authors", "doc_group_chairs", "doc_group_mail_list"], + ), + ) self.assertEqual(r.status_code, 302) e = draft.latest_event(BallotPositionDocEvent) - self.assertEqual(e.pos.slug,'yes') - self.assertEqual(e.comment, 'oib239sb') - - url = urlreverse('ietf.doc.views_ballot.send_ballot_comment', kwargs=dict(name=draft.name, ballot_id=ballot.pk)) - - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - - r = self.client.post(url, 
dict(cc_choices=['doc_authors','doc_group_chairs','doc_group_mail_list'], body="Stuff")) + self.assertEqual(e.pos.slug, "yes") + self.assertEqual(e.comment, "oib239sb") self.assertEqual(r.status_code, 302) - self.assertEqual(len(outbox),1) + self.assertEqual(len(outbox), 1) class IESGMemberTests(TestCase): diff --git a/ietf/doc/tests_js.py b/ietf/doc/tests_js.py index ac63c0995e..9a5aad13b9 100644 --- a/ietf/doc/tests_js.py +++ b/ietf/doc/tests_js.py @@ -41,7 +41,7 @@ def _fill_in_author_form(form_elt, name, email, affiliation, country): (By.CSS_SELECTOR, result_selector), name )) - input.send_keys('\n') # select the object + self.driver.find_element(By.CSS_SELECTOR, result_selector).click() # After the author is selected, the email select options will be populated. # Wait for that, then click on the option corresponding to the requested email. @@ -92,12 +92,8 @@ def _read_author_form(form_elt): self.assertEqual(len(author_forms), 1) # get the "add author" button so we can add blank author forms - add_author_button = self.driver.find_element(By.ID, 'add-author-button') for index, auth in enumerate(authors): - self.driver.execute_script("arguments[0].scrollIntoView();", add_author_button) # FIXME: no idea why this fails: - # self.scroll_to_element(add_author_button) # Can only click if it's in view! - self.driver.execute_script("arguments[0].click();", add_author_button) # FIXME: no idea why this fails: - # add_author_button.click() # Create a new form. Automatically scrolls to it. + self.scroll_and_click((By.ID, 'add-author-button')) # Create new form. Automatically scrolls to it. author_forms = authors_list.find_elements(By.CLASS_NAME, 'author-panel') authors_added = index + 1 self.assertEqual(len(author_forms), authors_added + 1) # Started with 1 author, hence +1 @@ -119,9 +115,8 @@ def _read_author_form(form_elt): self.driver.find_element(By.ID, 'id_basis').send_keys('change testing') # Now click the 'submit' button and check that the update was accepted. 
submit_button = self.driver.find_element(By.CSS_SELECTOR, '#content button[type="submit"]') - self.driver.execute_script("arguments[0].click();", submit_button) # FIXME: no idea why this fails: - # self.scroll_to_element(submit_button) - # submit_button.click() + self.scroll_to_element(submit_button) + submit_button.click() # Wait for redirect to the document_main view self.wait.until( expected_conditions.url_to_be( diff --git a/ietf/doc/tests_material.py b/ietf/doc/tests_material.py index 05bbc2078b..04779bdaf1 100644 --- a/ietf/doc/tests_material.py +++ b/ietf/doc/tests_material.py @@ -6,19 +6,22 @@ import shutil import io +from unittest.mock import call, patch from pathlib import Path from pyquery import PyQuery import debug # pyflakes:ignore from django.conf import settings +from django.test import override_settings from django.urls import reverse as urlreverse from django.utils import timezone -from ietf.doc.models import Document, State, DocAlias, NewRevisionDocEvent +from ietf.doc.models import Document, State, NewRevisionDocEvent +from ietf.doc.storage_utils import retrieve_str from ietf.group.factories import RoleFactory from ietf.group.models import Group -from ietf.meeting.factories import MeetingFactory, SessionFactory +from ietf.meeting.factories import MeetingFactory, SessionFactory, SessionPresentationFactory from ietf.meeting.models import Meeting, SessionPresentation, SchedulingEvent from ietf.name.models import SessionStatusName from ietf.person.models import Person @@ -26,7 +29,7 @@ class GroupMaterialTests(TestCase): - settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['AGENDA_PATH'] + settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['AGENDA_PATH', 'FTP_DIR'] def setUp(self): super().setUp() self.materials_dir = self.tempdir("materials") @@ -35,6 +38,10 @@ def setUp(self): self.slides_dir.mkdir() self.saved_document_path_pattern = settings.DOCUMENT_PATH_PATTERN settings.DOCUMENT_PATH_PATTERN = self.materials_dir + "/{doc.type_id}/" + self.assertTrue(Path(settings.FTP_DIR).exists()) + ftp_slides_dir = Path(settings.FTP_DIR) / "slides" + if not ftp_slides_dir.exists(): + ftp_slides_dir.mkdir() self.meeting_slides_dir = Path(settings.AGENDA_PATH) / "42" / "slides" if not self.meeting_slides_dir.exists(): @@ -54,7 +61,6 @@ def create_slides(self): doc = Document.objects.create(name="slides-testteam-test-file", rev="01", type_id="slides", group=group) doc.set_state(State.objects.get(type="slides", slug="active")) doc.set_state(State.objects.get(type="reuse_policy", slug="multiple")) - DocAlias.objects.create(name=doc.name).docs.add(doc) NewRevisionDocEvent.objects.create(doc=doc,by=Person.objects.get(name="(System)"),rev='00',type='new_revision',desc='New revision available') NewRevisionDocEvent.objects.create(doc=doc,by=Person.objects.get(name="(System)"),rev='01',type='new_revision',desc='New revision available') @@ -111,8 +117,16 @@ def test_upload_slides(self): self.assertEqual(doc.title, "Test File - with fancy title") self.assertEqual(doc.get_state_slug(), "active") - with io.open(os.path.join(self.materials_dir, "slides", doc.name + "-" + doc.rev + ".pdf")) as f: + basename=f"{doc.name}-{doc.rev}.pdf" + filepath=Path(self.materials_dir) / "slides" / basename + with filepath.open() as f: self.assertEqual(f.read(), content) + ftp_filepath=Path(settings.FTP_DIR) / "slides" / basename + with ftp_filepath.open() as f: + self.assertEqual(f.read(), content) + # This test is very sloppy wrt the actual file content. 
+ # Working with/around that for the moment. + self.assertEqual(retrieve_str("slides", basename), content) # check that posting same name is prevented test_file.seek(0) @@ -136,19 +150,47 @@ def test_change_state(self): doc = Document.objects.get(name=doc.name) self.assertEqual(doc.get_state_slug(), "deleted") - def test_edit_title(self): + @override_settings(MEETECHO_API_CONFIG="fake settings") + @patch("ietf.doc.views_material.SlidesManager") + def test_edit_title(self, mock_slides_manager_cls): doc = self.create_slides() url = urlreverse('ietf.doc.views_material.edit_material', kwargs=dict(name=doc.name, action="title")) login_testing_unauthorized(self, "secretary", url) + self.assertFalse(mock_slides_manager_cls.called) # post r = self.client.post(url, dict(title="New title")) self.assertEqual(r.status_code, 302) doc = Document.objects.get(name=doc.name) self.assertEqual(doc.title, "New title") + self.assertFalse(mock_slides_manager_cls.return_value.send_update.called) + + # assign to a session to see that it now sends updates to Meetecho + session = SessionPresentationFactory(session__group=doc.group, document=doc).session + + # Grab the title on the slides when the API call was made (to be sure it's not before it was updated) + titles_sent = [] + mock_slides_manager_cls.return_value.send_update.side_effect = lambda sess: titles_sent.extend( + list(sess.presentations.values_list("document__title", flat=True)) + ) + + r = self.client.post(url, dict(title="Newer title")) + self.assertEqual(r.status_code, 302) + doc = Document.objects.get(name=doc.name) + self.assertEqual(doc.title, "Newer title") + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.send_update.call_count, 1) + self.assertEqual( + mock_slides_manager_cls.return_value.send_update.call_args, + call(session), + ) + self.assertEqual(titles_sent, ["Newer title"]) - def test_revise(self): + @override_settings(MEETECHO_API_CONFIG="fake settings") + @patch("ietf.doc.views_material.SlidesManager") + def test_revise(self, mock_slides_manager_cls): doc = self.create_slides() session = SessionFactory( @@ -166,11 +208,18 @@ def test_revise(self): url = urlreverse('ietf.doc.views_material.edit_material', kwargs=dict(name=doc.name, action="revise")) login_testing_unauthorized(self, "secretary", url) + self.assertFalse(mock_slides_manager_cls.called) content = "some text" test_file = io.StringIO(content) test_file.name = "unnamed.txt" + # Grab the title on the slides when the API call was made (to be sure it's not before it was updated) + titles_sent = [] + mock_slides_manager_cls.return_value.send_update.side_effect = lambda sess: titles_sent.extend( + list(sess.presentations.values_list("document__title", flat=True)) + ) + # post r = self.client.post(url, dict(title="New title", abstract="New abstract", @@ -181,7 +230,17 @@ def test_revise(self): self.assertEqual(doc.rev, "02") self.assertEqual(doc.title, "New title") self.assertEqual(doc.get_state_slug(), "active") + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.send_update.call_count, 1) + self.assertEqual( + mock_slides_manager_cls.return_value.send_update.call_args, + call(session), + ) + self.assertEqual(titles_sent, ["New title"]) with io.open(os.path.join(doc.get_file_path(), doc.name + "-" + 
doc.rev + ".txt")) as f: self.assertEqual(f.read(), content) + self.assertEqual(retrieve_str("slides", f"{doc.name}-{doc.rev}.txt"), content) + diff --git a/ietf/doc/tests_models.py b/ietf/doc/tests_models.py index 419574c094..d835f646fb 100644 --- a/ietf/doc/tests_models.py +++ b/ietf/doc/tests_models.py @@ -92,7 +92,7 @@ def test_is_downref(self): for source, target in itertools.product(rfcs, rfcs): ref = RelatedDocument.objects.create( source=source, - target=target.docalias.first(), + target=target, relationship_id=rel, ) diff --git a/ietf/doc/tests_notprepped.py b/ietf/doc/tests_notprepped.py new file mode 100644 index 0000000000..f417aa7931 --- /dev/null +++ b/ietf/doc/tests_notprepped.py @@ -0,0 +1,122 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +from django.conf import settings +from django.utils import timezone +from django.urls import reverse as urlreverse + +from pyquery import PyQuery + +from ietf.doc.factories import WgRfcFactory +from ietf.doc.models import StoredObject +from ietf.doc.storage_utils import store_bytes +from ietf.utils.test_utils import TestCase + + +class NotpreppedRfcXmlTests(TestCase): + def test_editor_source_button_visibility(self): + pre_v3 = WgRfcFactory(rfc_number=settings.FIRST_V3_RFC - 1) + first_v3 = WgRfcFactory(rfc_number=settings.FIRST_V3_RFC) + post_v3 = WgRfcFactory(rfc_number=settings.FIRST_V3_RFC + 1) + + for rfc, expect_button in [(pre_v3, False), (first_v3, True), (post_v3, True)]: + r = self.client.get( + urlreverse( + "ietf.doc.views_doc.document_main", kwargs=dict(name=rfc.name) + ) + ) + self.assertEqual(r.status_code, 200) + buttons = PyQuery(r.content)('a.btn:contains("Get editor source")') + if expect_button: + self.assertEqual(len(buttons), 1, msg=f"rfc_number={rfc.rfc_number}") + expected_href = urlreverse( + "ietf.doc.views_doc.rfcxml_notprepped_wrapper", + kwargs=dict(number=rfc.rfc_number), + ) + self.assertEqual( + buttons.attr("href"), + expected_href, + msg=f"rfc_number={rfc.rfc_number}", + ) + else: + self.assertEqual(len(buttons), 0, msg=f"rfc_number={rfc.rfc_number}") + + def test_rfcxml_notprepped(self): + number = settings.FIRST_V3_RFC + stored_name = f"notprepped/rfc{number}.notprepped.xml" + url = f"/doc/rfc{number}/notprepped/" + + # 404 for pre-v3 RFC numbers (no document needed) + r = self.client.get(f"/doc/rfc{number - 1}/notprepped/") + self.assertEqual(r.status_code, 404) + + # 404 when no RFC document exists in the database + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + + # 404 when RFC document exists but has no StoredObject + WgRfcFactory(rfc_number=number) + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + + # 404 when StoredObject exists but backing storage is missing (FileNotFoundError) + now = timezone.now() + StoredObject.objects.create( + store="rfc", + name=stored_name, + sha384="a" * 96, + len=0, + store_created=now, + created=now, + modified=now, + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + + # 200 with correct content-type, attachment disposition, and body when object is fully stored + xml_content = b"test" + store_bytes("rfc", stored_name, xml_content, allow_overwrite=True) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertEqual(r["Content-Type"], "application/xml") + self.assertEqual( + r["Content-Disposition"], + f'attachment; filename="rfc{number}.notprepped.xml"', + ) + self.assertEqual(b"".join(r.streaming_content), xml_content) + + def test_rfcxml_notprepped_wrapper(self): + number = 
settings.FIRST_V3_RFC + + # 404 for pre-v3 RFC numbers (no document needed) + r = self.client.get( + urlreverse( + "ietf.doc.views_doc.rfcxml_notprepped_wrapper", + kwargs=dict(number=number - 1), + ) + ) + self.assertEqual(r.status_code, 404) + + # 404 when no RFC document exists in the database + r = self.client.get( + urlreverse( + "ietf.doc.views_doc.rfcxml_notprepped_wrapper", + kwargs=dict(number=number), + ) + ) + self.assertEqual(r.status_code, 404) + + # 200 with rendered template when RFC document exists + rfc = WgRfcFactory(rfc_number=number) + r = self.client.get( + urlreverse( + "ietf.doc.views_doc.rfcxml_notprepped_wrapper", + kwargs=dict(number=number), + ) + ) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn(str(rfc.rfc_number), q("h1").text()) + download_url = urlreverse( + "ietf.doc.views_doc.rfcxml_notprepped", kwargs=dict(number=number) + ) + self.assertEqual(len(q(f'a.btn[href="{download_url}"]')), 1) diff --git a/ietf/doc/tests_review.py b/ietf/doc/tests_review.py index 33a53178ab..82d1b5c232 100644 --- a/ietf/doc/tests_review.py +++ b/ietf/doc/tests_review.py @@ -2,12 +2,13 @@ # -*- coding: utf-8 -*- -import datetime, os, shutil +from pathlib import Path +import datetime import io -import tarfile, tempfile, mailbox -import email.mime.multipart, email.mime.text, email.utils +import os +import shutil -from mock import patch +from unittest.mock import patch, Mock from requests import Response from django.apps import apps @@ -19,6 +20,7 @@ import debug # pyflakes:ignore +from ietf.doc.storage_utils import retrieve_str import ietf.review.mailarch from ietf.doc.factories import ( NewRevisionDocEventFactory, IndividualDraftFactory, WgDraftFactory, @@ -47,6 +49,7 @@ def setUp(self): self.review_dir = self.tempdir('review') self.old_document_path_pattern = settings.DOCUMENT_PATH_PATTERN settings.DOCUMENT_PATH_PATTERN = self.review_dir + "/{doc.type_id}/" + (Path(settings.FTP_DIR) / "review").mkdir() self.review_subdir = os.path.join(self.review_dir, "review") if not os.path.exists(self.review_subdir): @@ -57,6 +60,17 @@ def tearDown(self): settings.DOCUMENT_PATH_PATTERN = self.old_document_path_pattern super().tearDown() + def verify_review_files_were_written(self, assignment, expected_content = "This is a review\nwith two lines"): + review_file = Path(self.review_subdir) / f"{assignment.review.name}.txt" + content = review_file.read_text() + self.assertEqual(content, expected_content) + self.assertEqual( + retrieve_str("review", review_file.name), + expected_content + ) + review_ftp_file = Path(settings.FTP_DIR) / "review" / review_file.name + self.assertTrue(review_file.samefile(review_ftp_file)) + def test_request_review(self): doc = WgDraftFactory(group__acronym='mars',rev='01') NewRevisionDocEventFactory(doc=doc,rev='01') @@ -137,10 +151,18 @@ def test_request_review_of_rfc(self): url = urlreverse('ietf.doc.views_review.request_review', kwargs={ "name": doc.name }) login_testing_unauthorized(self, "ad", url) - # get should fail + # get should fail - all non draft types 404 + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + + # Can only request reviews on active draft documents + doc = WgDraftFactory(states=[("draft","rfc")]) + url = urlreverse('ietf.doc.views_review.request_review', kwargs={ "name": doc.name }) r = self.client.get(url) self.assertEqual(r.status_code, 403) + + def test_doc_page(self): doc = WgDraftFactory(group__acronym='mars',rev='01') @@ -153,8 +175,8 @@ def test_doc_page(self): # check we can fish it out 
old_doc = WgDraftFactory(name="draft-foo-mars-test") older_doc = WgDraftFactory(name="draft-older") - RelatedDocument.objects.create(source=old_doc, target=older_doc.docalias.first(), relationship_id='replaces') - RelatedDocument.objects.create(source=doc, target=old_doc.docalias.first(), relationship_id='replaces') + RelatedDocument.objects.create(source=old_doc, target=older_doc, relationship_id='replaces') + RelatedDocument.objects.create(source=doc, target=old_doc, relationship_id='replaces') review_req.doc = older_doc review_req.save() @@ -372,6 +394,25 @@ def test_assign_reviewer_after_reject(self): reviewer_label = q("option[value=\"{}\"]".format(reviewer_email.address)).text().lower() self.assertIn("rejected review of document before", reviewer_label) + def test_assign_reviewer_after_withdraw(self): + doc = WgDraftFactory() + review_team = ReviewTeamFactory() + rev_role = RoleFactory(group=review_team,person__user__username='reviewer',person__user__email='reviewer@example.com',name_id='reviewer') + RoleFactory(group=review_team,person__user__username='reviewsecretary',name_id='secr') + review_req = ReviewRequestFactory(team=review_team,doc=doc) + reviewer = rev_role.person.email_set.first() + ReviewAssignmentFactory(review_request=review_req, state_id='withdrawn', reviewer=reviewer) + req_url = urlreverse('ietf.doc.views_review.review_request', kwargs={ "name": doc.name, "request_id": review_req.pk }) + assign_url = urlreverse('ietf.doc.views_review.assign_reviewer', kwargs={ "name": doc.name, "request_id": review_req.pk }) + + login_testing_unauthorized(self, "reviewsecretary", assign_url) + r = self.client.post(assign_url, { "action": "assign", "reviewer": reviewer.pk }) + self.assertRedirects(r, req_url) + review_req = reload_db_objects(review_req) + assignment = review_req.reviewassignment_set.last() + self.assertEqual(assignment.state, ReviewAssignmentStateName.objects.get(slug='assigned')) + self.assertEqual(review_req.state, ReviewRequestStateName.objects.get(slug='assigned')) + def test_previously_reviewed_replaced_doc(self): review_team = ReviewTeamFactory(acronym="reviewteam", name="Review Team", type_id="review", list_email="reviewteam@ietf.org", parent=Group.objects.get(acronym="farfut")) rev_role = RoleFactory(group=review_team,person__user__username='reviewer',person__user__email='reviewer@example.com',person__name='Some Reviewer',name_id='reviewer') @@ -609,112 +650,132 @@ def test_accept_reviewer_assignment_after_reject(self): assignment = reload_db_objects(assignment) self.assertEqual(assignment.state_id, "accepted") - def make_test_mbox_tarball(self, review_req): - mbox_path = os.path.join(self.review_dir, "testmbox.tar.gz") - with tarfile.open(mbox_path, "w:gz") as tar: - with tempfile.NamedTemporaryFile(dir=self.review_dir, suffix=".mbox") as tmp: - mbox = mailbox.mbox(tmp.name) - - # plain text - msg = email.mime.text.MIMEText("Hello,\n\nI have reviewed the document and did not find any problems.\n\nJohn Doe") - msg["From"] = "John Doe " - msg["To"] = review_req.team.list_email - msg["Subject"] = "Review of {}-01".format(review_req.doc.name) - msg["Message-ID"] = email.utils.make_msgid() - msg["Archived-At"] = "" - msg["Date"] = email.utils.formatdate() - - mbox.add(msg) - - # plain text + HTML - msg = email.mime.multipart.MIMEMultipart('alternative') - msg["From"] = "John Doe II " - msg["To"] = review_req.team.list_email - msg["Subject"] = "Review of {}".format(review_req.doc.name) - msg["Message-ID"] = email.utils.make_msgid() - msg["Archived-At"] = "" - - 
msg.attach(email.mime.text.MIMEText("Hi!,\r\nLooks OK!\r\n-John", "plain")) - msg.attach(email.mime.text.MIMEText("

Hi!,

Looks OK!

-John

", "html")) - mbox.add(msg) - - tmp.flush() + @patch('ietf.review.mailarch.requests.post') + def test_retrieve_messages(self, mock_post): + mock_data = { + "results": [ + { + "from": "Alice ", + "subject": "Hello", + "content": "Hi\n This is a really good document.\n", + "message_id": "abc123", + "url": "https://example.com/message", + "date": "2025-04-07T12:00:00", + } + ] + } + mock_post.return_value.json.return_value = mock_data - tar.add(os.path.relpath(tmp.name)) + doc = WgDraftFactory(group__acronym='mars', rev='01') + review_team = ReviewTeamFactory(acronym="reviewteam", name="Review Team", type_id="review", list_email="reviewteam@ietf.org", parent=Group.objects.get(acronym="farfut")) + rev_role = RoleFactory(group=review_team, person__user__username='reviewer', person__user__email='reviewer@example.com', name_id='reviewer') + RoleFactory(group=review_team, person__user__username='reviewsecretary', person__user__email='reviewsecretary@example.com', name_id='secr') + review_req = ReviewRequestFactory(doc=doc, team=review_team, type_id='early', state_id='assigned', requested_by=rev_role.person, deadline=timezone.now() + datetime.timedelta(days=20)) + ReviewAssignmentFactory(review_request=review_req, reviewer=rev_role.person.email_set.first(), state_id='accepted') - mbox.close() + query_data = ietf.review.mailarch.construct_query_data(doc, review_team, query=None) + response = ietf.review.mailarch.retrieve_messages(query_data) - return mbox_path + self.assertEqual(len(response), 1) + self.assertEqual(response[0]['from'], 'Alice ') + self.assertEqual(response[0]['splitfrom'], ('Alice', 'alice@example.com')) + self.assertEqual(response[0]['subject'], 'Hello') + self.assertEqual(response[0]['content'], 'Hi\n This is a really good document.') + self.assertEqual(response[0]['message_id'], 'abc123') + self.assertEqual(response[0]['url'], 'https://example.com/message') + self.assertEqual(response[0]['utcdate'], ('2025-04-07', '12:00:00')) - def test_search_mail_archive(self): - doc = WgDraftFactory(group__acronym='mars',rev='01') + def test_construct_query_data(self): + doc = WgDraftFactory(group__acronym='mars', rev='01') review_team = ReviewTeamFactory(acronym="reviewteam", name="Review Team", type_id="review", list_email="reviewteam@ietf.org", parent=Group.objects.get(acronym="farfut")) - rev_role = RoleFactory(group=review_team,person__user__username='reviewer',person__user__email='reviewer@example.com',name_id='reviewer') - RoleFactory(group=review_team,person__user__username='reviewsecretary',person__user__email='reviewsecretary@example.com',name_id='secr') - review_req = ReviewRequestFactory(doc=doc,team=review_team,type_id='early',state_id='assigned',requested_by=rev_role.person,deadline=timezone.now()+datetime.timedelta(days=20)) + data = ietf.review.mailarch.construct_query_data(doc, review_team, query=None) + self.assertEqual(data['start_date'], (date_today() - datetime.timedelta(days=180)).isoformat()) + self.assertEqual(data['email_list'], 'reviewteam') + self.assertEqual(data['query_value'], doc.name) + self.assertEqual(data['query'], f'subject:({doc.name})') + self.assertEqual(data['limit'], '30') + + @patch('ietf.doc.views_review.requests.post') + def test_search_mail_archive(self, mock_post): + doc = WgDraftFactory(group__acronym='mars', rev='01') + review_team = ReviewTeamFactory(acronym="reviewteam", name="Review Team", type_id="review", list_email="reviewteam@ietf.org", parent=Group.objects.get(acronym="farfut")) + rev_role = RoleFactory(group=review_team, 
person__user__username='reviewer', person__user__email='reviewer@example.com', name_id='reviewer') + RoleFactory(group=review_team, person__user__username='reviewsecretary', person__user__email='reviewsecretary@example.com', name_id='secr') + review_req = ReviewRequestFactory(doc=doc, team=review_team, type_id='early', state_id='assigned', requested_by=rev_role.person, deadline=timezone.now() + datetime.timedelta(days=20)) assignment = ReviewAssignmentFactory(review_request=review_req, reviewer=rev_role.person.email_set.first(), state_id='accepted') + mock_data = { + "results": [ + { + "from": "Alice ", + "subject": "Review of {}-01".format(review_req.doc.name), + "content": "Hi\n This is a really good document.\n", + "message_id": "abc123", + "url": "https://example.com/message", + "date": "2025-04-07T12:00:00", + }, + { + "from": "Joe ", + "subject": "Review of {}".format(review_req.doc.name), + "content": "Hi\n I believe this is the best document.\n", + "message_id": "abc456", + "url": "https://example.com/message", + "date": "2025-04-07T12:00:00", + } + ] + } + response1 = Mock() + response1.json.return_value = mock_data + + response2 = Mock() + response2.json.return_value = mock_data + + response3 = Mock() + response3.json.return_value = {"results": []} + + mock_post.side_effect = [response1, response2, response3] + # test URL construction - query_urls = ietf.review.mailarch.construct_query_urls(doc, review_team) - self.assertTrue(review_req.doc.name in query_urls["query_data_url"]) + query_data = ietf.review.mailarch.construct_query_data(doc, review_team) + self.assertTrue(review_req.doc.name in query_data["query_value"]) - # test parsing - mbox_path = self.make_test_mbox_tarball(review_req) + url = urlreverse('ietf.doc.views_review.search_mail_archive', kwargs={"name": doc.name, "assignment_id": assignment.pk}) + url2 = urlreverse('ietf.doc.views_review.search_mail_archive', kwargs={"name": doc.name, "acronym": review_team.acronym}) + login_testing_unauthorized(self, "reviewsecretary", url) - try: - # mock URL generator and point it to local file - for this - # to work, the module (and not the function) must be - # imported in the view - real_fn = ietf.review.mailarch.construct_query_urls - ietf.review.mailarch.construct_query_urls = lambda doc, team, query=None: { "query_data_url": "file://" + os.path.abspath(mbox_path) } - url = urlreverse('ietf.doc.views_review.search_mail_archive', kwargs={ "name": doc.name, "assignment_id": assignment.pk }) - url2 = urlreverse('ietf.doc.views_review.search_mail_archive', kwargs={ "name": doc.name, "acronym": review_team.acronym }) - login_testing_unauthorized(self, "reviewsecretary", url) - - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - messages = r.json()["messages"] - self.assertEqual(len(messages), 2) - - r = self.client.get(url2) - self.assertEqual(r.status_code, 200) - messages = r.json()["messages"] - self.assertEqual(len(messages), 2) - - today = date_today(datetime.timezone.utc) - - self.assertEqual(messages[0]["url"], "https://www.example.com/testmessage") - self.assertTrue("John Doe" in messages[0]["content"]) - self.assertEqual(messages[0]["subject"], "Review of {}-01".format(review_req.doc.name)) - self.assertEqual(messages[0]["revision_guess"], "01") - self.assertEqual(messages[0]["splitfrom"], ["John Doe", "johndoe@example.com"]) - self.assertEqual(messages[0]["utcdate"][0], today.isoformat()) - - self.assertEqual(messages[1]["url"], "https://www.example.com/testmessage2") - self.assertTrue("Looks OK" in 
messages[1]["content"]) - self.assertTrue("" not in messages[1]["content"]) - self.assertEqual(messages[1]["subject"], "Review of {}".format(review_req.doc.name)) - self.assertFalse('revision_guess' in messages[1]) - self.assertEqual(messages[1]["splitfrom"], ["John Doe II", "johndoe2@example.com"]) - self.assertEqual(messages[1]["utcdate"][0], "") - - - # Test failure to return mailarch results - no_result_path = os.path.join(self.review_dir, "mailarch_no_result.html") - with io.open(no_result_path, "w") as f: - f.write('Content-Type: text/html\n\n
<html><body>No results found</body></html>
') - ietf.review.mailarch.construct_query_urls = lambda doc, team, query=None: { "query_data_url": "file://" + os.path.abspath(no_result_path) } - - url = urlreverse('ietf.doc.views_review.search_mail_archive', kwargs={ "name": doc.name, "assignment_id": assignment.pk }) - - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - result = r.json() - self.assertNotIn('messages', result) - self.assertIn('No results found', result['error']) - - finally: - ietf.review.mailarch.construct_query_urls = real_fn + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + messages = r.json()["messages"] + self.assertEqual(len(messages), 2) + + r = self.client.get(url2) + self.assertEqual(r.status_code, 200) + messages = r.json()["messages"] + self.assertEqual(len(messages), 2) + + self.assertEqual(messages[0]["url"], "https://example.com/message") + self.assertTrue("Hi" in messages[0]["content"]) + self.assertEqual(messages[0]["subject"], "Review of {}-01".format(review_req.doc.name)) + self.assertEqual(messages[0]["revision_guess"], "01") + self.assertEqual(messages[0]["splitfrom"], ["Alice", "alice@example.com"]) + self.assertEqual(messages[0]["utcdate"], ['2025-04-07', '12:00:00']) + + self.assertEqual(messages[1]["url"], "https://example.com/message") + self.assertTrue("Hi" in messages[1]["content"]) + self.assertTrue("" not in messages[1]["content"]) + self.assertEqual(messages[1]["subject"], "Review of {}".format(review_req.doc.name)) + self.assertFalse('revision_guess' in messages[1]) + self.assertEqual(messages[1]["splitfrom"], ["Joe", "joe@example.com"]) + self.assertEqual(messages[1]["utcdate"], ['2025-04-07', '12:00:00']) + + # Test failure to return mailarch results + url = urlreverse('ietf.doc.views_review.search_mail_archive', kwargs={"name": doc.name, "assignment_id": assignment.pk}) + + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + result = r.json() + self.assertNotIn('messages', result) + self.assertIn('No results found', result['error']) def test_submit_unsolicited_review_choose_team(self): doc = WgDraftFactory(group__acronym='mars', rev='01') @@ -761,7 +822,7 @@ def test_complete_review_upload_content(self): r = self.client.get(url) self.assertEqual(r.status_code, 200) self.assertContains(r, assignment.review_request.team.list_email) - for author in assignment.review_request.doc.authors(): + for author in assignment.review_request.doc.author_persons(): self.assertContains(r, author.formatted_email()) # faulty post @@ -803,8 +864,7 @@ def test_complete_review_upload_content(self): self.assertTrue(assignment.review_request.team.acronym.lower() in assignment.review.name) self.assertTrue(assignment.review_request.doc.rev in assignment.review.name) - with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f: - self.assertEqual(f.read(), "This is a review\nwith two lines") + self.verify_review_files_were_written(assignment) self.assertEqual(len(outbox), 1) self.assertIn(assignment.review_request.team.list_email, outbox[0]["To"]) @@ -858,12 +918,14 @@ def test_complete_review_enter_content(self): completed_time_diff = timezone.now() - assignment.completed_on self.assertLess(completed_time_diff, datetime.timedelta(seconds=10)) - with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f: - self.assertEqual(f.read(), "This is a review\nwith two lines") + self.verify_review_files_were_written(assignment) self.assertEqual(len(outbox), 1) self.assertIn(assignment.review_request.team.list_email, 
outbox[0]["To"]) - self.assertIn("This is a review", get_payload_text(outbox[0])) + payload = get_payload_text(outbox[0]) + self.assertIn("This is a review", payload) + self.assertIn(f"Document: {assignment.review_request.doc.name}", payload) + self.assertIn(f"Title: {assignment.review_request.doc.title}", payload) self.assertIn(settings.MAILING_LIST_ARCHIVE_URL, assignment.review.external_url) @@ -899,8 +961,7 @@ def test_complete_review_enter_content_by_secretary(self): self.assertLess(event0_time_diff, datetime.timedelta(seconds=10)) self.assertEqual(events[1].time, datetime.datetime(2012, 12, 24, 12, 13, 14, tzinfo=DEADLINE_TZINFO)) - with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f: - self.assertEqual(f.read(), "This is a review\nwith two lines") + self.verify_review_files_were_written(assignment) self.assertEqual(len(outbox), 1) self.assertIn(assignment.review_request.team.list_email, outbox[0]["To"]) @@ -986,8 +1047,7 @@ def test_complete_review_link_to_mailing_list(self, mock): assignment = reload_db_objects(assignment) self.assertEqual(assignment.state_id, "completed") - with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f: - self.assertEqual(f.read(), "This is a review\nwith two lines") + self.verify_review_files_were_written(assignment) self.assertEqual(len(outbox), 0) self.assertTrue("http://example.com" in assignment.review.external_url) @@ -1036,8 +1096,7 @@ def test_complete_unsolicited_review_link_to_mailing_list_by_secretary(self, moc self.assertEqual(assignment.reviewer, rev_role.person.role_email('reviewer')) self.assertEqual(assignment.state_id, "completed") - with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f: - self.assertEqual(f.read(), "This is a review\nwith two lines") + self.verify_review_files_were_written(assignment) self.assertEqual(len(outbox), 0) self.assertTrue("http://example.com" in assignment.review.external_url) @@ -1145,8 +1204,9 @@ def test_revise_review_enter_content(self): self.assertLess(event_time_diff, datetime.timedelta(seconds=10)) self.assertTrue('revised' in event1.desc.lower()) - with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f: - self.assertEqual(f.read(), "This is a review\nwith two lines") + # See https://github.com/ietf-tools/datatracker/issues/6941 + # These are _not_ getting written as a new version as intended. 
+ self.verify_review_files_were_written(assignment) self.assertEqual(len(outbox), 0) @@ -1173,6 +1233,8 @@ def test_revise_review_enter_content(self): # Ensure that a new event was created for the new revision (#2590) self.assertNotEqual(event1.id, event2.id) + self.verify_review_files_were_written(assignment, "This is a revised review") + self.assertEqual(len(outbox), 0) def test_edit_comment(self): diff --git a/ietf/doc/tests_rsab_ballot.py b/ietf/doc/tests_rsab_ballot.py index ab2a58c004..9086106ba9 100644 --- a/ietf/doc/tests_rsab_ballot.py +++ b/ietf/doc/tests_rsab_ballot.py @@ -41,7 +41,7 @@ def test_issue_ballot_button_presence(self): self.client.login(username="rsab-chair", password="rsab-chair+password") for name in [ - doc.canonical_name() + doc.name for doc in (individual_draft, wg_draft, rg_draft, ed_rfc) ]: url = urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=name)) @@ -333,34 +333,19 @@ def test_take_and_email_position(self): r = self.client.get(url) self.assertEqual(r.status_code, 200) - r = self.client.post( - url, - dict(position="yes", comment="oib239sb", send_mail="Save and send email"), - ) - self.assertEqual(r.status_code, 302) - e = draft.latest_event(BallotPositionDocEvent) - self.assertEqual(e.pos.slug, "yes") - self.assertEqual(e.comment, "oib239sb") - - url = ( - urlreverse( - "ietf.doc.views_ballot.send_ballot_comment", - kwargs=dict(name=draft.name, ballot_id=ballot.pk), - ) - + self.balloter - ) - - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - r = self.client.post( url, dict( + position="yes", + comment="oib239sb", + send_mail="Save and send email", cc_choices=["doc_authors", "doc_group_chairs", "doc_group_mail_list"], - body="Stuff", ), ) self.assertEqual(r.status_code, 302) + e = draft.latest_event(BallotPositionDocEvent) + self.assertEqual(e.pos.slug, "yes") + self.assertEqual(e.comment, "oib239sb") self.assertEqual(len(outbox), 1) self.assertNotIn("discuss-criteria", get_payload_text(outbox[0])) @@ -532,31 +517,19 @@ def test_take_and_email_position(self): r = self.client.get(url) self.assertEqual(r.status_code, 200) - r = self.client.post( - url, - dict(position="yes", comment="oib239sb", send_mail="Save and send email"), - ) - self.assertEqual(r.status_code, 302) - e = draft.latest_event(BallotPositionDocEvent) - self.assertEqual(e.pos.slug, "yes") - self.assertEqual(e.comment, "oib239sb") - - url = urlreverse( - "ietf.doc.views_ballot.send_ballot_comment", - kwargs=dict(name=draft.name, ballot_id=ballot.pk), - ) - - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - r = self.client.post( url, dict( + position="yes", + comment="oib239sb", + send_mail="Save and send email", cc_choices=["doc_authors", "doc_group_chairs", "doc_group_mail_list"], - body="Stuff", ), ) self.assertEqual(r.status_code, 302) + e = draft.latest_event(BallotPositionDocEvent) + self.assertEqual(e.pos.slug, "yes") + self.assertEqual(e.comment, "oib239sb") self.assertEqual(len(outbox), 1) diff --git a/ietf/doc/tests_statement.py b/ietf/doc/tests_statement.py index a19b146d70..bb097b1828 100644 --- a/ietf/doc/tests_statement.py +++ b/ietf/doc/tests_statement.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2023, All Rights Reserved +# Copyright The IETF Trust 2023-2025, All Rights Reserved import debug # pyflakes:ignore @@ -13,7 +13,8 @@ from django.urls import reverse as urlreverse from ietf.doc.factories import StatementFactory, DocEventFactory -from ietf.doc.models import Document, DocAlias, State, NewRevisionDocEvent +from 
ietf.doc.models import Document, State, NewRevisionDocEvent +from ietf.doc.storage_utils import retrieve_str from ietf.group.models import Group from ietf.person.factories import PersonFactory from ietf.utils.mail import outbox, empty_outbox @@ -76,9 +77,7 @@ def test_statement_doc_view(self): doc.set_state(State.objects.get(type_id="statement", slug="replaced")) doc2 = StatementFactory() - doc2.relateddocument_set.create( - relationship_id="replaces", target=doc.docalias.first() - ) + doc2.relateddocument_set.create(relationship_id="replaces", target=doc) response = self.client.get(url) self.assertEqual(response.status_code, 200) q = PyQuery(response.content) @@ -187,8 +186,16 @@ def test_submit(self): self.assertEqual("%02d" % (int(rev) + 1), doc.rev) if postdict["statement_submission"] == "enter": self.assertEqual(f"# {username}", doc.text()) + self.assertEqual( + retrieve_str("statement", f"{doc.name}-{doc.rev}.md"), + f"# {username}" + ) else: self.assertEqual("not valid pdf", doc.text()) + self.assertEqual( + retrieve_str("statement", f"{doc.name}-{doc.rev}.pdf"), + "not valid pdf" + ) self.assertEqual(docevent_count + 1, doc.docevent_set.count()) self.assertEqual(0, len(outbox)) rev = doc.rev @@ -247,7 +254,6 @@ def test_start_new_statement(self): name=name, type_id="statement" ).first() self.assertIsNotNone(statement) - self.assertIsNotNone(DocAlias.objects.filter(name=name).first()) self.assertEqual(statement.title, postdict["title"]) self.assertEqual(statement.rev, "00") self.assertEqual(statement.get_state_slug(), "active") @@ -258,8 +264,16 @@ def test_start_new_statement(self): self.assertIsNotNone(statement.history_set.last().latest_event(type="published_statement")) if postdict["statement_submission"] == "enter": self.assertEqual(statement.text_or_error(), "some stuff") + self.assertEqual( + retrieve_str("statement", statement.uploaded_filename), + "some stuff" + ) else: self.assertTrue(statement.uploaded_filename.endswith("pdf")) + self.assertEqual( + retrieve_str("statement", f"{statement.name}-{statement.rev}.pdf"), + "not valid pdf" + ) self.assertEqual(len(outbox), 0) existing_statement = StatementFactory() @@ -358,3 +372,36 @@ def test_submit_non_markdown_formats(self): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertTrue("Unexpected content" in q("#id_statement_file").next().text()) + + def test_change_statement_state(self): + statement = StatementFactory() # starts in "active" state + active_state = State.objects.get(type_id="statement", slug="active") + replaced_state = State.objects.get(type_id="statement", slug="replaced") + url = urlreverse( + "ietf.doc.views_statement.change_statement_state", + kwargs={"name": statement.name}, + ) + + events_before = statement.docevent_set.count() + login_testing_unauthorized(self, "secretary", url) + + r = self.client.get(url) + self.assertEqual(r.status_code,200) + + r = self.client.post(url, {"state": active_state.pk}, follow=True) + self.assertContains(r, "State not changed", status_code=200) + statement = Document.objects.get(pk=statement.pk) # bust the state cache + self.assertEqual(statement.get_state(), active_state) + + r = self.client.post(url, {"state": replaced_state.pk}, follow=True) + self.assertContains(r, "State changed to", status_code=200) + statement = Document.objects.get(pk=statement.pk) # bust the state cache + self.assertEqual(statement.get_state(), replaced_state) + + events_after = statement.docevent_set.count() + self.assertEqual(events_after, events_before + 1) + event = 
statement.docevent_set.first() + self.assertEqual(event.type, "changed_state") + self.assertEqual( + event.desc, "Statement State changed to Replaced from Active" + ) diff --git a/ietf/doc/tests_status_change.py b/ietf/doc/tests_status_change.py index 7afd5e9d29..da1a4f1906 100644 --- a/ietf/doc/tests_status_change.py +++ b/ietf/doc/tests_status_change.py @@ -4,6 +4,7 @@ import io import os +from pathlib import Path import debug # pyflakes:ignore @@ -14,9 +15,11 @@ from django.conf import settings from django.urls import reverse as urlreverse -from ietf.doc.factories import DocumentFactory, IndividualRfcFactory, WgRfcFactory, DocEventFactory -from ietf.doc.models import ( Document, DocAlias, State, DocEvent, +from ietf.doc.factories import ( DocumentFactory, IndividualRfcFactory, + WgRfcFactory, DocEventFactory, WgDraftFactory ) +from ietf.doc.models import ( Document, State, DocEvent, BallotPositionDocEvent, NewRevisionDocEvent, TelechatDocEvent, WriteupDocEvent ) +from ietf.doc.storage_utils import retrieve_str from ietf.doc.utils import create_ballot_if_not_open from ietf.doc.views_status_change import default_approval_text from ietf.group.models import Person @@ -69,12 +72,12 @@ def test_start_review(self): statchg_relation_row_blah="tois") ) self.assertEqual(r.status_code, 302) - status_change = Document.objects.get(name='status-change-imaginary-new') + status_change = Document.objects.get(name='status-change-imaginary-new') self.assertEqual(status_change.get_state('statchg').slug,'adrev') self.assertEqual(status_change.rev,'00') self.assertEqual(status_change.ad.name,'Areað Irector') self.assertEqual(status_change.notify,'ipu@ietf.org') - self.assertTrue(status_change.relateddocument_set.filter(relationship__slug='tois',target__docs__name='draft-ietf-random-thing')) + self.assertTrue(status_change.relateddocument_set.filter(relationship__slug='tois',target__name='rfc9999')) # Verify that it's possible to start a status change without a responsible ad. 
r = self.client.post(url,dict( @@ -184,8 +187,8 @@ def test_edit_notices(self): self.assertTrue(doc.latest_event(DocEvent,type="added_comment").desc.startswith('Notification list changed')) # Some additional setup so there's something to put in a generated notify list - doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9999'),relationship_id='tois') - doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9998'),relationship_id='tohist') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9999'),relationship_id='tois') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9998'),relationship_id='tohist') # Ask the form to regenerate the list r = self.client.post(url,dict(regenerate_addresses="1")) @@ -288,8 +291,8 @@ def test_edit_lc(self): login_testing_unauthorized(self, "ad", url) # additional setup - doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9999'),relationship_id='tois') - doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9998'),relationship_id='tohist') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9999'),relationship_id='tois') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9998'),relationship_id='tohist') doc.ad = Person.objects.get(name='Ad No2') doc.save_with_history([DocEvent.objects.create(doc=doc, rev=doc.rev, type="changed_document", by=Person.objects.get(user__username="secretary"), desc="Test")]) @@ -344,8 +347,8 @@ def test_approve(self): login_testing_unauthorized(self, "secretary", url) # Some additional setup - doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9999'),relationship_id='tois') - doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9998'),relationship_id='tohist') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9999'),relationship_id='tois') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9998'),relationship_id='tohist') create_ballot_if_not_open(None, doc, Person.objects.get(user__username="secretary"), "statchg") doc.set_state(State.objects.get(slug='appr-pend',type='statchg')) @@ -385,10 +388,10 @@ def approval_pend_notice_test_helper(self, role): url = urlreverse('ietf.doc.views_status_change.change_state',kwargs=dict(name=doc.name)) # Add some status change related documents - doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9999'),relationship_id='tois') - doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9998'),relationship_id='tohist') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9999'),relationship_id='tois') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9998'),relationship_id='tohist') # And a non-status change related document - doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc14'),relationship_id='updates') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc14'),relationship_id='updates') login_testing_unauthorized(self, role, url) empty_outbox() @@ -410,9 +413,9 @@ def approval_pend_notice_test_helper(self, role): self.assertTrue(notification['Subject'].startswith('Approved:')) notification_text = get_payload_text(notification) self.assertIn('The AD has approved changing the status', notification_text) - self.assertIn(DocAlias.objects.get(name='rfc9999').document.canonical_name(), notification_text) - self.assertIn(DocAlias.objects.get(name='rfc9998').document.canonical_name(), 
notification_text) - self.assertNotIn(DocAlias.objects.get(name='rfc14').document.canonical_name(), notification_text) + self.assertIn(Document.objects.get(name='rfc9999').name, notification_text) + self.assertIn(Document.objects.get(name='rfc9998').name, notification_text) + self.assertNotIn(Document.objects.get(name='rfc14').name, notification_text) self.assertNotIn('No value found for', notification_text) # make sure all interpolation values were set else: self.assertEqual(len(outbox), 0) @@ -432,8 +435,8 @@ def test_edit_relations(self): login_testing_unauthorized(self, "secretary", url) # Some additional setup - doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9999'),relationship_id='tois') - doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9998'),relationship_id='tohist') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9999'),relationship_id='tois') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9998'),relationship_id='tohist') # get r = self.client.get(url) @@ -483,16 +486,63 @@ def verify_relations(doc,target_name,status): verify_relations(doc,'rfc9998','tobcp' ) verify_relations(doc,'rfc14' ,'tohist') self.assertTrue(doc.latest_event(DocEvent,type="added_comment").desc.startswith('Affected RFC list changed.')) + + def test_clear_ballot(self): + doc = Document.objects.get(name='status-change-imaginary-mid-review') + url = urlreverse('ietf.doc.views_ballot.clear_ballot',kwargs=dict(name=doc.name, ballot_type_slug="statchg")) + login_testing_unauthorized(self, "secretary", url) + + # Some additional setup + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9999'),relationship_id='tois') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9998'),relationship_id='tohist') + create_ballot_if_not_open(None, doc, Person.objects.get(user__username="secretary"), "statchg") + doc.set_state(State.objects.get(slug='iesgeval',type='statchg')) + old_ballot = doc.ballot_open("statchg") + self.assertIsNotNone(old_ballot) + + r = self.client.post(url, dict()) + self.assertEqual(r.status_code,302) + new_ballot = doc.ballot_open("statchg") + self.assertIsNotNone(new_ballot) + self.assertNotEqual(new_ballot, old_ballot) + self.assertEqual(doc.get_state_slug("statchg"),"iesgeval") + + def test_clear_deferred_ballot(self): + doc = Document.objects.get(name='status-change-imaginary-mid-review') + url = urlreverse('ietf.doc.views_ballot.clear_ballot',kwargs=dict(name=doc.name, ballot_type_slug="statchg")) + login_testing_unauthorized(self, "secretary", url) + # Some additional setup + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9999'),relationship_id='tois') + doc.relateddocument_set.create(target=Document.objects.get(name='rfc9998'),relationship_id='tohist') + create_ballot_if_not_open(None, doc, Person.objects.get(user__username="secretary"), "statchg") + doc.set_state(State.objects.get(slug='defer',type='statchg')) + old_ballot = doc.ballot_open("statchg") + self.assertIsNotNone(old_ballot) + + r = self.client.post(url, dict()) + self.assertEqual(r.status_code,302) + new_ballot = doc.ballot_open("statchg") + self.assertIsNotNone(new_ballot) + self.assertNotEqual(new_ballot, old_ballot) + self.assertEqual(doc.get_state_slug("statchg"),"iesgeval") + def setUp(self): super().setUp() - IndividualRfcFactory(alias2__name='rfc14',name='draft-was-never-issued',std_level_id='unkn') - WgRfcFactory(alias2__name='rfc9999',name='draft-ietf-random-thing',std_level_id='ps') - 
WgRfcFactory(alias2__name='rfc9998',name='draft-ietf-random-other-thing',std_level_id='inf') + IndividualRfcFactory(rfc_number=14,std_level_id='unkn') # draft was never issued + + rfc = WgRfcFactory(rfc_number=9999,std_level_id='ps') + draft = WgDraftFactory(name='draft-ietf-random-thing') + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) + + rfc = WgRfcFactory(rfc_number=9998,std_level_id='inf') + draft = WgDraftFactory(name='draft-ietf-random-other-thing') + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) + DocumentFactory(type_id='statchg',name='status-change-imaginary-mid-review',notify='notify@example.org') class StatusChangeSubmitTests(TestCase): - settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['STATUS_CHANGE_PATH'] + settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['STATUS_CHANGE_PATH', 'FTP_PATH'] def test_initial_submission(self): doc = Document.objects.get(name='status-change-imaginary-mid-review') url = urlreverse('ietf.doc.views_status_change.submit',kwargs=dict(name=doc.name)) @@ -508,15 +558,27 @@ def test_initial_submission(self): # Right now, nothing to test - we let people put whatever the web browser will let them put into that textbox # sane post using textbox - path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev)) self.assertEqual(doc.rev,'00') - self.assertFalse(os.path.exists(path)) + basename = f"{doc.name}-{doc.rev}.txt" + filepath = Path(settings.STATUS_CHANGE_PATH) / basename + ftp_filepath = Path(settings.FTP_DIR) / "status-changes" / basename + self.assertFalse(filepath.exists()) + self.assertFalse(ftp_filepath.exists()) + # TODO-BLOBSTORE: next assert is disabled because we currently suppress all exceptions + # with self.assertRaises(FileNotFoundError): + # retrieve_str("statchg",basename) r = self.client.post(url,dict(content="Some initial review text\n",submit_response="1")) self.assertEqual(r.status_code,302) doc = Document.objects.get(name='status-change-imaginary-mid-review') self.assertEqual(doc.rev,'00') - with io.open(path) as f: + with filepath.open() as f: self.assertEqual(f.read(),"Some initial review text\n") + with ftp_filepath.open() as f: + self.assertEqual(f.read(),"Some initial review text\n") + self.assertEqual( + retrieve_str("statchg", basename), + "Some initial review text\n" + ) self.assertTrue( "mid-review-00" in doc.latest_event(NewRevisionDocEvent).desc) def test_subsequent_submission(self): @@ -527,7 +589,7 @@ def test_subsequent_submission(self): # A little additional setup # doc.rev is u'00' per the test setup - double-checking that here - if it fails, the breakage is in setUp self.assertEqual(doc.rev,'00') - path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev)) + path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.name, doc.rev)) with io.open(path,'w') as f: f.write('This is the old proposal.') f.close() @@ -553,16 +615,21 @@ def test_subsequent_submission(self): self.assertContains(r, "does not appear to be a text file") # sane post uploading a file - test_file = StringIO("This is a new proposal.") + test_content = "This is a new proposal." 
+ test_file = StringIO(test_content) test_file.name = "unnamed" r = self.client.post(url,dict(txt=test_file,submit_response="1")) self.assertEqual(r.status_code, 302) doc = Document.objects.get(name='status-change-imaginary-mid-review') self.assertEqual(doc.rev,'01') - path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev)) + path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.name, doc.rev)) with io.open(path) as f: - self.assertEqual(f.read(),"This is a new proposal.") + self.assertEqual(f.read(), test_content) f.close() + self.assertEqual( + retrieve_str("statchg", f"{doc.name}-{doc.rev}.txt"), + test_content + ) self.assertTrue( "mid-review-01" in doc.latest_event(NewRevisionDocEvent).desc) # verify reset text button works @@ -580,3 +647,6 @@ def test_subsequent_submission(self): def setUp(self): super().setUp() DocumentFactory(type_id='statchg',name='status-change-imaginary-mid-review',notify='notify@example.org') + ftp_subdir=Path(settings.FTP_DIR)/"status-changes" + if not ftp_subdir.exists(): + ftp_subdir.mkdir() diff --git a/ietf/doc/tests_subseries.py b/ietf/doc/tests_subseries.py new file mode 100644 index 0000000000..8186dc4dec --- /dev/null +++ b/ietf/doc/tests_subseries.py @@ -0,0 +1,49 @@ +# Copyright The IETF Trust 2023, All Rights Reserved +# -*- coding: utf-8 -*- + +import debug # pyflakes:ignore + +from pyquery import PyQuery + +from django.urls import reverse as urlreverse + +from ietf.doc.factories import SubseriesFactory, RfcFactory +from ietf.doc.models import Document +from ietf.utils.test_utils import TestCase + +class SubseriesTests(TestCase): + + def test_index_and_view(self): + types = ["bcp", "std", "fyi"] + for type_id in types: + doc = SubseriesFactory(type_id=type_id) + self.assertEqual(len(doc.contains()), 1) + rfc = doc.contains()[0] + # Index + url = urlreverse("ietf.doc.views_search.index_subseries", kwargs=dict(type_id=type_id)) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIsNotNone(q(f"#{doc.name}")) + self.assertIn(f"RFC {rfc.name[3:]}",q(f"#{doc.name}").text()) + # Subseries document view + url = urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn(f"{doc.type_id.upper()} {doc.name[3:]} consists of:",q("h2").text()) + self.assertIn(f"RFC {rfc.name[3:]}", q("div.row p a").text()) + # RFC view + url = urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=rfc.name)) + r = self.client.get(url) + q = PyQuery(r.content) + self.assertIn(f"RFC {rfc.name[3:]} also known as {type_id.upper()} {doc.name[3:]}", q("h1").text()) + bcp = Document.objects.filter(type_id="bcp").last() + bcp.relateddocument_set.create(relationship_id="contains", target=RfcFactory()) + for rfc in bcp.contains(): + url = urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=rfc.name)) + r = self.client.get(url) + q = PyQuery(r.content) + self.assertIn(f"RFC {rfc.name[3:]} part of BCP {bcp.name[3:]}", q("h1").text()) + + diff --git a/ietf/doc/tests_tasks.py b/ietf/doc/tests_tasks.py new file mode 100644 index 0000000000..2e2d65463f --- /dev/null +++ b/ietf/doc/tests_tasks.py @@ -0,0 +1,317 @@ +# Copyright The IETF Trust 2024-2026, All Rights Reserved + +import datetime +from unittest import mock + +from pathlib import Path + +from celery.exceptions import Retry +from django.conf import settings +from django.test.utils import 
override_settings +from django.utils import timezone +from typesense import exceptions as typesense_exceptions + +from ietf.utils.test_utils import TestCase +from ietf.utils.timezone import datetime_today + +from .factories import DocumentFactory, NewRevisionDocEventFactory, WgRfcFactory +from .models import Document, NewRevisionDocEvent +from .tasks import ( + expire_ids_task, + expire_last_calls_task, + generate_draft_bibxml_files_task, + generate_idnits2_rfcs_obsoleted_task, + generate_idnits2_rfc_status_task, + investigate_fragment_task, + notify_expirations_task, + rebuild_searchindex_task, + update_rfc_searchindex_task, +) + + +class TaskTests(TestCase): + @mock.patch("ietf.doc.tasks.in_draft_expire_freeze") + @mock.patch("ietf.doc.tasks.get_expired_drafts") + @mock.patch("ietf.doc.tasks.expirable_drafts") + @mock.patch("ietf.doc.tasks.send_expire_notice_for_draft") + @mock.patch("ietf.doc.tasks.expire_draft") + @mock.patch("ietf.doc.tasks.clean_up_draft_files") + def test_expire_ids_task( + self, + clean_up_draft_files_mock, + expire_draft_mock, + send_expire_notice_for_draft_mock, + expirable_drafts_mock, + get_expired_drafts_mock, + in_draft_expire_freeze_mock, + ): + # set up mocks + in_draft_expire_freeze_mock.return_value = False + doc, other_doc = DocumentFactory.create_batch(2) + doc.expires = datetime_today() + get_expired_drafts_mock.return_value = [doc, other_doc] + expirable_drafts_mock.side_effect = [ + Document.objects.filter(pk=doc.pk), + Document.objects.filter(pk=other_doc.pk), + ] + + # call task + expire_ids_task() + + # check results + self.assertTrue(in_draft_expire_freeze_mock.called) + self.assertEqual(expirable_drafts_mock.call_count, 2) + self.assertEqual(send_expire_notice_for_draft_mock.call_count, 1) + self.assertEqual(send_expire_notice_for_draft_mock.call_args[0], (doc,)) + self.assertEqual(expire_draft_mock.call_count, 1) + self.assertEqual(expire_draft_mock.call_args[0], (doc,)) + self.assertTrue(clean_up_draft_files_mock.called) + + # test that an exception is raised + in_draft_expire_freeze_mock.side_effect = RuntimeError + with self.assertRaises(RuntimeError): + expire_ids_task() + + @mock.patch("ietf.doc.tasks.send_expire_warning_for_draft") + @mock.patch("ietf.doc.tasks.get_soon_to_expire_drafts") + def test_notify_expirations_task(self, get_drafts_mock, send_warning_mock): + # Set up mocks + get_drafts_mock.return_value = ["sentinel"] + notify_expirations_task() + self.assertEqual(send_warning_mock.call_count, 1) + self.assertEqual(send_warning_mock.call_args[0], ("sentinel",)) + + @mock.patch("ietf.doc.tasks.expire_last_call") + @mock.patch("ietf.doc.tasks.get_expired_last_calls") + def test_expire_last_calls_task(self, mock_get_expired, mock_expire): + docs = DocumentFactory.create_batch(3) + mock_get_expired.return_value = docs + expire_last_calls_task() + self.assertTrue(mock_get_expired.called) + self.assertEqual(mock_expire.call_count, 3) + self.assertEqual(mock_expire.call_args_list[0], mock.call(docs[0])) + self.assertEqual(mock_expire.call_args_list[1], mock.call(docs[1])) + self.assertEqual(mock_expire.call_args_list[2], mock.call(docs[2])) + + # Check that it runs even if exceptions occur + mock_get_expired.reset_mock() + mock_expire.reset_mock() + mock_expire.side_effect = ValueError + expire_last_calls_task() + self.assertTrue(mock_get_expired.called) + self.assertEqual(mock_expire.call_count, 3) + self.assertEqual(mock_expire.call_args_list[0], mock.call(docs[0])) + self.assertEqual(mock_expire.call_args_list[1], mock.call(docs[1])) 
+ self.assertEqual(mock_expire.call_args_list[2], mock.call(docs[2])) + + def test_investigate_fragment_task(self): + investigation_results = object() # singleton + with mock.patch( + "ietf.doc.tasks.investigate_fragment", return_value=investigation_results + ) as mock_inv: + retval = investigate_fragment_task("some fragment") + self.assertTrue(mock_inv.called) + self.assertEqual(mock_inv.call_args, mock.call("some fragment")) + self.assertEqual( + retval, {"name_fragment": "some fragment", "results": investigation_results} + ) + + @mock.patch("ietf.doc.tasks.searchindex.update_or_create_rfc_entry") + @mock.patch("ietf.doc.tasks.searchindex.enabled") + def test_update_rfc_searchindex_task( + self, mock_searchindex_enabled, mock_create_entry + ): + mock_searchindex_enabled.return_value = False + + self.assertFalse(Document.objects.filter(rfc_number=5073).exists()) + rfc = WgRfcFactory() + update_rfc_searchindex_task(rfc_number=5073) + self.assertFalse(mock_create_entry.called) + update_rfc_searchindex_task(rfc_number=rfc.rfc_number) + self.assertFalse(mock_create_entry.called) + + mock_searchindex_enabled.return_value = True + update_rfc_searchindex_task(rfc_number=5073) + self.assertFalse(mock_create_entry.called) + update_rfc_searchindex_task(rfc_number=rfc.rfc_number) + self.assertTrue(mock_create_entry.called) + + with override_settings(SEARCHINDEX_CONFIG={"TASK_MAX_RETRIES": 0}): + # Try a non-retryable error (there are others) + mock_create_entry.side_effect = typesense_exceptions.RequestMalformed + update_rfc_searchindex_task(rfc_number=rfc.rfc_number) # no retry + # Now what should be a retryable error + mock_create_entry.side_effect = typesense_exceptions.Timeout + with self.assertRaises(Retry): + update_rfc_searchindex_task(rfc_number=rfc.rfc_number) + + @mock.patch("ietf.doc.tasks.searchindex.update_or_create_rfc_entries") + @mock.patch("ietf.doc.tasks.searchindex.create_collection") + @mock.patch("ietf.doc.tasks.searchindex.delete_collection") + def test_rebuild_searchindex_task(self, mock_delete, mock_create, mock_update): + rfcs = WgRfcFactory.create_batch(10) + rebuild_searchindex_task() + self.assertFalse(mock_delete.called) + self.assertFalse(mock_create.called) + self.assertTrue(mock_update.called) + self.assertQuerysetEqual( + mock_update.call_args.args[0], + sorted(rfcs, key=lambda doc: -doc.rfc_number), + ordered=True, + ) + + mock_delete.reset_mock() + mock_create.reset_mock() + mock_update.reset_mock() + rebuild_searchindex_task(drop_collection=True) + self.assertTrue(mock_delete.called) + self.assertTrue(mock_create.called) + self.assertTrue(mock_update.called) + self.assertQuerysetEqual( + mock_update.call_args.args[0], + sorted(rfcs, key=lambda doc: -doc.rfc_number), + ordered=True, + ) + + mock_delete.reset_mock() + mock_create.reset_mock() + mock_update.reset_mock() + rebuild_searchindex_task(drop_collection=True, batchsize=3) + self.assertTrue(mock_delete.called) + self.assertTrue(mock_create.called) + self.assertTrue(mock_update.called) + self.assertQuerysetEqual( + mock_update.call_args.args[0], + sorted(rfcs, key=lambda doc: -doc.rfc_number), + ordered=True, + ) + self.assertEqual(mock_update.call_args.kwargs["batchsize"], 3) + + +class Idnits2SupportTests(TestCase): + settings_temp_path_overrides = TestCase.settings_temp_path_overrides + [ + "DERIVED_DIR" + ] + + @mock.patch("ietf.doc.tasks.generate_idnits2_rfcs_obsoleted") + def test_generate_idnits2_rfcs_obsoleted_task(self, mock_generate): + mock_generate.return_value = "dåtå" + 
generate_idnits2_rfcs_obsoleted_task() + self.assertEqual(mock_generate.call_count, 1) + self.assertEqual( + "dåtå".encode("utf8"), + (Path(settings.DERIVED_DIR) / "idnits2-rfcs-obsoleted").read_bytes(), + ) + + @mock.patch("ietf.doc.tasks.generate_idnits2_rfc_status") + def test_generate_idnits2_rfc_status_task(self, mock_generate): + mock_generate.return_value = "dåtå" + generate_idnits2_rfc_status_task() + self.assertEqual(mock_generate.call_count, 1) + self.assertEqual( + "dåtå".encode("utf8"), + (Path(settings.DERIVED_DIR) / "idnits2-rfc-status").read_bytes(), + ) + + +class BIBXMLSupportTests(TestCase): + def setUp(self): + super().setUp() + now = timezone.now() + self.very_old_event = NewRevisionDocEventFactory( + time=now - datetime.timedelta(days=1000), rev="17" + ) + self.old_event = NewRevisionDocEventFactory( + time=now - datetime.timedelta(days=8), rev="03" + ) + self.young_event = NewRevisionDocEventFactory( + time=now - datetime.timedelta(days=6), rev="06" + ) + # a couple that should always be ignored + NewRevisionDocEventFactory( + time=now - datetime.timedelta(days=6), + rev="09", + doc__type_id="rfc", # not a draft + ) + NewRevisionDocEventFactory( + type="changed_document", # not a "new_revision" type + time=now - datetime.timedelta(days=6), + rev="09", + doc__type_id="rfc", + ) + # Get rid of the "00" events created by the factories -- they're just noise for this test + NewRevisionDocEvent.objects.filter(rev="00").delete() + + @mock.patch("ietf.doc.tasks.ensure_draft_bibxml_path_exists") + @mock.patch("ietf.doc.tasks.update_or_create_draft_bibxml_file") + def test_generate_bibxml_files_for_all_drafts_task( + self, mock_create, mock_ensure_path + ): + generate_draft_bibxml_files_task(process_all=True) + self.assertTrue(mock_ensure_path.called) + self.assertCountEqual( + mock_create.call_args_list, + [ + mock.call(self.young_event.doc, self.young_event.rev), + mock.call(self.old_event.doc, self.old_event.rev), + mock.call(self.very_old_event.doc, self.very_old_event.rev), + ], + ) + mock_create.reset_mock() + mock_ensure_path.reset_mock() + + # everything should still be tried, even if there's an exception + mock_create.side_effect = RuntimeError + generate_draft_bibxml_files_task(process_all=True) + self.assertTrue(mock_ensure_path.called) + self.assertCountEqual( + mock_create.call_args_list, + [ + mock.call(self.young_event.doc, self.young_event.rev), + mock.call(self.old_event.doc, self.old_event.rev), + mock.call(self.very_old_event.doc, self.very_old_event.rev), + ], + ) + + @mock.patch("ietf.doc.tasks.ensure_draft_bibxml_path_exists") + @mock.patch("ietf.doc.tasks.update_or_create_draft_bibxml_file") + def test_generate_bibxml_files_for_recent_drafts_task( + self, mock_create, mock_ensure_path + ): + # default args - look back 7 days + generate_draft_bibxml_files_task() + self.assertTrue(mock_ensure_path.called) + self.assertCountEqual( + mock_create.call_args_list, + [mock.call(self.young_event.doc, self.young_event.rev)], + ) + mock_create.reset_mock() + mock_ensure_path.reset_mock() + + # shorter lookback + generate_draft_bibxml_files_task(days=5) + self.assertTrue(mock_ensure_path.called) + self.assertCountEqual(mock_create.call_args_list, []) + mock_create.reset_mock() + mock_ensure_path.reset_mock() + + # longer lookback + generate_draft_bibxml_files_task(days=9) + self.assertTrue(mock_ensure_path.called) + self.assertCountEqual( + mock_create.call_args_list, + [ + mock.call(self.young_event.doc, self.young_event.rev), + mock.call(self.old_event.doc, 
self.old_event.rev), + ], + ) + + @mock.patch("ietf.doc.tasks.ensure_draft_bibxml_path_exists") + @mock.patch("ietf.doc.tasks.update_or_create_draft_bibxml_file") + def test_generate_bibxml_files_for_recent_drafts_task_with_bad_value( + self, mock_create, mock_ensure_path + ): + with self.assertRaises(ValueError): + generate_draft_bibxml_files_task(days=0) + self.assertFalse(mock_create.called) + self.assertFalse(mock_ensure_path.called) diff --git a/ietf/doc/tests_utils.py b/ietf/doc/tests_utils.py index e104b9ee51..ba672cd847 100644 --- a/ietf/doc/tests_utils.py +++ b/ietf/doc/tests_utils.py @@ -1,22 +1,36 @@ # Copyright The IETF Trust 2020, All Rights Reserved import datetime +from io import BytesIO + +import mock import debug # pyflakes:ignore +import requests -from unittest.mock import patch +from pathlib import Path +from unittest.mock import call, patch +from django.conf import settings +from django.core.files.storage import storages from django.db import IntegrityError from django.test.utils import override_settings from django.utils import timezone + +from ietf.doc.utils_r2 import rfcs_are_in_r2 +from ietf.doc.utils_red import trigger_red_precomputer from ietf.group.factories import GroupFactory, RoleFactory from ietf.name.models import DocTagName from ietf.person.factories import PersonFactory -from ietf.utils.test_utils import TestCase, name_of_file_containing +from ietf.doc.factories import BallotPositionDocEventFactory +from ietf.utils.test_utils import TestCase, name_of_file_containing, reload_db_objects from ietf.person.models import Person from ietf.doc.factories import DocumentFactory, WgRfcFactory, WgDraftFactory -from ietf.doc.models import State, DocumentActionHolder, DocumentAuthor, Document +from ietf.doc.models import State, DocumentActionHolder, DocumentAuthor, StoredObject from ietf.doc.utils import (update_action_holders, add_state_change_event, update_documentauthors, - fuzzy_find_documents, rebuild_reference_relations, build_file_urls) + fuzzy_find_documents, rebuild_reference_relations, build_file_urls, + ensure_draft_bibxml_path_exists, update_or_create_draft_bibxml_file, + last_ballot_doc_revision) +from ietf.doc.storage_utils import store_str from ietf.utils.draft import Draft, PlaintextDraft from ietf.utils.xmldraft import XMLDraft @@ -145,7 +159,7 @@ def test_update_action_holders_resets_age(self): doc = self.doc_in_iesg_state('pub-req') doc.action_holders.set([self.ad]) dah = doc.documentactionholder_set.get(person=self.ad) - dah.time_added = datetime.datetime(2020, 1, 1, tzinfo=datetime.timezone.utc) # arbitrary date in the past + dah.time_added = datetime.datetime(2020, 1, 1, tzinfo=datetime.UTC) # arbitrary date in the past dah.save() right_now = timezone.now() @@ -272,41 +286,49 @@ def test_update_documentauthors_with_nulls(self): self.assertEqual(docauth.country, '') def do_fuzzy_find_documents_rfc_test(self, name): - rfc = WgRfcFactory(name=name, create_revisions=(0, 1, 2)) - rfc = Document.objects.get(pk=rfc.pk) # clear out any cached values + draft = WgDraftFactory(name=name, create_revisions=(0, 1, 2)) + rfc = WgRfcFactory() + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) + draft, rfc = reload_db_objects(draft, rfc) # by canonical name - found = fuzzy_find_documents(rfc.canonical_name(), None) + found = fuzzy_find_documents(rfc.name, None) self.assertCountEqual(found.documents, [rfc]) self.assertEqual(found.matched_rev, None) - self.assertEqual(found.matched_name, rfc.canonical_name()) + 
self.assertEqual(found.matched_name, rfc.name) # by draft name, no rev - found = fuzzy_find_documents(rfc.name, None) - self.assertCountEqual(found.documents, [rfc]) + found = fuzzy_find_documents(draft.name, None) + self.assertCountEqual(found.documents, [draft]) self.assertEqual(found.matched_rev, None) - self.assertEqual(found.matched_name, rfc.name) + self.assertEqual(found.matched_name, draft.name) # by draft name, latest rev - found = fuzzy_find_documents(rfc.name, '02') - self.assertCountEqual(found.documents, [rfc]) + found = fuzzy_find_documents(draft.name, '02') + self.assertCountEqual(found.documents, [draft]) self.assertEqual(found.matched_rev, '02') - self.assertEqual(found.matched_name, rfc.name) + self.assertEqual(found.matched_name, draft.name) # by draft name, earlier rev - found = fuzzy_find_documents(rfc.name, '01') - self.assertCountEqual(found.documents, [rfc]) + found = fuzzy_find_documents(draft.name, '01') + self.assertCountEqual(found.documents, [draft]) self.assertEqual(found.matched_rev, '01') - self.assertEqual(found.matched_name, rfc.name) + self.assertEqual(found.matched_name, draft.name) # wrong name or revision - found = fuzzy_find_documents(rfc.name + '-incorrect') + found = fuzzy_find_documents(draft.name + '-incorrect') self.assertCountEqual(found.documents, [], 'Should not find document that does not match') - found = fuzzy_find_documents(rfc.name + '-incorrect', '02') + found = fuzzy_find_documents(draft.name + '-incorrect', '02') self.assertCountEqual(found.documents, [], 'Still should not find document, even with a version') - found = fuzzy_find_documents(rfc.name, '22') - self.assertCountEqual(found.documents, [rfc], + found = fuzzy_find_documents(draft.name, '22') + self.assertCountEqual(found.documents, [draft], 'Should find document even if rev does not exist') + + # by rfc name mistakenly trying to provide a revision + found = fuzzy_find_documents(rfc.name+"-22") + self.assertCountEqual(found.documents, [rfc], "Should ignore versions when fuzzyfinding RFCs" ) + found = fuzzy_find_documents(rfc.name,"22") + self.assertCountEqual(found.documents, [rfc], "Should ignore versions when fuzzyfinding RFCs" ) def test_fuzzy_find_documents(self): @@ -346,29 +368,29 @@ def setUp(self): super().setUp() self.doc = WgDraftFactory() # document under test # Other documents that should be found by rebuild_reference_relations - self.normative, self.informative, self.unknown = WgRfcFactory.create_batch(3) + self.normative, self.informative, self.unknown = WgRfcFactory.create_batch(3) # AMHERE - these need to have rfc names. 
for relationship in ['refnorm', 'refinfo', 'refunk', 'refold']: self.doc.relateddocument_set.create( - target=WgRfcFactory().docalias.first(), + target=WgRfcFactory(), relationship_id=relationship, ) self.updated = WgRfcFactory() # related document that should be left alone - self.doc.relateddocument_set.create(target=self.updated.docalias.first(), relationship_id='updates') + self.doc.relateddocument_set.create(target=self.updated, relationship_id='updates') self.assertCountEqual(self.doc.relateddocument_set.values_list('relationship__slug', flat=True), ['refnorm', 'refinfo', 'refold', 'refunk', 'updates'], 'Test conditions set up incorrectly: wrong prior document relationships') for other_doc in [self.normative, self.informative, self.unknown]: self.assertEqual( - self.doc.relateddocument_set.filter(target__name=other_doc.canonical_name()).count(), + self.doc.relateddocument_set.filter(target__name=other_doc.name).count(), 0, 'Test conditions set up incorrectly: new documents already related', ) def _get_refs_return_value(self): return { - self.normative.canonical_name(): Draft.REF_TYPE_NORMATIVE, - self.informative.canonical_name(): Draft.REF_TYPE_INFORMATIVE, - self.unknown.canonical_name(): Draft.REF_TYPE_UNKNOWN, + self.normative.name: Draft.REF_TYPE_NORMATIVE, + self.informative.name: Draft.REF_TYPE_INFORMATIVE, + self.unknown.name: Draft.REF_TYPE_UNKNOWN, 'draft-not-found': Draft.REF_TYPE_NORMATIVE, } @@ -376,13 +398,13 @@ def test_requires_txt_or_xml(self): result = rebuild_reference_relations(self.doc, {}) self.assertCountEqual(result.keys(), ['errors']) self.assertEqual(len(result['errors']), 1) - self.assertIn('No Internet-Draft text available', result['errors'][0], + self.assertIn('No file available', result['errors'][0], 'Error should be reported if no Internet-Draft file is given') result = rebuild_reference_relations(self.doc, {'md': 'cant-do-this.md'}) self.assertCountEqual(result.keys(), ['errors']) self.assertEqual(len(result['errors']), 1) - self.assertIn('No Internet-Draft text available', result['errors'][0], + self.assertIn('No file available', result['errors'][0], 'Error should be reported if no XML or plaintext file is given') @patch.object(XMLDraft, 'get_refs') @@ -399,7 +421,7 @@ def test_xml(self, mock_init, mock_get_refs): self.assertEqual( result, { - 'warnings': ['There were 1 references with no matching DocAlias'], + 'warnings': ['There were 1 references with no matching Document'], 'unfound': ['draft-not-found'], } ) @@ -407,10 +429,10 @@ def test_xml(self, mock_init, mock_get_refs): self.assertCountEqual( self.doc.relateddocument_set.values_list('target__name', 'relationship__slug'), [ - (self.normative.canonical_name(), 'refnorm'), - (self.informative.canonical_name(), 'refinfo'), - (self.unknown.canonical_name(), 'refunk'), - (self.updated.docalias.first().name, 'updates'), + (self.normative.name, 'refnorm'), + (self.informative.name, 'refinfo'), + (self.unknown.name, 'refunk'), + (self.updated.name, 'updates'), ] ) @@ -430,7 +452,7 @@ def test_plaintext(self, mock_init, mock_get_refs): self.assertEqual( result, { - 'warnings': ['There were 1 references with no matching DocAlias'], + 'warnings': ['There were 1 references with no matching Document'], 'unfound': ['draft-not-found'], } ) @@ -438,10 +460,10 @@ def test_plaintext(self, mock_init, mock_get_refs): self.assertCountEqual( self.doc.relateddocument_set.values_list('target__name', 'relationship__slug'), [ - (self.normative.canonical_name(), 'refnorm'), - (self.informative.canonical_name(), 
'refinfo'), - (self.unknown.canonical_name(), 'refunk'), - (self.updated.docalias.first().name, 'updates'), + (self.normative.name, 'refnorm'), + (self.informative.name, 'refinfo'), + (self.unknown.name, 'refunk'), + (self.updated.name, 'updates'), ] ) @@ -462,7 +484,7 @@ def test_xml_and_plaintext(self, mock_init, mock_get_refs, mock_plaintext_init): self.assertEqual( result, { - 'warnings': ['There were 1 references with no matching DocAlias'], + 'warnings': ['There were 1 references with no matching Document'], 'unfound': ['draft-not-found'], } ) @@ -470,9 +492,208 @@ def test_xml_and_plaintext(self, mock_init, mock_get_refs, mock_plaintext_init): self.assertCountEqual( self.doc.relateddocument_set.values_list('target__name', 'relationship__slug'), [ - (self.normative.canonical_name(), 'refnorm'), - (self.informative.canonical_name(), 'refinfo'), - (self.unknown.canonical_name(), 'refunk'), - (self.updated.docalias.first().name, 'updates'), + (self.normative.name, 'refnorm'), + (self.informative.name, 'refinfo'), + (self.unknown.name, 'refunk'), + (self.updated.name, 'updates'), ] ) + + +class DraftBibxmlTests(TestCase): + settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ["BIBXML_BASE_PATH"] + + def test_ensure_draft_bibxml_path_exists(self): + expected = Path(settings.BIBXML_BASE_PATH) / "bibxml-ids" + self.assertFalse(expected.exists()) + ensure_draft_bibxml_path_exists() + self.assertTrue(expected.is_dir()) # false if does not exist or is not dir + + @patch("ietf.doc.utils.bibxml_for_draft", return_value="This\ris\nmy\r\nbibxml") + def test_create_draft_bibxml_file(self, mock): + bibxml_path = Path(settings.BIBXML_BASE_PATH) / "bibxml-ids" + bibxml_path.mkdir(exist_ok=False) # expect to start with a clean slate + + doc = DocumentFactory() + ref_path = bibxml_path / f"reference.I-D.{doc.name}-26.xml" # we're pretending it's rev 26 + + update_or_create_draft_bibxml_file(doc, "26") + self.assertEqual(mock.call_count, 1) + self.assertEqual(mock.call_args, call(doc, "26")) + self.assertEqual(ref_path.read_text(), "This\nis\nmy\nbibxml") + + @patch("ietf.doc.utils.bibxml_for_draft", return_value="This\ris\nmy\r\nbibxml") + def test_update_draft_bibxml_file(self, mock): + bibxml_path = Path(settings.BIBXML_BASE_PATH) / "bibxml-ids" + bibxml_path.mkdir(exist_ok=False) # expect to start with a clean slate + + doc = DocumentFactory() + ref_path = bibxml_path / f"reference.I-D.{doc.name}-26.xml" # we're pretending it's rev 26 + ref_path.write_text("Old data") + + # should replace it + update_or_create_draft_bibxml_file(doc, "26") + self.assertEqual(mock.call_count, 1) + self.assertEqual(mock.call_args, call(doc, "26")) + self.assertEqual(ref_path.read_text(), "This\nis\nmy\nbibxml") + + # should leave it alone if it differs only by leading/trailing whitespace + mock.reset_mock() + mock.return_value = " \n This\nis\nmy\nbibxml " + update_or_create_draft_bibxml_file(doc, "26") + self.assertEqual(mock.call_count, 1) + self.assertEqual(mock.call_args, call(doc, "26")) + self.assertEqual(ref_path.read_text(), "This\nis\nmy\nbibxml") + + +class LastBallotDocRevisionTests(TestCase): + def test_last_ballot_doc_revision(self): + now = timezone.now() + ad = Person.objects.get(user__username="ad") + bpde_with_null_send_email = BallotPositionDocEventFactory( + time=now - datetime.timedelta(minutes=30), + send_email=None, + ) + ballot = bpde_with_null_send_email.ballot + BallotPositionDocEventFactory( + ballot=ballot, + balloter=ad, + pos_id='noobj', + comment='Commentary', + 
comment_time=timezone.now(), + send_email=None, + ) + doc = bpde_with_null_send_email.doc + rev = bpde_with_null_send_email.rev + nobody = PersonFactory() + self.assertIsNone(last_ballot_doc_revision(doc, nobody)) + self.assertEqual(rev, last_ballot_doc_revision(doc, ad)) + + +class UtilsRedTests(TestCase): + @mock.patch("ietf.doc.utils_red.log") + @mock.patch("ietf.doc.utils_red.requests.post") + def test_trigger_red_precomputer_not_configured(self, mock_post, mock_log): + with override_settings(): + try: + del settings.TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL + except AttributeError: + pass + trigger_red_precomputer(rfc_number_list=[1, 2, 3]) + self.assertEqual(mock_log.call_count, 1) + mock_args, _ = mock_log.call_args + self.assertEqual( + mock_args, + ("No URL configured for triggering red precompute multiple, skipping",), + ) + + mock_log.reset_mock() + with override_settings(TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL=None): + trigger_red_precomputer(rfc_number_list=[1, 2, 3]) + self.assertFalse(mock_post.called) + self.assertEqual(mock_log.call_count, 1) + mock_args, _ = mock_log.call_args + self.assertEqual( + mock_args, + ("No URL configured for triggering red precompute multiple, skipping",), + ) + + @override_settings( + TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL="urlbits", + ) + @mock.patch("ietf.doc.utils_red.log") + @mock.patch("ietf.doc.utils_red.requests.post", side_effect=requests.Timeout()) + def test_trigger_red_precomputer_swallows_timeout_exception( + self, mock_post, mock_log + ): + exception_raised = False + try: + trigger_red_precomputer(rfc_number_list=[1, 2, 3]) + except Exception: + exception_raised = True + self.assertFalse(exception_raised) + self.assertEqual(mock_log.call_count, 2) + # only checking the last log call + mock_args, _ = mock_log.call_args + self.assertEqual(len(mock_args), 1) + self.assertIn("POST request timed out", mock_args[0]) + + @override_settings( + TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL="urlbits", + ) + @mock.patch("ietf.doc.utils_red.requests.post", side_effect=Exception()) + def test_trigger_red_precomputer_does_not_swallow_too_much(self, mock_post): + exception_raised = False + try: + trigger_red_precomputer(rfc_number_list=[1, 2, 3]) + except Exception: + exception_raised = True + self.assertTrue(exception_raised) + + @override_settings( + TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL="urlbits", + DEFAULT_REQUESTS_TIMEOUT=314159265, + ) + @mock.patch("ietf.doc.utils_red.log") + @mock.patch("ietf.doc.utils_red.requests.post") + def test_trigger_red_precomputer(self, mock_post, mock_log): + mock_post.return_value = mock.Mock(status_code=200) + trigger_red_precomputer(rfc_number_list=[1, 2, 3]) + self.assertTrue(mock_post.called) + _, mock_kwargs = mock_post.call_args + self.assertIn("url", mock_kwargs) + self.assertEqual(mock_kwargs["url"], "urlbits") + self.assertIn("json", mock_kwargs) + self.assertEqual(mock_kwargs["json"], {"rfcs": "1,2,3"}) + self.assertIn("timeout", mock_kwargs) + self.assertEqual(mock_kwargs["timeout"], 314159265) + self.assertEqual(mock_log.call_count, 1) # Not testing the first info log value + mock_log.reset_mock() + mock_post.reset_mock() + mock_post.return_value = mock.Mock( + status_code=500, + ) + trigger_red_precomputer(rfc_number_list=[1, 2, 3]) + self.assertEqual(mock_log.call_count, 2) + mock_args, _ = mock_log.call_args + self.assertEqual(len(mock_args), 1) + expected = f"POST request failed for {settings.TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL} : status_code=500" + self.assertEqual(mock_args[0], expected) + + +class UtilsR2TestCase(TestCase): + def
test_rfcs_are_in_r2(self): + rfcs = WgRfcFactory.create_batch(2) + rfc_name_list = [rfc.name for rfc in rfcs] + rfc_number_list = [rfc.rfc_number for rfc in rfcs] + r2_rfc_bucket = storages["r2-rfc"] + # Right now the various doc Factories do not populate any content + self.assertEqual( + StoredObject.objects.filter( + store="rfc", doc_name__in=rfc_name_list + ).count(), + 0, + ) + self.assertTrue(rfcs_are_in_r2(rfc_number_list=rfc_number_list)) + for rfc in rfcs: + store_str( + kind="rfc", + name=f"testartifact/{rfc.name}.testartifact", + content="", + doc_name=rfc.name, + doc_rev=None, + ) + self.assertEqual( + StoredObject.objects.filter( + store="rfc", doc_name__in=rfc_name_list + ).count(), + 2, + ) + self.assertFalse(rfcs_are_in_r2(rfc_number_list=rfc_number_list)) + r2_rfc_bucket.save(f"testartifact/{rfcs[0].name}.testartifact", BytesIO(b"")) + self.assertFalse(rfcs_are_in_r2(rfc_number_list=rfc_number_list)) + r2_rfc_bucket.save(f"testartifact/{rfcs[1].name}.testartifact", BytesIO(b"")) + self.assertTrue(rfcs_are_in_r2(rfc_number_list=rfc_number_list)) + + + diff --git a/ietf/doc/urls.py b/ietf/doc/urls.py index a9703d6328..0c13503b78 100644 --- a/ietf/doc/urls.py +++ b/ietf/doc/urls.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2009-2023, All Rights Reserved +# Copyright The IETF Trust 2009-2025, All Rights Reserved # -*- coding: utf-8 -*- # Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies). # All rights reserved. Contact: Pasi Eronen @@ -53,12 +53,13 @@ url(r'^ad/?$', views_search.ad_workload), url(r'^ad/(?P<name>[^/]+)/?$', views_search.docs_for_ad), url(r'^ad2/(?P<name>[\w.-]+)/$', RedirectView.as_view(url='/doc/ad/%(name)s/', permanent=True)), + url(r'^for_iesg/?$', RedirectView.as_view(pattern_name='ietf.doc.views_search.docs_for_iesg', permanent=False)), url(r'^rfc-status-changes/?$', views_status_change.rfc_status_changes), url(r'^start-rfc-status-change/(?:%(name)s/)?$' % settings.URL_REGEXPS, views_status_change.start_rfc_status_change), url(r'^bof-requests/?$', views_bofreq.bof_requests), url(r'^bof-requests/new/$', views_bofreq.new_bof_request), url(r'^statement/new/$', views_statement.new_statement), - url(r'^iesg/?$', views_search.drafts_in_iesg_process), + url(r'^iesg/?$', views_search.docs_for_iesg), url(r'^email-aliases/?$', views_doc.email_aliases), url(r'^downref/?$', views_downref.downref_registry), url(r'^downref/add/?$', views_downref.downref_registry_add), @@ -66,17 +67,15 @@ r"^shepherdwriteup-template/(?P<type>\w+)/?$", views_doc.document_shepherd_writeup_template, ), + url(r'^investigate/?$', views_doc.investigate), - url(r'^stats/newrevisiondocevent/?$', views_stats.chart_newrevisiondocevent), - url(r'^stats/newrevisiondocevent/conf/?$', views_stats.chart_conf_newrevisiondocevent), - url(r'^stats/newrevisiondocevent/data/?$', views_stats.chart_data_newrevisiondocevent), url(r'^stats/person/(?P<id>[0-9]+)/drafts/conf/?$', views_stats.chart_conf_person_drafts), url(r'^stats/person/(?P<id>[0-9]+)/drafts/data/?$', views_stats.chart_data_person_drafts), # This block should really all be at the idealized docs.ietf.org service url(r'^html/(?P<name>bcp[0-9]+?)(\.txt|\.html)?/?$', RedirectView.as_view(url=settings.RFC_EDITOR_INFO_BASE_URL+"%(name)s", permanent=False)), url(r'^html/(?P<name>std[0-9]+?)(\.txt|\.html)?/?$', RedirectView.as_view(url=settings.RFC_EDITOR_INFO_BASE_URL+"%(name)s", permanent=False)), - url(r'^html/%(name)s(?:-%(rev)s)?(\.txt|\.html)?/?$' % settings.URL_REGEXPS, views_doc.document_html), + url(r'^html/%(name)s(?:-(?P<rev>[0-9]{2}(-[0-9]{2})?))?(\.txt|\.html)?/?$' %
settings.URL_REGEXPS, views_doc.document_html), url(r'^id/%(name)s(?:-%(rev)s)?(?:\.(?P<ext>(txt|html|xml)))?/?$' % settings.URL_REGEXPS, views_doc.document_raw_id), url(r'^pdf/%(name)s(?:-%(rev)s)?(?:\.(?P<ext>[a-z]+))?/?$' % settings.URL_REGEXPS, views_doc.document_pdfized), @@ -90,12 +89,18 @@ url(r'^all/?$', views_search.index_all_drafts), url(r'^active/?$', views_search.index_active_drafts), url(r'^recent/?$', views_search.recent_drafts), - url(r'^select2search/(?P<model_name>(document|docalias))/(?P<doc_type>draft)/$', views_search.ajax_select2_search_docs), + url(r'^select2search/(?P<model_name>document)/(?P<doc_type>(draft|rfc|all))/$', views_search.ajax_select2_search_docs), url(r'^ballots/irsg/$', views_ballot.irsg_ballot_status), url(r'^ballots/rsab/$', views_ballot.rsab_ballot_status), + url(r'^build-position-email/$', views_ballot.ajax_build_position_email), + + url(r'^(?P<type_id>(bcp|std|fyi))/?$', views_search.index_subseries), + + url(r'^%(name)s(?:/%(rev)s)?/$' % settings.URL_REGEXPS, views_doc.document_main), url(r'^%(name)s(?:/%(rev)s)?/bibtex/$' % settings.URL_REGEXPS, views_doc.document_bibtex), + url(r'^rfc(?P[0-9]+)/notprepped/$' , views_doc.rfcxml_notprepped), + url(r'^rfc(?P[0-9]+)/notprepped-wrapper/$', views_doc.rfcxml_notprepped_wrapper), url(r'^%(name)s(?:/%(rev)s)?/idnits2-state/$' % settings.URL_REGEXPS, views_doc.idnits2_state), url(r'^bibxml3/reference.I-D.%(name)s(?:-%(rev)s)?.xml$' % settings.URL_REGEXPS, views_doc.document_bibxml_ref), url(r'^bibxml3/%(name)s(?:-%(rev)s)?.xml$' % settings.URL_REGEXPS, views_doc.document_bibxml), @@ -110,7 +115,6 @@ url(r'^%(name)s/ballot/rsab/$' % settings.URL_REGEXPS, views_doc.document_rsab_ballot), url(r'^%(name)s/ballot/(?P<ballot_id>[0-9]+)/$' % settings.URL_REGEXPS, views_doc.document_ballot), url(r'^%(name)s/ballot/(?P<ballot_id>[0-9]+)/position/$' % settings.URL_REGEXPS, views_ballot.edit_position), - url(r'^%(name)s/ballot/(?P<ballot_id>[0-9]+)/emailposition/$' % settings.URL_REGEXPS, views_ballot.send_ballot_comment), url(r'^%(name)s/(?:%(rev)s/)?doc.json$' % settings.URL_REGEXPS, views_doc.document_json), url(r'^%(name)s/ballotpopup/(?P<ballot_id>[0-9]+)/$' % settings.URL_REGEXPS, views_doc.ballot_popup), url(r'^(?P<name>[A-Za-z0-9._+-]+)/reviewrequest/', include("ietf.doc.urls_review")), @@ -123,6 +127,7 @@ url(r'^%(name)s/edit/info/$' % settings.URL_REGEXPS, views_draft.edit_info), url(r'^%(name)s/edit/requestresurrect/$' % settings.URL_REGEXPS, views_draft.request_resurrect), url(r'^%(name)s/edit/submit-to-iesg/$' % settings.URL_REGEXPS, views_draft.to_iesg), + url(r'^%(name)s/edit/issue-wg-lc/$' % settings.URL_REGEXPS, views_draft.issue_wg_lc), url(r'^%(name)s/edit/resurrect/$' % settings.URL_REGEXPS, views_draft.resurrect), url(r'^%(name)s/edit/addcomment/$' % settings.URL_REGEXPS, views_doc.add_comment), @@ -141,9 +146,14 @@ url(r'^%(name)s/edit/shepherdemail/$' % settings.URL_REGEXPS, views_draft.change_shepherd_email), url(r'^%(name)s/edit/shepherdwriteup/$' % settings.URL_REGEXPS, views_draft.edit_shepherd_writeup), url(r'^%(name)s/edit/requestpublication/$' % settings.URL_REGEXPS, views_draft.request_publication), + url(r'^%(name)s/edit/ask-about-ietf-adoption/$' % settings.URL_REGEXPS, views_draft.ask_about_ietf_adoption_call), url(r'^%(name)s/edit/adopt/$' % settings.URL_REGEXPS, views_draft.adopt_draft), + url(r'^%(name)s/edit/issue-wg-call-for-adoption/%(acronym)s/$' % settings.URL_REGEXPS, views_draft.issue_wg_call_for_adoption), + url(r'^%(name)s/edit/release/$' % settings.URL_REGEXPS, views_draft.release_draft), url(r'^%(name)s/edit/state/(?P<state_type>draft-stream-[a-z]+)/$' % settings.URL_REGEXPS,
views_draft.change_stream_state), + url(r'^%(name)s/edit/wg-action-helpers/$' % settings.URL_REGEXPS, views_draft.offer_wg_action_helpers), + + url(r'^%(name)s/edit/state/statement/$' % settings.URL_REGEXPS, views_statement.change_statement_state), url(r'^%(name)s/edit/clearballot/(?P<ballot_type_slug>[\w-]+)/$' % settings.URL_REGEXPS, views_ballot.clear_ballot), url(r'^%(name)s/edit/deferballot/$' % settings.URL_REGEXPS, views_ballot.defer_ballot), @@ -161,6 +171,7 @@ url(r'^%(name)s/edit/issueballot/rsab/$' % settings.URL_REGEXPS, views_ballot.issue_rsab_ballot), url(r'^%(name)s/edit/closeballot/rsab/$' % settings.URL_REGEXPS, views_ballot.close_rsab_ballot), + url(r'^help/state/?$', views_help.state_index), url(r'^help/state/(?P<type>[\w-]+)/$', views_help.state_help), url(r'^help/relationships/$', views_help.relationship_help), url(r'^help/relationships/(?P<subset>\w+)/$', views_help.relationship_help), @@ -176,7 +187,8 @@ url(r'^%(name)s/session/' % settings.URL_REGEXPS, include('ietf.doc.urls_material')), url(r'^(?P<name>[A-Za-z0-9._+-]+)/session/', include(session_patterns)), url(r'^(?P<name>[A-Za-z0-9\._\+\-]+)$', views_search.search_for_name), - # latest versions - keep old URLs alive during migration period + # rfcdiff - latest versions - keep old URLs alive during migration period url(r'^rfcdiff-latest-json/%(name)s(?:-%(rev)s)?(\.txt|\.html)?/?$' % settings.URL_REGEXPS, RedirectView.as_view(pattern_name='ietf.api.views.rfcdiff_latest_json', permanent=True)), url(r'^rfcdiff-latest-json/(?P<name>[Rr][Ff][Cc] [0-9]+?)(\.txt|\.html)?/?$', RedirectView.as_view(pattern_name='ietf.api.views.rfcdiff_latest_json', permanent=True)), + # end of rfcdiff support URLs ] diff --git a/ietf/doc/utils.py b/ietf/doc/utils.py index 992659df3d..6f32ed454f 100644 --- a/ietf/doc/utils.py +++ b/ietf/doc/utils.py @@ -1,9 +1,8 @@ -# Copyright The IETF Trust 2011-2020, All Rights Reserved +# Copyright The IETF Trust 2011-2024, All Rights Reserved # -*- coding: utf-8 -*- import datetime -import hashlib import io import json import math @@ -13,11 +12,15 @@ from collections import defaultdict, namedtuple, Counter from dataclasses import dataclass -from typing import Union +from hashlib import sha384 +from pathlib import Path +from typing import Iterator, Optional, Union, Iterable from zoneinfo import ZoneInfo from django.conf import settings from django.contrib import messages +from django.core.cache import caches +from django.db.models import OuterRef from django.forms import ValidationError from django.http import Http404 from django.template.loader import render_to_string @@ -31,17 +34,28 @@ from ietf.community.models import CommunityList from ietf.community.utils import docs_tracked_by_community_list -from ietf.doc.models import Document, DocHistory, State, DocumentAuthor, DocHistoryAuthor -from ietf.doc.models import DocAlias, RelatedDocument, RelatedDocHistory, BallotType, DocReminder +from ietf.doc.models import ( + DocHistory, + DocHistoryAuthor, + Document, + DocumentAuthor, + EditedRfcAuthorsDocEvent, + RfcAuthor, + State, + StoredObject, +) +from ietf.doc.models import RelatedDocument, RelatedDocHistory, BallotType, DocReminder from ietf.doc.models import DocEvent, ConsensusDocEvent, BallotDocEvent, IRSGBallotDocEvent, NewRevisionDocEvent, StateDocEvent -from ietf.doc.models import TelechatDocEvent, DocumentActionHolder, EditedAuthorsDocEvent +from ietf.doc.models import TelechatDocEvent, DocumentActionHolder, EditedAuthorsDocEvent, BallotPositionDocEvent +from ietf.doc.storage_utils import force_replication from ietf.name.models import
DocReminderTypeName, DocRelationshipName from ietf.group.models import Role, Group, GroupFeatures from ietf.ietfauth.utils import has_role, is_authorized_in_doc_stream, is_individual_draft_author, is_bofreq_editor -from ietf.person.models import Person +from ietf.person.models import Email, Person +from ietf.person.utils import get_active_balloters from ietf.review.models import ReviewWish from ietf.utils import draft, log -from ietf.utils.mail import send_mail +from ietf.utils.mail import parseaddr, send_mail from ietf.mailtrigger.utils import gather_address_lists from ietf.utils.timezone import date_today, datetime_from_date, datetime_today, DEADLINE_TZINFO from ietf.utils.xmldraft import XMLDraft @@ -57,7 +71,7 @@ def get_model_fields_as_dict(obj): # copy fields fields = get_model_fields_as_dict(doc) fields["doc"] = doc - fields["name"] = doc.canonical_name() + fields["name"] = doc.name dochist = DocHistory(**fields) dochist.save() @@ -219,7 +233,7 @@ def needed_ballot_positions(doc, active_positions): else: related_set = RelatedDocHistory.objects.none() for rel in related_set.filter(relationship__slug__in=['tops', 'tois', 'tohist', 'toinf', 'tobcp', 'toexp']): - if (rel.target.document.std_level_id in ['bcp','ps','ds','std']) or (rel.relationship_id in ['tops','tois','tobcp']): + if (rel.target.std_level_id in ['bcp','ps','ds','std']) or (rel.relationship_id in ['tops','tois','tobcp']): needed = two_thirds_rule(recused=len(recuse)) break else: @@ -346,22 +360,13 @@ def augment_events_with_revision(doc, events): """Take a set of events for doc and add a .rev attribute with the revision they refer to by checking NewRevisionDocEvents.""" + # Need QuerySetAny instead of QuerySet until django-stubs 5.0.1 if isinstance(events, QuerySetAny): qs = events.filter(newrevisiondocevent__isnull=False) else: qs = NewRevisionDocEvent.objects.filter(doc=doc) event_revisions = list(qs.order_by('time', 'id').values('id', 'rev', 'time')) - if doc.type_id == "draft" and doc.get_state_slug() == "rfc": - # add fake "RFC" revision - if isinstance(events, QuerySetAny): - e = events.filter(type="published_rfc").order_by('time').last() - else: - e = doc.latest_event(type="published_rfc") - if e: - event_revisions.append(dict(id=e.id, time=e.time, rev="RFC")) - event_revisions.sort(key=lambda x: (x["time"], x["id"])) - for e in sorted(events, key=lambda e: (e.time, e.id), reverse=True): while event_revisions and (e.time, e.id) < (event_revisions[-1]["time"], event_revisions[-1]["id"]): event_revisions.pop() @@ -406,8 +411,12 @@ def get_unicode_document_content(key, filename, codec='utf-8', errors='ignore'): def tags_suffix(tags): return ("::" + "::".join(t.name for t in tags)) if tags else "" -def add_state_change_event(doc, by, prev_state, new_state, prev_tags=None, new_tags=None, timestamp=None): - """Add doc event to explain that state change just happened.""" + +def new_state_change_event(doc, by, prev_state, new_state, prev_tags=None, new_tags=None, timestamp=None): + """Create unsaved doc event to explain that state change just happened + + Returns None if no state change occurred. + """ if prev_state and new_state: assert prev_state.type_id == new_state.type_id @@ -427,7 +436,22 @@ def add_state_change_event(doc, by, prev_state, new_state, prev_tags=None, new_t e.desc += " from %s" % (prev_state.name + tags_suffix(prev_tags)) if timestamp: e.time = timestamp - e.save() + return e # not saved! 
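A minimal sketch of the calling pattern this split enables, assuming a caller that already holds the Document (doc), the acting Person (by), and the previous and new State objects; the extra desc text is purely illustrative:

    e = new_state_change_event(doc, by, prev_state, new_state)  # unsaved DocEvent, or None if nothing changed
    if e is not None:
        e.desc += " following working group review"  # adjust before notifications go out
        e.save()  # the first save() is what triggers CommunityList notifications
        doc.save_with_history([e])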
+ + +def add_state_change_event(doc, by, prev_state, new_state, prev_tags=None, new_tags=None, timestamp=None): + """Add doc event to explain that state change just happened. + + Returns None if no state change occurred. + + Note: Creating a state change DocEvent will trigger notifications to be sent to people subscribed + to the doc via a CommunityList on its first save(). If you need to adjust the event (say, changing + its desc) before that notification is sent, use new_state_change_event() instead and save the + event after making your changes. + """ + e = new_state_change_event(doc, by, prev_state, new_state, prev_tags, new_tags, timestamp) + if e is not None: + e.save() return e @@ -481,8 +505,9 @@ def update_action_holders(doc, prev_state=None, new_state=None, prev_tags=None, Returns an event describing the change which should be passed to doc.save_with_history() - Only cares about draft-iesg state changes. Places where other state types are updated - may not call this method. If you add rules for updating action holders on other state + Only cares about draft-iesg state changes and draft expiration. + Places where other state types are updated may not call this method. + If you add rules for updating action holders on other state types, be sure this is called in the places that change that state. """ # Should not call this with different state types @@ -501,41 +526,84 @@ def update_action_holders(doc, prev_state=None, new_state=None, prev_tags=None, # Remember original list of action holders to later check if it changed prev_set = list(doc.action_holders.all()) - - # Update the action holders. To get this right for people with more - # than one relationship to the document, do removals first, then adds. - # Remove outdated action holders - iesg_state_changed = (prev_state != new_state) and (getattr(new_state, "type_id", None) == "draft-iesg") - if iesg_state_changed: - # Clear the action_holders list on a state change. This will reset the age of any that get added back. + + if new_state and new_state.type_id=="draft" and new_state.slug=="expired": doc.action_holders.clear() - if tags.removed("need-rev"): - # Removed the 'need-rev' tag - drop authors from the action holders list - DocumentActionHolder.objects.filter(document=doc, person__in=doc.authors()).delete() - elif tags.added("need-rev"): - # Remove the AD if we're asking for a new revision - DocumentActionHolder.objects.filter(document=doc, person=doc.ad).delete() - - # Add new action holders - if doc.ad: - # AD is an action holder unless specified otherwise for the new state - if iesg_state_changed and new_state.slug not in DocumentActionHolder.CLEAR_ACTION_HOLDERS_STATES: - doc.action_holders.add(doc.ad) - # If AD follow-up is needed, make sure they are an action holder - if tags.added("ad-f-up"): - doc.action_holders.add(doc.ad) - # Authors get the action if a revision is needed - if tags.added("need-rev"): - for auth in doc.authors(): - doc.action_holders.add(auth) - - # Now create an event if we changed the set - return add_action_holder_change_event( - doc, - Person.objects.get(name='(System)'), - prev_set, - reason='IESG state changed', - ) + return add_action_holder_change_event( + doc, + Person.objects.get(name='(System)'), + prev_set, + reason='draft expired', + ) + else: + # Update the action holders. To get this right for people with more + # than one relationship to the document, do removals first, then adds. 
+ # Remove outdated action holders + iesg_state_changed = (prev_state != new_state) and (getattr(new_state, "type_id", None) == "draft-iesg") + if iesg_state_changed: + # Clear the action_holders list on a state change. This will reset the age of any that get added back. + doc.action_holders.clear() + if tags.removed("need-rev"): + # Removed the 'need-rev' tag - drop authors from the action holders list + DocumentActionHolder.objects.filter(document=doc, person__in=doc.author_persons()).delete() + elif tags.added("need-rev"): + # Remove the AD if we're asking for a new revision + DocumentActionHolder.objects.filter(document=doc, person=doc.ad).delete() + + # Add new action holders + if doc.ad: + # AD is an action holder unless specified otherwise for the new state + if iesg_state_changed and new_state.slug not in DocumentActionHolder.CLEAR_ACTION_HOLDERS_STATES: + doc.action_holders.add(doc.ad) + # If AD follow-up is needed, make sure they are an action holder + if tags.added("ad-f-up"): + doc.action_holders.add(doc.ad) + # Authors get the action if a revision is needed + if tags.added("need-rev"): + for auth in doc.author_persons(): + doc.action_holders.add(auth) + + # Now create an event if we changed the set + return add_action_holder_change_event( + doc, + Person.objects.get(name='(System)'), + prev_set, + reason='IESG state changed', + ) + + +def _change_field_and_describe( + author: DocumentAuthor | RfcAuthor, + field: str, + newval, + field_display_name: str | None = None, +): + # make the change + oldval = getattr(author, field) + setattr(author, field, newval) + + was_empty = oldval is None or len(str(oldval)) == 0 + now_empty = newval is None or len(str(newval)) == 0 + + # describe the change + if oldval == newval: + return None + else: + if field_display_name is None: + field_display_name = field + + if was_empty and not now_empty: + return 'set {field} to "{new}"'.format( + field=field_display_name, new=newval + ) + elif now_empty and not was_empty: + return 'cleared {field} (was "{old}")'.format( + field=field_display_name, old=oldval + ) + else: + return 'changed {field} from "{old}" to "{new}"'.format( + field=field_display_name, old=oldval, new=newval + ) def update_documentauthors(doc, new_docauthors, by=None, basis=None): @@ -550,27 +618,6 @@ def update_documentauthors(doc, new_docauthors, by=None, basis=None): used. These objects will not be saved, their attributes will be used to create new DocumentAuthor instances. (The document and order fields will be ignored.) 
""" - def _change_field_and_describe(auth, field, newval): - # make the change - oldval = getattr(auth, field) - setattr(auth, field, newval) - - was_empty = oldval is None or len(str(oldval)) == 0 - now_empty = newval is None or len(str(newval)) == 0 - - # describe the change - if oldval == newval: - return None - else: - if was_empty and not now_empty: - return 'set {field} to "{new}"'.format(field=field, new=newval) - elif now_empty and not was_empty: - return 'cleared {field} (was "{old}")'.format(field=field, old=oldval) - else: - return 'changed {field} from "{old}" to "{new}"'.format( - field=field, old=oldval, new=newval - ) - persons = [] changes = [] # list of change descriptions @@ -614,6 +661,123 @@ def _change_field_and_describe(auth, field, newval): ) for change in changes ] + +def update_rfcauthors( + rfc: Document, new_rfcauthors: Iterable[RfcAuthor], by: Person | None = None +) -> Iterable[EditedRfcAuthorsDocEvent]: + def _find_matching_author( + author_to_match: RfcAuthor, existing_authors: Iterable[RfcAuthor] + ) -> RfcAuthor | None: + """Helper to find a matching existing author""" + if author_to_match.person_id is not None: + for candidate in existing_authors: + if candidate.person_id == author_to_match.person_id: + return candidate + return None # no match + # author does not have a person, match on titlepage name + for candidate in existing_authors: + if candidate.titlepage_name == author_to_match.titlepage_name: + return candidate + return None # no match + + def _rfcauthor_from_documentauthor(docauthor: DocumentAuthor) -> RfcAuthor: + """Helper to create an equivalent RfcAuthor from a DocumentAuthor""" + return RfcAuthor( + document_id=docauthor.document_id, + titlepage_name=docauthor.person.plain_name(), # closest thing we have + is_editor=False, + person_id=docauthor.person_id, + affiliation=docauthor.affiliation, + country=docauthor.country, + order=docauthor.order, + ) + + # Is this the first time this document is getting an RfcAuthor? If so, the + # updates will need to account for the model change. 
+ converting_from_docauthors = not rfc.rfcauthor_set.exists() + + if converting_from_docauthors: + original_authors = [ + _rfcauthor_from_documentauthor(da) for da in rfc.documentauthor_set.all() + ] + else: + original_authors = list(rfc.rfcauthor_set.all()) + + authors_to_commit = [] + changes = [] + for order, new_author in enumerate(new_rfcauthors): + matching_author = _find_matching_author(new_author, original_authors) + if matching_author is not None: + # Update existing matching author using new_author data + authors_to_commit.append(matching_author) + original_authors.remove(matching_author) # avoid reuse + # Describe changes to this author + author_changes = [] + # Update fields other than order + for field in ["titlepage_name", "is_editor", "affiliation", "country"]: + author_changes.append( + _change_field_and_describe( + matching_author, + field, + getattr(new_author, field), + # List titlepage_name as "name" in logs + "name" if field == "titlepage_name" else field, + ) + ) + # Update order + author_changes.append( + _change_field_and_describe(matching_author, "order", order + 1) + ) + matching_author.save() + author_change_summary = ", ".join( + [ch for ch in author_changes if ch is not None] + ) + if len(author_change_summary) > 0: + changes.append( + 'Changed author "{name}": {summary}'.format( + name=matching_author.titlepage_name, + summary=author_change_summary, + ) + ) + else: + # No author matched, so update the new_author and use that + new_author.document = rfc + new_author.order = order + 1 + new_author.save() + if new_author.person_id is not None: + person_desc = f"Person {new_author.person_id}" + else: + person_desc = "no Person linked" + changes.append( + f'Added "{new_author.titlepage_name}" ({person_desc}) as author' + ) + # Any authors left in original_authors are no longer in the list, so remove them + for removed_author in original_authors: + # Skip actual removal of old authors if we are converting from the + # DocumentAuthor models - the original_authors were just stand-ins anyway. + if not converting_from_docauthors: + removed_author.delete() + if removed_author.person_id is not None: + person_desc = f"Person {removed_author.person_id}" + else: + person_desc = "no Person linked" + changes.append( + f'Removed "{removed_author.titlepage_name}" ({person_desc}) as author' + ) + # Create DocEvents, but leave it up to caller to save + if by is None: + by = Person.objects.get(name="(System)") + return [ + EditedRfcAuthorsDocEvent( + type="edited_authors", + by=by, + doc=rfc, + desc=change, + ) + for change in changes + ] + + def update_reminder(doc, reminder_type_slug, event, due_date): reminder_type = DocReminderTypeName.objects.get(slug=reminder_type_slug) @@ -665,6 +829,22 @@ def nice_consensus(consensus): } return mapping[consensus] +def last_ballot_doc_revision(doc, person): + """ Return the document revision for the most recent ballot position + by the provided user. 
""" + ballot = doc.active_ballot() + if ballot is None or person is None: + return None + balloters = get_active_balloters(ballot.ballot_type) + if person not in balloters: + return None + position_queryset = BallotPositionDocEvent.objects.filter(type="changed_ballot_position", balloter=person, ballot=ballot).order_by("-time") + if not position_queryset.exists(): + return None + ballot_time = position_queryset.first().time + doc_rev = NewRevisionDocEvent.objects.filter(doc=doc, time__lte=ballot_time).order_by('-time').first().rev + return doc_rev + def has_same_ballot(doc, date1, date2=None): """ Test if the most recent ballot created before the end of date1 is the same as the most recent ballot created before the @@ -749,7 +929,7 @@ def update_telechat(request, doc, by, new_telechat_date, new_returning_item=None else: e.desc = "Removed from agenda for telechat" elif on_agenda and new_telechat_date != prev_telechat: - e.desc = "Telechat date has been changed to %s from %s" % ( + e.desc = "Telechat date has been changed to %s (Previous date was %s)" % ( new_telechat_date, prev_telechat) else: # we didn't reschedule but flipped returning item bit - let's @@ -777,51 +957,93 @@ def rebuild_reference_relations(doc, filenames): filenames should be a dict mapping file ext (i.e., type) to the full path of each file. """ - if doc.type.slug != 'draft': + if doc.type.slug not in ["draft", "rfc"]: + log.log(f"rebuild_reference_relations called for non draft/rfc doc {doc.name}") return None - # try XML first - if 'xml' in filenames: - refs = XMLDraft(filenames['xml']).get_refs() - elif 'txt' in filenames: - filename = filenames['txt'] - try: - refs = draft.PlaintextDraft.from_file(filename).get_refs() - except IOError as e: - return { 'errors': ["%s :%s" % (e.strerror, filename)] } - else: - return {'errors': ['No Internet-Draft text available for rebuilding reference relations. Need XML or plaintext.']} - doc.relateddocument_set.filter(relationship__slug__in=['refnorm','refinfo','refold','refunk']).delete() + if "xml" not in filenames and "txt" not in filenames: + log.log(f"rebuild_reference_relations error: no file available for {doc.name}") + return { + "errors": [ + "No file available for rebuilding reference relations. Need XML or plaintext." 
+ ] } + else: + try: + # try XML first + if "xml" in filenames: + refs = XMLDraft(filenames["xml"]).get_refs() + elif "txt" in filenames: + filename = filenames["txt"] + refs = draft.PlaintextDraft.from_file(filename).get_refs() + except (IOError, UnicodeDecodeError) as e: + log.log(f"rebuild_reference_relations error: On {doc.name}: {e}") + return {"errors": [f"{e}: {filename}"]} + + before = set(doc.relateddocument_set.filter( + relationship__slug__in=["refnorm", "refinfo", "refold", "refunk"] + ).values_list("relationship__slug","target__name")) warnings = [] errors = [] unfound = set() - for ( ref, refType ) in refs.items(): - refdoc = DocAlias.objects.filter(name=ref) - if not refdoc and re.match(r"^draft-.*-\d{2}$", ref): - refdoc = DocAlias.objects.filter(name=ref[:-3]) + intended = set() + names = [ref for ref in refs] + names.extend([ref[:-3] for ref in refs if re.match(r"^draft-.*-\d{2}$", ref)]) + queryset = Document.objects.filter(name__in=names) + for ref, refType in refs.items(): + refdoc = queryset.filter(name=ref) + if not refdoc.exists() and re.match(r"^draft-.*-\d{2}$", ref): + refdoc = queryset.filter(name=ref[:-3]) count = refdoc.count() - # As of Dec 2021, DocAlias has a unique constraint on the name field, so count > 1 should not occur if count == 0: - unfound.add( "%s" % ref ) + unfound.add("%s" % ref) continue elif count > 1: - errors.append("Too many DocAlias objects found for %s"%ref) + log.unreachable("2026-3-16") # This branch is holdover from DocAlias + errors.append("Too many Document objects found for %s" % ref) else: # Don't add references to ourself - if doc != refdoc[0].document: - RelatedDocument.objects.get_or_create( source=doc, target=refdoc[ 0 ], relationship=DocRelationshipName.objects.get( slug='ref%s' % refType ) ) + if doc != refdoc[0]: + intended.add((f"ref{refType}", refdoc[0].name)) + if unfound: - warnings.append('There were %d references with no matching DocAlias'%len(unfound)) + warnings.append( + "There were %d references with no matching Document" % len(unfound) + ) + + if intended != before: + for slug, name in before-intended: + doc.relateddocument_set.filter(target__name=name, relationship_id=slug).delete() + for slug, name in intended-before: + doc.relateddocument_set.create( + target=queryset.get(name=name), + relationship_id=slug + ) + after = set(doc.relateddocument_set.filter( + relationship__slug__in=["refnorm", "refinfo", "refold", "refunk"] + ).values_list("relationship__slug","target__name")) + if after != intended: + errors.append("Attempted change didn't achieve intended results") + changed_references = True + else: + changed_references = False ret = {} if errors: - ret['errors']=errors + ret["errors"] = errors if warnings: - ret['warnings']=warnings + ret["warnings"] = warnings if unfound: - ret['unfound']=list(unfound) + ret["unfound"] = list(unfound) + + logmsg = f"rebuild_reference_relations for {doc.name}: " + logmsg += "changed references" if changed_references else "references unchanged" + if ret: + logmsg += f" {json.dumps(ret)}" + + log.log(logmsg) return ret @@ -848,26 +1070,26 @@ def set_replaces_for_document(request, doc, new_replaces, by, email_subject, com for d in old_replaces: if d not in new_replaces: - other_addrs = gather_address_lists('doc_replacement_changed',doc=d.document) + other_addrs = gather_address_lists('doc_replacement_changed',doc=d) to.update(other_addrs.to) cc.update(other_addrs.cc) RelatedDocument.objects.filter(source=doc, target=d, relationship=relationship).delete() if not
RelatedDocument.objects.filter(target=d, relationship=relationship): - s = 'active' if d.document.expires > timezone.now() else 'expired' - d.document.set_state(State.objects.get(type='draft', slug=s)) + s = 'active' if d.expires > timezone.now() else 'expired' + d.set_state(State.objects.get(type='draft', slug=s)) for d in new_replaces: if d not in old_replaces: - other_addrs = gather_address_lists('doc_replacement_changed',doc=d.document) + other_addrs = gather_address_lists('doc_replacement_changed',doc=d) to.update(other_addrs.to) cc.update(other_addrs.cc) RelatedDocument.objects.create(source=doc, target=d, relationship=relationship) - d.document.set_state(State.objects.get(type='draft', slug='repl')) + d.set_state(State.objects.get(type='draft', slug='repl')) - if d.document.stream_id in ('irtf','ise','iab'): - repl_state = State.objects.get(type_id='draft-stream-%s'%d.document.stream_id, slug='repl') - d.document.set_state(repl_state) - events.append(StateDocEvent.objects.create(doc=d.document, rev=d.document.rev, by=by, type='changed_state', desc="Set stream state to Replaced",state_type=repl_state.type, state=repl_state)) + if d.stream_id in ('irtf','ise','iab'): + repl_state = State.objects.get(type_id='draft-stream-%s'%d.stream_id, slug='repl') + d.set_state(repl_state) + events.append(StateDocEvent.objects.create(doc=d, rev=d.rev, by=by, type='changed_state', desc="Set stream state to Replaced",state_type=repl_state.type, state=repl_state)) # make sure there are no lingering suggestions duplicating new replacements RelatedDocument.objects.filter(source=doc, target__in=new_replaces, relationship="possibly-replaces").delete() @@ -937,7 +1159,7 @@ def extract_complete_replaces_ancestor_mapping_for_docs(names): break relations = ( RelatedDocument.objects.filter(source__name__in=front, relationship="replaces") - .select_related("target").values_list("source__name", "target__docs__name") ) + .select_related("target").values_list("source__name", "target__name") ) if not relations: break @@ -958,49 +1180,67 @@ def make_rev_history(doc): def get_predecessors(doc, predecessors=None): if predecessors is None: - predecessors = [] + predecessors = set() if hasattr(doc, 'relateddocument_set'): - for alias in doc.related_that_doc('replaces'): - for document in alias.docs.all(): - if document not in predecessors: - predecessors.append(document) - predecessors.extend(get_predecessors(document, predecessors)) + for document in doc.related_that_doc('replaces'): + if document not in predecessors: + predecessors.add(document) + predecessors.update(get_predecessors(document, predecessors)) + if doc.came_from_draft(): + predecessors.add(doc.came_from_draft()) + predecessors.update(get_predecessors(doc.came_from_draft(), predecessors)) return predecessors def get_ancestors(doc, ancestors = None): if ancestors is None: - ancestors = [] + ancestors = set() if hasattr(doc, 'relateddocument_set'): - for alias in doc.related_that('replaces'): - for document in alias.docs.all(): - if document not in ancestors: - ancestors.append(document) - ancestors.extend(get_ancestors(document, ancestors)) + for document in doc.related_that('replaces'): + if document not in ancestors: + ancestors.add(document) + ancestors.update(get_ancestors(document, ancestors)) + if doc.became_rfc(): + if doc.became_rfc() not in ancestors: + ancestors.add(doc.became_rfc()) + ancestors.update(get_ancestors(doc.became_rfc(), ancestors)) return ancestors def get_replaces_tree(doc): tree = get_predecessors(doc) - 
tree.extend(get_ancestors(doc)) + tree.update(get_ancestors(doc)) return tree history = {} docs = get_replaces_tree(doc) if docs is not None: - docs.append(doc) + docs.add(doc) for d in docs: - for e in d.docevent_set.filter(type='new_revision').distinct(): - if hasattr(e, 'newrevisiondocevent'): - url = urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=d)) + e.newrevisiondocevent.rev + "/" - history[url] = { - 'name': d.name, - 'rev': e.newrevisiondocevent.rev, - 'published': e.time.isoformat(), - 'url': url, - } - if d.history_set.filter(rev=e.newrevisiondocevent.rev).exists(): - history[url]['pages'] = d.history_set.filter(rev=e.newrevisiondocevent.rev).first().pages + if d.type_id == "rfc": + url = urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=d)) + e = d.docevent_set.filter(type="published_rfc").order_by("-time").first() + history[url] = { + "name": d.name, + "rev": d.name, + "published": e and e.time.isoformat(), + "url": url, + } + else: + for e in d.docevent_set.filter(type='new_revision').distinct(): + if hasattr(e, 'newrevisiondocevent'): + url = urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=d)) + e.newrevisiondocevent.rev + "/" + history[url] = { + 'name': d.name, + 'rev': e.newrevisiondocevent.rev, + 'published': e.time.isoformat(), + 'url': url, + } + if d.history_set.filter(rev=e.newrevisiondocevent.rev).exists(): + history[url]['pages'] = d.history_set.filter(rev=e.newrevisiondocevent.rev).first().pages if doc.type_id == "draft": + # Do nothing - all draft revisions are captured above already. + e = None + elif doc.type_id == "rfc": # e.time.date() agrees with RPC publication date when shown in the RPC_TZINFO time zone e = doc.latest_event(type='published_rfc') else: @@ -1008,31 +1248,20 @@ def get_replaces_tree(doc): if e: url = urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=e.doc)) history[url] = { - 'name': e.doc.canonical_name(), - 'rev': e.doc.canonical_name(), + 'name': e.doc.name, + 'rev': e.doc.name, 'published': e.time.isoformat(), 'url': url } - if hasattr(e, 'newrevisiondocevent') and doc.history_set.filter(rev=e.newrevisiondocevent.rev).exists(): + if doc.type_id != "rfc" and hasattr(e, 'newrevisiondocevent') and doc.history_set.filter(rev=e.newrevisiondocevent.rev).exists(): history[url]['pages'] = doc.history_set.filter(rev=e.newrevisiondocevent.rev).first().pages history = list(history.values()) return sorted(history, key=lambda x: x['published']) -def get_search_cache_key(params): - from ietf.doc.views_search import SearchForm - fields = set(SearchForm.base_fields) - set(['sort',]) - kwargs = dict([ (k,v) for (k,v) in list(params.items()) if k in fields ]) - key = "doc:document:search:" + hashlib.sha512(json.dumps(kwargs, sort_keys=True).encode('utf-8')).hexdigest() - return key - def build_file_urls(doc: Union[Document, DocHistory]): - if doc.type_id != 'draft': - return [], [] - - if doc.get_state_slug() == "rfc": - name = doc.canonical_name() - base_path = os.path.join(settings.RFC_PATH, name + ".") + if doc.type_id == "rfc": + base_path = os.path.join(settings.RFC_PATH, doc.name + ".") possible_types = settings.RFC_FILE_TYPES found_types = [t for t in possible_types if os.path.exists(base_path + t)] @@ -1040,64 +1269,67 @@ def build_file_urls(doc: Union[Document, DocHistory]): file_urls = [] for t in found_types: + if t == "ps": # Postscript might have been submitted but should not be displayed in the list of URLs + continue label = "plain text" if t == "txt" else t - 
file_urls.append((label, base + name + "." + t)) + file_urls.append((label, base + doc.name + "." + t)) if "pdf" not in found_types and "txt" in found_types: - file_urls.append(("pdf", base + "pdfrfc/" + name + ".txt.pdf")) + file_urls.append(("pdf", base + "pdfrfc/" + doc.name + ".txt.pdf")) if "txt" in found_types: - file_urls.append(("htmlized", urlreverse('ietf.doc.views_doc.document_html', kwargs=dict(name=name)))) + file_urls.append(("htmlized", urlreverse('ietf.doc.views_doc.document_html', kwargs=dict(name=doc.name)))) if doc.tags.filter(slug="verified-errata").exists(): - file_urls.append(("with errata", settings.RFC_EDITOR_INLINE_ERRATA_URL.format(rfc_number=doc.rfc_number()))) - file_urls.append(("bibtex", urlreverse('ietf.doc.views_doc.document_bibtex',kwargs=dict(name=name)))) - elif doc.rev: + file_urls.append(("with errata", settings.RFC_EDITOR_INLINE_ERRATA_URL.format(rfc_number=doc.rfc_number))) + file_urls.append(("bibtex", urlreverse('ietf.doc.views_doc.document_bibtex',kwargs=dict(name=doc.name)))) + elif doc.type_id == "draft" and doc.rev != "": base_path = os.path.join(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR, doc.name + "-" + doc.rev + ".") possible_types = settings.IDSUBMIT_FILE_TYPES found_types = [t for t in possible_types if os.path.exists(base_path + t)] base = settings.IETF_ID_ARCHIVE_URL file_urls = [] for t in found_types: + if t == "ps": # Postscript might have been submitted but should not be displayed in the list of URLs + continue label = "plain text" if t == "txt" else t file_urls.append((label, base + doc.name + "-" + doc.rev + "." + t)) - if doc.text(): + if doc.text_exists(): file_urls.append(("htmlized", urlreverse('ietf.doc.views_doc.document_html', kwargs=dict(name=doc.name, rev=doc.rev)))) file_urls.append(("pdfized", urlreverse('ietf.doc.views_doc.document_pdfized', kwargs=dict(name=doc.name, rev=doc.rev)))) file_urls.append(("bibtex", urlreverse('ietf.doc.views_doc.document_bibtex',kwargs=dict(name=doc.name,rev=doc.rev)))) file_urls.append(("bibxml", urlreverse('ietf.doc.views_doc.document_bibxml',kwargs=dict(name=doc.name,rev=doc.rev)))) else: - # As of 2022-12-14, there are 1463 Document and 3136 DocHistory records with type='draft' and rev=''. - # All of these are in the rfc state and are covered by the above cases. - log.unreachable('2022-12-14') + if doc.type_id == "draft": + # TODO: look at the state of the database post migration and update this comment, or remove the block + # As of 2022-12-14, there are 1463 Document and 3136 DocHistory records with type='draft' and rev=''. + # All of these are in the rfc state and are covered by the above cases. 
+ log.unreachable('2022-12-14') file_urls = [] found_types = [] - + return file_urls, found_types -def augment_docs_and_user_with_user_info(docs, user): +def augment_docs_and_person_with_person_info(docs, person): """Add attribute to each document with whether the document is tracked - or has a review wish by the user or not, and the review teams the user is on.""" + or has a review wish by the person or not, and the review teams the person is on.""" tracked = set() review_wished = set() - - if user and user.is_authenticated: - user.review_teams = Group.objects.filter( - reviewteamsettings__isnull=False, role__person__user=user, role__name='reviewer') - doc_pks = [d.pk for d in docs] - clist = CommunityList.objects.filter(user=user).first() - if clist: - tracked.update( - docs_tracked_by_community_list(clist).filter(pk__in=doc_pks).values_list("pk", flat=True)) + # used in templates + person.review_teams = Group.objects.filter( + reviewteamsettings__isnull=False, role__person=person, role__name='reviewer') - try: - wishes = ReviewWish.objects.filter(person=Person.objects.get(user=user)) - wishes = wishes.filter(doc__pk__in=doc_pks).values_list("doc__pk", flat=True) - review_wished.update(wishes) - except Person.DoesNotExist: - pass + doc_pks = [d.pk for d in docs] + clist = CommunityList.objects.filter(person=person).first() + if clist: + tracked.update( + docs_tracked_by_community_list(clist).filter(pk__in=doc_pks).values_list("pk", flat=True)) + + wishes = ReviewWish.objects.filter(person=person) + wishes = wishes.filter(doc__pk__in=doc_pks).values_list("doc__pk", flat=True) + review_wished.update(wishes) for d in docs: d.tracked_in_personal_community_list = d.pk in tracked @@ -1139,21 +1371,24 @@ def generate_idnits2_rfc_status(): 'unkn': 'U', } - rfcs = Document.objects.filter(type_id='draft',states__slug='rfc',states__type='draft') + rfcs = Document.objects.filter(type_id='rfc') for rfc in rfcs: - offset = int(rfc.rfcnum)-1 + offset = int(rfc.rfc_number)-1 blob[offset] = symbols[rfc.std_level_id] if rfc.related_that('obs'): blob[offset] = 'O' # Workarounds for unusual states in the datatracker - # Document.get(docalias='rfc6312').rfcnum == 6342 - # 6312 was published with the wrong rfc number in it - # weird workaround in the datatracker - there are two - # DocAliases starting with rfc - the canonical name code - # searches for the lexically highest alias starting with rfc - # which is getting lucky. + # The explanation for 6312 is from before docalias was removed + # The workaround is still needed, even if the datatracker + # state no longer matches what's described here: + # Document.get(docalias='rfc6312').rfc_number == 6342 + # 6312 was published with the wrong rfc number in it + # weird workaround in the datatracker - there are two + # DocAliases starting with rfc - the canonical name code + # searches for the lexically highest alias starting with rfc + # which is getting lucky. blob[6312 - 1] = 'O' # RFC200 is an old RFC List by Number @@ -1169,7 +1404,7 @@ def generate_idnits2_rfc_status(): def generate_idnits2_rfcs_obsoleted(): obsdict = defaultdict(list) for r in RelatedDocument.objects.filter(relationship_id='obs'): - obsdict[int(r.target.document.rfc_number())].append(int(r.source.rfc_number())) + obsdict[int(r.target.rfc_number)].append(int(r.source.rfc_number)) # Aren't these already guaranteed to be ints? 
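    # obsdict maps each obsoleted RFC's number to the numbers of the RFCs that obsolete it,
    # e.g. {1234: [5678, 9012]} (hypothetical numbers); each list is sorted below so the
    # rendered output is in numeric order.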
for k in obsdict: obsdict[k] = sorted(obsdict[k]) return render_to_string('doc/idnits2-rfcs-obsoleted.txt', context={'obsitems':sorted(obsdict.items())}) @@ -1198,19 +1433,27 @@ def fuzzy_find_documents(name, rev=None): if re.match("^[0-9]+$", name): name = f'rfc{name}' + if name.startswith("rfc"): + sought_type = "rfc" + name = name.split("-")[0] # strip any noise (like a revision) at and after the first hyphen + rev = None # If someone is looking for an RFC and supplies a version, ignore it. + else: + sought_type = "draft" + # see if we can find a document using this name - docs = Document.objects.filter(docalias__name=name, type_id='draft') - if rev and not docs.exists(): - # No document found, see if the name/rev split has been misidentified. + docs = Document.objects.filter(name=name, type_id=sought_type) + if sought_type == "draft" and rev and not docs.exists(): + # No draft found, see if the name/rev split has been misidentified. # Handles some special cases, like draft-ietf-tsvwg-ieee-802-11. name = '%s-%s' % (name, rev) - docs = Document.objects.filter(docalias__name=name, type_id='draft') + docs = Document.objects.filter(name=name, type_id='draft') if docs.exists(): rev = None # found a doc by name with rev = None, so update that FoundDocuments = namedtuple('FoundDocuments', 'documents matched_name matched_rev') return FoundDocuments(docs, name, rev) + def bibxml_for_draft(doc, rev=None): if rev is not None and rev != doc.rev: @@ -1245,3 +1488,251 @@ def bibxml_for_draft(doc, rev=None): return render_to_string('doc/bibxml.xml', {'name':name, 'doc':doc, 'doc_bibtype':'I-D', 'settings':settings}) + +class DraftAliasGenerator: + days = 2 * 365 + + def __init__(self, draft_queryset=None): + if draft_queryset is not None: + self.draft_queryset = draft_queryset.filter(type_id="draft") # only drafts allowed + else: + self.draft_queryset = Document.objects.filter(type_id="draft") + + def get_draft_ad_emails(self, doc): + """Get AD email addresses for the given draft, if any.""" + from ietf.group.utils import get_group_ad_emails # avoid circular import + ad_emails = set() + # If working group document, return current WG ADs + if doc.group and doc.group.acronym != "none": + ad_emails.update(get_group_ad_emails(doc.group)) + # Document may have an explicit AD set + if doc.ad: + ad_emails.add(doc.ad.email_address()) + return ad_emails + + def get_draft_chair_emails(self, doc): + """Get chair email addresses for the given draft, if any.""" + from ietf.group.utils import get_group_role_emails # avoid circular import + chair_emails = set() + if doc.group: + chair_emails.update(get_group_role_emails(doc.group, ["chair", "secr"])) + return chair_emails + + def get_draft_shepherd_email(self, doc): + """Get shepherd email addresses for the given draft, if any.""" + shepherd_email = set() + if doc.shepherd: + shepherd_email.add(doc.shepherd.email_address()) + return shepherd_email + + def get_draft_authors_emails(self, doc): + """Get list of authors for the given draft.""" + author_emails = set() + for email in Email.objects.filter(documentauthor__document=doc): + if email.active: + author_emails.add(email.address) + elif email.person: + person_email = email.person.email_address() + if person_email: + author_emails.add(person_email) + return author_emails + + def get_draft_notify_emails(self, doc): + """Get list of email addresses to notify for the given draft.""" + ad_email_alias_regex = r"^%s.ad@(%s|%s)$" % (doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER) + all_email_alias_regex = 
r"^%s.all@(%s|%s)$" % (doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER) + author_email_alias_regex = r"^%s@(%s|%s)$" % (doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER) + notify_email_alias_regex = r"^%s.notify@(%s|%s)$" % ( + doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER) + shepherd_email_alias_regex = r"^%s.shepherd@(%s|%s)$" % ( + doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER) + notify_emails = set() + if doc.notify: + for e in doc.notify.split(','): + e = e.strip() + if re.search(ad_email_alias_regex, e): + notify_emails.update(self.get_draft_ad_emails(doc)) + elif re.search(author_email_alias_regex, e): + notify_emails.update(self.get_draft_authors_emails(doc)) + elif re.search(shepherd_email_alias_regex, e): + notify_emails.update(self.get_draft_shepherd_email(doc)) + elif re.search(all_email_alias_regex, e): + notify_emails.update(self.get_draft_ad_emails(doc)) + notify_emails.update(self.get_draft_authors_emails(doc)) + notify_emails.update(self.get_draft_shepherd_email(doc)) + elif re.search(notify_email_alias_regex, e): + pass + else: + (name, email) = parseaddr(e) + notify_emails.add(email) + return notify_emails + + def _yield_aliases_for_draft(self, doc)-> Iterator[tuple[str, list[str]]]: + alias = doc.name + all = set() + + # no suffix and .authors are the same list + emails = self.get_draft_authors_emails(doc) + all.update(emails) + if emails: + yield alias, list(emails) + yield alias + ".authors", list(emails) + + # .chairs = group chairs + emails = self.get_draft_chair_emails(doc) + if emails: + all.update(emails) + yield alias + ".chairs", list(emails) + + # .ad = sponsoring AD / WG AD (WG document) + emails = self.get_draft_ad_emails(doc) + if emails: + all.update(emails) + yield alias + ".ad", list(emails) + + # .notify = notify email list from the Document + emails = self.get_draft_notify_emails(doc) + if emails: + all.update(emails) + yield alias + ".notify", list(emails) + + # .shepherd = shepherd email from the Document + emails = self.get_draft_shepherd_email(doc) + if emails: + all.update(emails) + yield alias + ".shepherd", list(emails) + + # .all = everything from above + if all: + yield alias + ".all", list(all) + + def __iter__(self) -> Iterator[tuple[str, list[str]]]: + # Internet-Drafts with active status or expired within self.days + show_since = timezone.now() - datetime.timedelta(days=self.days) + drafts = self.draft_queryset + + # Look up the draft-active state properly. Doing this with + # states__type_id, states__slug directly in the `filter()` + # works, but it does not work as expected in `exclude()`. + active_state = State.objects.get(type_id="draft", slug="active") + active_pks = [] # build a static list of the drafts we actually returned as "active" + active_drafts = drafts.filter(states=active_state) + for this_draft in active_drafts: + active_pks.append(this_draft.pk) + for alias, addresses in self._yield_aliases_for_draft(this_draft): + yield alias, addresses + + # Annotate with the draft state slug so we can check for drafts that + # have become RFCs + inactive_recent_drafts = ( + drafts.exclude(pk__in=active_pks) # don't re-filter by state, states may have changed during the run! + .filter(expires__gte=show_since) + .annotate( + # Why _default_manager instead of objects? 
See: + # https://docs.djangoproject.com/en/4.2/topics/db/managers/#django.db.models.Model._default_manager + draft_state_slug=Document.states.through._default_manager.filter( + document__pk=OuterRef("pk"), + state__type_id="draft" + ).values("state__slug"), + ) + ) + for this_draft in inactive_recent_drafts: + # Omit drafts that became RFCs, unless they were published in the last DEFAULT_YEARS + if this_draft.draft_state_slug == "rfc": + rfc = this_draft.became_rfc() + log.assertion("rfc is not None") + if rfc.latest_event(type='published_rfc').time < show_since: + continue + for alias, addresses in self._yield_aliases_for_draft(this_draft): + yield alias, addresses + + +def get_doc_email_aliases(name: Optional[str] = None): + aliases = [] + for (alias, alist) in DraftAliasGenerator( + Document.objects.filter(type_id="draft", name=name) if name else None + ): + # alias is draft-name.alias_type + doc_name, _dot, alias_type = alias.partition(".") + aliases.append({ + "doc_name": doc_name, + "alias_type": f".{alias_type}" if alias_type else "", + "expansion": ", ".join(sorted(alist)), + }) + return sorted(aliases, key=lambda a: (a["doc_name"])) + + +def investigate_fragment(name_fragment: str): + cache = caches["default"] + # Ensure name_fragment does not interact badly with the cache key handling + name_digest = sha384(name_fragment.encode("utf8")).hexdigest() + cache_key = f"investigate_fragment:{name_digest}" + result = cache.get(cache_key) + if result is None: + can_verify = set() + for root in [settings.INTERNET_DRAFT_PATH, settings.INTERNET_DRAFT_ARCHIVE_DIR]: + can_verify.update(list(Path(root).glob(f"*{name_fragment}*"))) + archive_verifiable_names = set([p.name for p in can_verify]) + # Can also verify drafts in proceedings directories + can_verify.update(list(Path(settings.AGENDA_PATH).glob(f"**/*{name_fragment}*"))) + + # N.B. 
This reflects the assumption that the internet draft archive dir is in + # a directory with other collections (at /a/ietfdata/draft/collections as this is written) + unverifiable_collections = set([ + p for p in + Path(settings.INTERNET_DRAFT_ARCHIVE_DIR).parent.glob(f"**/*{name_fragment}*") + if p.name not in archive_verifiable_names + ]) + + unverifiable_collections.difference_update(can_verify) + + expected_names = set([p.name for p in can_verify.union(unverifiable_collections)]) + maybe_unexpected = list( + Path(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR).glob(f"*{name_fragment}*") + ) + unexpected = [p for p in maybe_unexpected if p.name not in expected_names] + result = dict( + can_verify=can_verify, + unverifiable_collections=unverifiable_collections, + unexpected=unexpected, + ) + # 1 hour caching + cache.set(key=cache_key, timeout=3600, value=result) + return result + + +def update_or_create_draft_bibxml_file(doc, rev): + log.assertion("doc.type_id == 'draft'") + normalized_bibxml = re.sub(r"\r\n?", r"\n", bibxml_for_draft(doc, rev)) + ref_rev_file_path = Path(settings.BIBXML_BASE_PATH) / "bibxml-ids" / f"reference.I-D.{doc.name}-{rev}.xml" + try: + existing_bibxml = ref_rev_file_path.read_text(encoding="utf8") + except IOError: + existing_bibxml = "" + if normalized_bibxml.strip() != existing_bibxml.strip(): + log.log(f"Writing {ref_rev_file_path}") + ref_rev_file_path.write_text(normalized_bibxml, encoding="utf8") # TODO-BLOBSTORE + + +def ensure_draft_bibxml_path_exists(): + (Path(settings.BIBXML_BASE_PATH) / "bibxml-ids").mkdir(exist_ok=True) + + +def replicate_stored_objects_for_document(doc: Document) -> int: + """Sync all StoredObjects associated with doc to the replica blob store + + Returns count of StoredObjects queued for replication (which may or may not + be replicated, depending on whether replication is enabled / the storages are + actually BlobdbStorage instances, etc). + """ + # n.b., StoredObjects have a nullable doc_rev field, but Documents do not. + # Until / unless we straighten that out, treat "" and None equivalently when + # matching rev.
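+    # (A StoredObject recorded with doc_rev=None must still match a Document whose rev
+    # is the empty string, hence the extra isnull filter just below.)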
+ qs_matching_rev = StoredObject.objects.filter(doc_rev=doc.rev) + if doc.rev == "": + qs_matching_rev |= StoredObject.objects.filter(doc_rev__isnull=True) + count = 0 + for stored_object in qs_matching_rev.filter(doc_name=doc.name): + force_replication(kind=stored_object.store, name=stored_object.name) + count += 1 + return count diff --git a/ietf/doc/utils_bofreq.py b/ietf/doc/utils_bofreq.py index aec8f60ad6..d01b039b8e 100644 --- a/ietf/doc/utils_bofreq.py +++ b/ietf/doc/utils_bofreq.py @@ -1,12 +1,149 @@ -# Copyright The IETF Trust 2021 All Rights Reserved +# Copyright The IETF Trust 2021-2026 All Rights Reserved +import datetime +from pathlib import Path -from ietf.doc.models import BofreqEditorDocEvent, BofreqResponsibleDocEvent +from django.conf import settings + +from ietf.doc.models import ( + BofreqEditorDocEvent, + BofreqResponsibleDocEvent, + DocEvent, + DocHistory, + Document, +) from ietf.person.models import Person +from ietf.utils import log + def bofreq_editors(bofreq): e = bofreq.latest_event(BofreqEditorDocEvent) return e.editors.all() if e else Person.objects.none() + def bofreq_responsible(bofreq): e = bofreq.latest_event(BofreqResponsibleDocEvent) - return e.responsible.all() if e else Person.objects.none() \ No newline at end of file + return e.responsible.all() if e else Person.objects.none() + + +def fixup_bofreq_timestamps(): # pragma: nocover + """Fixes bofreq event / document timestamps + + Timestamp errors resulted from the bug fixed by + https://github.com/ietf-tools/datatracker/pull/10333 + + Does not fix up -00 revs because the timestamps on these were not affected by + the bug. Replacing their timestamps creates a confusing event history because the + filesystem timestamp is usually a fraction of a second later than other events + created upon the initial rev creation. This causes the "New revision available" + event to appear _after_ these events in the history. Better to leave them as is. + """ + FIX_DEPLOYMENT_TIME = "2026-02-03T01:16:00+00:00" # 12.58.0 -> production + + def _get_doc_time(doc_name: str, rev: str): + path = Path(settings.BOFREQ_PATH) / f"{doc_name}-{rev}.md" + return datetime.datetime.fromtimestamp(path.stat().st_mtime, datetime.UTC) + + # Find affected DocEvents and DocHistories + new_bofreq_events = ( + DocEvent.objects.filter( + doc__type="bofreq", type="new_revision", time__lt=FIX_DEPLOYMENT_TIME + ) + .exclude(rev="00") # bug did not affect rev 00 events + .order_by("doc__name", "rev") + ) + log.log( + f"fixup_bofreq_timestamps: found {new_bofreq_events.count()} " + f"new_revision events before {FIX_DEPLOYMENT_TIME}" + ) + document_fixups = {} + for e in new_bofreq_events: + name = e.doc.name + rev = e.rev + filesystem_time = _get_doc_time(name, rev) + assert e.time < filesystem_time, ( + f"Rev {rev} event timestamp for {name} unexpectedly later than the " + "filesystem timestamp!" + ) + try: + dochistory = DocHistory.objects.filter( + name=name, time__lt=filesystem_time + ).get(rev=rev) + except DocHistory.MultipleObjectsReturned as err: + raise RuntimeError( + f"Multiple DocHistories for {name} rev {rev} exist earlier than the " + "filesystem timestamp!" + ) from err + except DocHistory.DoesNotExist as err: + if rev == "00": + # Unreachable because we don't adjust -00 revs, but could be needed + # if we did, in theory. In practice it's still not reached, but + # keeping the case for completeness. 
+ dochistory = None + else: + raise RuntimeError( + f"No DocHistory for {name} rev {rev} exists earlier than the " + f"filesystem timestamp!" + ) from err + + if name not in document_fixups: + document_fixups[name] = [] + document_fixups[name].append( + { + "event": e, + "dochistory": dochistory, + "filesystem_time": filesystem_time, + } + ) + + # Now do the actual fixup + system_person = Person.objects.get(name="(System)") + for doc_name, fixups in document_fixups.items(): + bofreq = Document.objects.get(type="bofreq", name=doc_name) + log_msg_parts = [] + adjusted_revs = [] + for fixup in fixups: + event_to_fix = fixup["event"] + dh_to_fix = fixup["dochistory"] + new_time = fixup["filesystem_time"] + adjusted_revs.append(event_to_fix.rev) + + # Fix up the event + event_to_fix.time = new_time + event_to_fix.save() + log_msg_parts.append(f"rev {event_to_fix.rev} DocEvent") + + # Fix up the DocHistory + if dh_to_fix is not None: + dh_to_fix.time = new_time + dh_to_fix.save() + log_msg_parts.append(f"rev {dh_to_fix.rev} DocHistory") + + if event_to_fix.rev == bofreq.rev and bofreq.time < new_time: + # Update the Document without calling save(). Only update if + # the time has not changed so we don't inadvertently overwrite + # a concurrent update. + Document.objects.filter(pk=bofreq.pk, time=bofreq.time).update( + time=new_time + ) + bofreq.refresh_from_db() + if bofreq.rev == event_to_fix.rev: + log_msg_parts.append(f"rev {bofreq.rev} Document") + else: + log.log( + "fixup_bofreq_timestamps: WARNING: bofreq Document rev " + f"changed for {bofreq.name}" + ) + log.log(f"fixup_bofreq_timestamps: {bofreq.name}: " + ", ".join(log_msg_parts)) + + # Fix up the Document, if necessary, and add a record of the adjustment + DocEvent.objects.create( + type="added_comment", + by=system_person, + doc=bofreq, + rev=bofreq.rev, + desc=( + "Corrected inaccurate document and new revision event timestamps for " + + ("version " if len(adjusted_revs) == 1 else "versions ") + + ", ".join(adjusted_revs) + ), + ) diff --git a/ietf/doc/utils_charter.py b/ietf/doc/utils_charter.py index 2e85b3cc10..287ce8cece 100644 --- a/ietf/doc/utils_charter.py +++ b/ietf/doc/utils_charter.py @@ -3,11 +3,12 @@ import datetime -import io import os import re import shutil +from pathlib import Path + from django.conf import settings from django.urls import reverse as urlreverse from django.template.loader import render_to_string @@ -62,10 +63,9 @@ def next_approved_revision(rev): return "%#02d" % (int(m.group('major')) + 1) def read_charter_text(doc): - filename = os.path.join(settings.CHARTER_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev)) + filename = Path(settings.CHARTER_PATH) / f"{doc.name}-{doc.rev}.txt" try: - with io.open(filename, 'r') as f: - return f.read() + return filename.read_text() except IOError: return "Error: couldn't read charter text" @@ -92,22 +92,43 @@ def change_group_state_after_charter_approval(group, by): def fix_charter_revision_after_approval(charter, by): # according to spec, 00-02 becomes 01, so copy file and record new revision try: - old = os.path.join(charter.get_file_path(), '%s-%s.txt' % (charter.canonical_name(), charter.rev)) - new = os.path.join(charter.get_file_path(), '%s-%s.txt' % (charter.canonical_name(), next_approved_revision(charter.rev))) + old = os.path.join( + charter.get_file_path(), "%s-%s.txt" % (charter.name, charter.rev) + ) + new = os.path.join( + charter.get_file_path(), + "%s-%s.txt" % (charter.name, next_approved_revision(charter.rev)), + ) shutil.copy(old, new) except 
IOError: log("There was an error copying %s to %s" % (old, new)) + # Also provide a copy to the legacy ftp source directory, which is served by rsync + # This replaces the hardlink copy that ghostlink has made in the past + # Still using a hardlink as long as these are on the same filesystem. + # Staying with os.path vs pathlib.Path until we get to python>=3.10. + charter_dir = os.path.join(settings.FTP_DIR, "charter") + ftp_filepath = os.path.join( + charter_dir, "%s-%s.txt" % (charter.name, next_approved_revision(charter.rev)) + ) + try: + os.link(new, ftp_filepath) + except IOError as ex: + log( + "There was an error creating a hardlink at %s pointing to %s: %s" + % (ftp_filepath, new, ex) + ) events = [] e = NewRevisionDocEvent(doc=charter, by=by, type="new_revision") e.rev = next_approved_revision(charter.rev) - e.desc = "New version available: %s-%s.txt" % (charter.canonical_name(), e.rev) + e.desc = "New version available: %s-%s.txt" % (charter.name, e.rev) e.save() events.append(e) charter.rev = e.rev charter.save_with_history(events) + def historic_milestones_for_charter(charter, rev): """Return GroupMilestone/GroupMilestoneHistory objects for charter document at rev by looking through the history.""" diff --git a/ietf/doc/utils_errata.py b/ietf/doc/utils_errata.py new file mode 100644 index 0000000000..539262151f --- /dev/null +++ b/ietf/doc/utils_errata.py @@ -0,0 +1,35 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +import requests + +from django.conf import settings + +from ietf.utils.log import log + + +def signal_update_rfc_metadata(rfc_number_list=()): + key = getattr(settings, "ERRATA_METADATA_NOTIFICATION_API_KEY", None) + if key is not None: + headers = {"X-Api-Key": settings.ERRATA_METADATA_NOTIFICATION_API_KEY} + post_dict = { + "rfc_number_list": list(rfc_number_list), + } + try: + response = requests.post( + settings.ERRATA_METADATA_NOTIFICATION_URL, + headers=headers, + json=post_dict, + timeout=settings.DEFAULT_REQUESTS_TIMEOUT, + ) + except requests.Timeout as e: + log( + f"POST request timed out for {settings.ERRATA_METADATA_NOTIFICATION_URL}: {e}" + ) + # raise RuntimeError(f'POST request timed out for {settings.ERRATA_METADATA_NOTIFICATION_URL}') from e + return + if response.status_code != 200: + log( + f"POST request failed for {settings.ERRATA_METADATA_NOTIFICATION_URL}: {response.status_code} {response.text}" + ) + else: + log("No API key configured for errata metadata notification, skipping") diff --git a/ietf/doc/utils_r2.py b/ietf/doc/utils_r2.py new file mode 100644 index 0000000000..53fb978303 --- /dev/null +++ b/ietf/doc/utils_r2.py @@ -0,0 +1,17 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +from django.core.files.storage import storages + +from ietf.doc.models import StoredObject + + +def rfcs_are_in_r2(rfc_number_list=()): + r2_rfc_bucket = storages["r2-rfc"] + for rfc_number in rfc_number_list: + stored_objects = StoredObject.objects.filter( + store="rfc", doc_name=f"rfc{rfc_number}" + ) + for stored_object in stored_objects: + if not r2_rfc_bucket.exists(stored_object.name): + return False + return True diff --git a/ietf/doc/utils_red.py b/ietf/doc/utils_red.py new file mode 100644 index 0000000000..5c5879d688 --- /dev/null +++ b/ietf/doc/utils_red.py @@ -0,0 +1,31 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +import requests + +from django.conf import settings + +from ietf.utils.log import log + + +def trigger_red_precomputer(rfc_number_list=()): + url = getattr(settings,
"TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL", None) + if url is not None: + payload = { + "rfcs": ",".join([str(n) for n in rfc_number_list]), + } + try: + log(f"Triggering red precompute multiple for RFCs {rfc_number_list}") + response = requests.post( + url=url, + json=payload, + timeout=settings.DEFAULT_REQUESTS_TIMEOUT, + ) + except requests.Timeout as e: + log(f"POST request timed out for {url} : {e}") + return + if response.status_code // 100 != 2: # 2xx status codes are ok + log( + f"POST request failed for {url} : status_code={response.status_code}" + ) + else: + log("No URL configured for triggering red precompute multiple, skipping") diff --git a/ietf/doc/utils_search.py b/ietf/doc/utils_search.py index 31aedda0d7..a5f461f9bb 100644 --- a/ietf/doc/utils_search.py +++ b/ietf/doc/utils_search.py @@ -9,9 +9,9 @@ from django.conf import settings -from ietf.doc.models import Document, DocAlias, RelatedDocument, DocEvent, TelechatDocEvent, BallotDocEvent +from ietf.doc.models import Document, RelatedDocument, DocEvent, TelechatDocEvent, BallotDocEvent, DocTypeName from ietf.doc.expire import expirable_drafts -from ietf.doc.utils import augment_docs_and_user_with_user_info +from ietf.doc.utils import augment_docs_and_person_with_person_info from ietf.meeting.models import SessionPresentation, Meeting, Session from ietf.review.utils import review_assignments_to_list_for_docs from ietf.utils.timezone import date_today @@ -26,7 +26,7 @@ def fill_in_telechat_date(docs, doc_dict=None, doc_ids=None): doc_dict = dict((d.pk, d) for d in docs) doc_ids = list(doc_dict.keys()) if doc_ids is None: - doc_ids = list(doc_dict.keys()) + doc_ids = list(doc_dict.keys()) seen = set() for e in TelechatDocEvent.objects.filter(doc__id__in=doc_ids, type="scheduled_for_telechat").order_by('-time'): @@ -54,12 +54,13 @@ def fill_in_document_sessions(docs, doc_dict, doc_ids): def fill_in_document_table_attributes(docs, have_telechat_date=False): # fill in some attributes for the document table results to save # some hairy template code and avoid repeated SQL queries - # TODO - this function evolved from something that assumed it was handling only drafts. It still has places where it assumes all docs are drafts where that is not a correct assumption + # TODO - this function evolved from something that assumed it was handling only drafts. 
+ # It still has places where it assumes all docs are drafts where that is not a correct assumption doc_dict = dict((d.pk, d) for d in docs) doc_ids = list(doc_dict.keys()) - rfc_aliases = dict([ (a.document.id, a.name) for a in DocAlias.objects.filter(name__startswith="rfc", docs__id__in=doc_ids) ]) + rfcs = dict((d.pk, d.name) for d in docs if d.type_id == "rfc") # latest event cache event_types = ("published_rfc", @@ -90,10 +91,8 @@ def fill_in_document_table_attributes(docs, have_telechat_date=False): # misc expirable_pks = expirable_drafts(Document.objects.filter(pk__in=doc_ids)).values_list('pk', flat=True) for d in docs: - # emulate canonical name which is used by a lot of the utils - # d.canonical_name = wrap_value(rfc_aliases[d.pk] if d.pk in rfc_aliases else d.name) - if d.rfc_number() != None and d.latest_event_cache["published_rfc"]: + if d.type_id == "rfc" and d.latest_event_cache["published_rfc"]: d.latest_revision_date = d.latest_event_cache["published_rfc"].time elif d.latest_event_cache["new_revision"]: d.latest_revision_date = d.latest_event_cache["new_revision"].time @@ -109,7 +108,10 @@ def fill_in_document_table_attributes(docs, have_telechat_date=False): d.search_heading = "Withdrawn Internet-Draft" d.expirable = False else: - d.search_heading = "%s Internet-Draft" % d.get_state() + if d.type_id == "draft" and d.stream_id == 'ietf' and d.get_state_slug('draft-iesg') != 'idexists': # values can be: ad-eval idexists approved rfcqueue dead iesg-eva + d.search_heading = "%s with the IESG Internet-Draft" % d.get_state() + else: + d.search_heading = "%s Internet-Draft" % d.get_state() if state_slug == "active": d.expirable = d.pk in expirable_pks else: @@ -118,7 +120,7 @@ def fill_in_document_table_attributes(docs, have_telechat_date=False): d.search_heading = "%s" % (d.type,) d.expirable = False - if d.get_state_slug() != "rfc": + if d.type_id == "draft" and d.get_state_slug() != "rfc": d.milestones = [ m for (t, s, v, m) in sorted(((m.time, m.state.slug, m.desc, m) for m in d.groupmilestone_set.all() if m.state_id == "active")) ] d.review_assignments = review_assignments_to_list_for_docs([d]).get(d.name, []) @@ -128,29 +130,30 @@ def fill_in_document_table_attributes(docs, have_telechat_date=False): # RFCs # errata - erratas = set(Document.objects.filter(tags="errata", id__in=list(rfc_aliases.keys())).distinct().values_list("name", flat=True)) - verified_erratas = set(Document.objects.filter(tags="verified-errata", id__in=list(rfc_aliases.keys())).distinct().values_list("name", flat=True)) + erratas = set(Document.objects.filter(tags="errata", id__in=list(rfcs.keys())).distinct().values_list("name", flat=True)) + verified_erratas = set(Document.objects.filter(tags="verified-errata", id__in=list(rfcs.keys())).distinct().values_list("name", flat=True)) for d in docs: d.has_errata = d.name in erratas d.has_verified_errata = d.name in verified_erratas # obsoleted/updated by - for a in rfc_aliases: - d = doc_dict[a] + for rfc in rfcs: + d = doc_dict[rfc] d.obsoleted_by_list = [] d.updated_by_list = [] # Revisit this block after RFCs become first-class Document objects xed_by = list( RelatedDocument.objects.filter( - target__name__in=list(rfc_aliases.values()), + target__name__in=list(rfcs.values()), relationship__in=("obs", "updates"), ).select_related("target") ) - rel_rfc_aliases = { - a.document.id: re.sub(r"rfc(\d+)", r"RFC \1", a.name, flags=re.IGNORECASE) - for a in DocAlias.objects.filter( - name__startswith="rfc", docs__id__in=[rel.source_id for rel in xed_by] + # TODO 
- this likely reduces to something even simpler + rel_rfcs = { + d.id: re.sub(r"rfc(\d+)", r"RFC \1", d.name, flags=re.IGNORECASE) + for d in Document.objects.filter( + type_id="rfc", id__in=[rel.source_id for rel in xed_by] ) } xed_by.sort( @@ -158,18 +161,17 @@ def fill_in_document_table_attributes(docs, have_telechat_date=False): re.sub( r"rfc\s*(\d+)", r"\1", - rel_rfc_aliases[rel.source_id], + rel_rfcs[rel.source_id], flags=re.IGNORECASE, ) ) ) for rel in xed_by: - d = doc_dict[rel.target.document.id] - s = rel_rfc_aliases[rel.source_id] + d = doc_dict[rel.target.id] if rel.relationship_id == "obs": - d.obsoleted_by_list.append(s) + d.obsoleted_by_list.append(rel.source) elif rel.relationship_id == "updates": - d.updated_by_list.append(s) + d.updated_by_list.append(rel.source) def augment_docs_with_related_docs_info(docs): """Augment all documents with related documents information. @@ -179,10 +181,10 @@ def augment_docs_with_related_docs_info(docs): if d.type_id == 'conflrev': if len(d.related_that_doc('conflrev')) != 1: continue - originalDoc = d.related_that_doc('conflrev')[0].document + originalDoc = d.related_that_doc('conflrev')[0] d.pages = originalDoc.pages - -def prepare_document_table(request, docs, query=None, max_results=200): + +def prepare_document_table(request, docs, query=None, max_results=200, show_ad_and_shepherd=True): """Take a queryset of documents and a QueryDict with sorting info and return list of documents with attributes filled in for displaying a full table of information about the documents, plus @@ -193,14 +195,15 @@ def prepare_document_table(request, docs, query=None, max_results=200): # the number of queries docs = docs.select_related("ad", "std_level", "intended_std_level", "group", "stream", "shepherd", ) docs = docs.prefetch_related("states__type", "tags", "groupmilestone_set__group", "reviewrequest_set__team", - "ad__email_set", "docalias__iprdocrel_set") + "ad__email_set", "iprdocrel_set") docs = docs[:max_results] # <- that is still a queryset, but with a LIMIT now docs = list(docs) else: docs = docs[:max_results] fill_in_document_table_attributes(docs) - augment_docs_and_user_with_user_info(docs, request.user) + if request.user.is_authenticated and hasattr(request.user, "person"): + augment_docs_and_person_with_person_info(docs, request.user.person) augment_docs_with_related_docs_info(docs) meta = {} @@ -217,10 +220,14 @@ def num(i): res = [] - rfc_num = d.rfc_number() + rfc_num = num(d.rfc_number) if d.rfc_number else None if d.type_id == "draft": res.append(num(["Active", "Expired", "Replaced", "Withdrawn", "RFC"].index(d.search_heading.split()[0]))) + if "with the IESG" in d.search_heading: + res.append("1") + else: + res.append("0") else: res.append(d.type_id); res.append("-"); @@ -232,25 +239,25 @@ def num(i): elif sort_key == "date": res.append(str(d.latest_revision_date.astimezone(ZoneInfo(settings.TIME_ZONE)))) elif sort_key == "status": - if rfc_num != None: - res.append(num(rfc_num)) + if rfc_num is not None: + res.append(rfc_num) else: res.append(num(d.get_state().order) if d.get_state() else None) elif sort_key == "ipr": res.append(len(d.ipr())) elif sort_key == "ad": - if rfc_num != None: - res.append(num(rfc_num)) + if rfc_num is not None: + res.append(rfc_num) elif d.get_state_slug() == "active": if d.get_state("draft-iesg"): res.append(d.get_state("draft-iesg").order) else: res.append(0) else: - if rfc_num != None: - res.append(num(rfc_num)) + if rfc_num is not None: + res.append(rfc_num) else: - res.append(d.canonical_name()) + 
res.append(d.name) return res @@ -260,12 +267,14 @@ def num(i): if len(docs) == max_results: meta['max'] = max_results - meta['headers'] = [{'title': 'Document', 'key':'document'}, - {'title': 'Title', 'key':'title'}, - {'title': 'Date', 'key':'date'}, - {'title': 'Status', 'key':'status'}, - {'title': 'IPR', 'key':'ipr'}, - {'title': 'AD / Shepherd', 'key':'ad'}] + meta['headers'] = [{'title': 'Document', 'key': 'document'}, + {'title': 'Title', 'key': 'title'}, + {'title': 'Date', 'key': 'date'}, + {'title': 'Status', 'key': 'status'}, + {'title': 'IPR', 'key': 'ipr'}] + if show_ad_and_shepherd: + meta['headers'].append({'title': 'AD / Shepherd', 'key': 'ad'}) + meta['show_ad_and_shepherd'] = show_ad_and_shepherd if query and hasattr(query, "urlencode"): # fed a Django QueryDict d = query.copy() @@ -283,3 +292,86 @@ def num(i): h["sort_url"] = "?" + d.urlencode() return (docs, meta) + + +# The document types and state slugs to include in the AD dashboard +# and AD doc list, in the order they should be shown. +# +# "rfc" is a custom subset of "draft" that we special-case in the code +# to break out these docs into a separate table. +# +AD_WORKLOAD = { + "draft": [ + "pub-req", + "ad-eval", + "lc-req", + "lc", + "goaheadw", + "writeupw", + # "defer", # probably not a useful state to show, since it's rare + "iesg-eva", + "approved", + "ann", + ], + "rfc": [ + "rfcqueue", + "rfc", + ], + "conflrev": [ + "needshep", + "adrev", + "iesgeval", + "approved", # synthesized state for all the "appr-" states + # "withdraw", # probably not a useful state to show + ], + "statchg": [ + "needshep", + "adrev", + "lc-req", + "in-lc", + "iesgeval", + "goahead", + "appr-sent", + # "dead", # probably not a useful state to show + ], + "charter": [ + "notrev", + "infrev", + "intrev", + "extrev", + "iesgrev", + "approved", + # "replaced", # probably not a useful state to show + ], +} + + +def doc_type(doc): + dt = doc.type.slug + if ( + doc.get_state_slug("draft") == "rfc" + or doc.get_state_slug("draft-iesg") == "rfcqueue" + ): + dt = "rfc" + return dt + + +def doc_state(doc): + dt = doc.type.slug + ds = doc.get_state(dt) + if dt == "draft": + dis = doc.get_state("draft-iesg") + if ds.slug == "active" and dis: + return dis.slug + elif dt == "conflrev": + if ds.slug.startswith("appr"): + return "approved" + return ds.slug + + +def doc_type_name(doc_type): + if doc_type == "rfc": + return "RFC" + if doc_type == "draft": + return "Internet-Draft" + return DocTypeName.objects.get(slug=doc_type).name diff --git a/ietf/doc/views_ballot.py b/ietf/doc/views_ballot.py index 98fb126109..03cf01a4a1 100644 --- a/ietf/doc/views_ballot.py +++ b/ietf/doc/views_ballot.py @@ -4,11 +4,12 @@ # Directors and Secretariat -import datetime, json +import datetime +import json from django import forms from django.conf import settings -from django.http import HttpResponse, HttpResponseRedirect, Http404 +from django.http import HttpResponse, HttpResponseNotAllowed, HttpResponseRedirect, Http404, HttpResponseBadRequest from django.shortcuts import render, get_object_or_404, redirect from django.template.defaultfilters import striptags from django.template.loader import render_to_string @@ -33,12 +34,15 @@ from ietf.doc.templatetags.ietf_filters import can_ballot from ietf.iesg.models import TelechatDate from ietf.ietfauth.utils import has_role, role_required, is_authorized_in_doc_stream +from ietf.mailtrigger.models import Recipient from ietf.mailtrigger.utils import gather_address_lists from ietf.mailtrigger.forms import CcSelectForm 
from ietf.message.utils import infer_message from ietf.name.models import BallotPositionName, DocTypeName from ietf.person.models import Person -from ietf.utils.mail import send_mail_text, send_mail_preformatted +from ietf.utils.fields import ModelMultipleChoiceField, MultiEmailField +from ietf.utils.http import validate_return_to_path +from ietf.utils.mail import decode_header_value, send_mail_text, send_mail_preformatted from ietf.utils.decorators import require_api_key from ietf.utils.response import permission_denied from ietf.utils.timezone import date_today, datetime_from_date, DEADLINE_TZINFO @@ -176,19 +180,22 @@ def save_position(form, doc, ballot, balloter, login=None, send_email=False): return pos +class AdditionalCCForm(forms.Form): + additional_cc = MultiEmailField(required=False) + @role_required("Area Director", "Secretariat", "IRSG Member", "RSAB Member") def edit_position(request, name, ballot_id): """Vote and edit discuss and comment on document""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) ballot = get_object_or_404(BallotDocEvent, type="created_ballot", pk=ballot_id, doc=doc) balloter = login = request.user.person - if 'ballot_edit_return_point' in request.session: - return_to_url = request.session['ballot_edit_return_point'] - else: - return_to_url = urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name, ballot_id=ballot_id)) - + try: + return_to_url = parse_ballot_edit_return_point(request.GET.get('ballot_edit_return_point'), doc.name, ballot_id) + except ValueError: + return HttpResponseBadRequest('ballot_edit_return_point is invalid') + # if we're in the Secretariat, we can select a balloter to act as stand-in for if has_role(request.user, "Secretariat"): balloter_id = request.GET.get('balloter') @@ -196,45 +203,67 @@ def edit_position(request, name, ballot_id): raise Http404 balloter = get_object_or_404(Person, pk=balloter_id) + if doc.stream_id == 'irtf': + mailtrigger_slug='irsg_ballot_saved' + elif doc.stream_id == 'editorial': + mailtrigger_slug='rsab_ballot_saved' + else: + mailtrigger_slug='iesg_ballot_saved' + if request.method == 'POST': old_pos = None if not has_role(request.user, "Secretariat") and not can_ballot(request.user, doc): # prevent pre-ADs from taking a position permission_denied(request, "Must be an active member (not a pre-AD for example) of the balloting body to take a position") + if request.POST.get("Defer") and doc.stream.slug != "irtf": + return redirect('ietf.doc.views_ballot.defer_ballot', name=doc) + elif request.POST.get("Undefer") and doc.stream.slug != "irtf": + return redirect('ietf.doc.views_ballot.undefer_ballot', name=doc) + form = EditPositionForm(request.POST, ballot_type=ballot.ballot_type) - if form.is_valid(): + cc_select_form = CcSelectForm(data=request.POST,mailtrigger_slug=mailtrigger_slug,mailtrigger_context={'doc':doc}) + additional_cc_form = AdditionalCCForm(request.POST) + if form.is_valid() and cc_select_form.is_valid() and additional_cc_form.is_valid(): send_mail = True if request.POST.get("send_mail") else False - save_position(form, doc, ballot, balloter, login, send_mail) - + pos = save_position(form, doc, ballot, balloter, login, send_mail) if send_mail: - qstr="" - if request.GET.get('balloter'): - qstr += "?balloter=%s" % request.GET.get('balloter') - return HttpResponseRedirect(urlreverse('ietf.doc.views_ballot.send_ballot_comment', kwargs=dict(name=doc.name, ballot_id=ballot_id)) + qstr) - elif request.POST.get("Defer") and 
doc.stream.slug != "irtf": - return redirect('ietf.doc.views_ballot.defer_ballot', name=doc) - elif request.POST.get("Undefer") and doc.stream.slug != "irtf": - return redirect('ietf.doc.views_ballot.undefer_ballot', name=doc) - else: - return HttpResponseRedirect(return_to_url) + addrs, frm, subject, body = build_position_email(balloter, doc, pos) + if doc.stream_id == 'irtf': + mailtrigger_slug='irsg_ballot_saved' + elif doc.stream_id == 'editorial': + mailtrigger_slug='rsab_ballot_saved' + else: + mailtrigger_slug='iesg_ballot_saved' + cc = [] + cc.extend(cc_select_form.get_selected_addresses()) + extra_cc = additional_cc_form.cleaned_data["additional_cc"] + if extra_cc: + cc.extend(extra_cc) + cc_set = set(cc) + cc_set.discard("") + cc = sorted(list(cc_set)) + send_mail_text(request, addrs.to, frm, subject, body, cc=", ".join(cc)) + return redirect(return_to_url) else: initial = {} old_pos = doc.latest_event(BallotPositionDocEvent, type="changed_ballot_position", balloter=balloter, ballot=ballot) if old_pos: initial['position'] = old_pos.pos.slug initial['discuss'] = old_pos.discuss - initial['comment'] = old_pos.comment - + initial['comment'] = old_pos.comment form = EditPositionForm(initial=initial, ballot_type=ballot.ballot_type) + cc_select_form = CcSelectForm(mailtrigger_slug=mailtrigger_slug,mailtrigger_context={'doc':doc}) + additional_cc_form = AdditionalCCForm() blocking_positions = dict((p.pk, p.name) for p in form.fields["position"].queryset.all() if p.blocking) - ballot_deferred = doc.active_defer_event() return render(request, 'doc/ballot/edit_position.html', dict(doc=doc, form=form, + cc_select_form=cc_select_form, + additional_cc_form=additional_cc_form, balloter=balloter, return_to_url=return_to_url, old_pos=old_pos, @@ -249,14 +278,18 @@ def edit_position(request, name, ballot_id): @csrf_exempt def api_set_position(request): def err(code, text): - return HttpResponse(text, status=code, content_type='text/plain') + return HttpResponse( + text, + status=code, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) if request.method == 'POST': ad = request.user.person name = request.POST.get('doc') if not name: return err(400, "Missing document name") try: - doc = Document.objects.get(docalias__name=name) + doc = Document.objects.get(name=name) except Document.DoesNotExist: return err(400, "Document not found") position_names = BallotPositionName.objects.values_list('slug', flat=True) @@ -282,24 +315,105 @@ def err(code, text): addrs, frm, subject, body = build_position_email(ad, doc, pos) send_mail_text(request, addrs.to, frm, subject, body, cc=addrs.cc) - return HttpResponse("Done", status=200, content_type='text/plain') + return HttpResponse( + "Done", + status=200, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) -def build_position_email(balloter, doc, pos): +@role_required("Area Director", "Secretariat") +@csrf_exempt +def ajax_build_position_email(request): + if request.method != "POST": + return HttpResponseNotAllowed(["POST"]) + errors = list() + try: + json_body = json.loads(request.body) + except json.decoder.JSONDecodeError: + errors.append("Post body is not valid json") + if len(errors) == 0: + post_data = json_body.get("post_data") + if post_data is None: + errors.append("post_data not provided") + else: + for key in [ + "discuss", + "comment", + "position", + "balloter", + "docname", + "cc_choices", + "additional_cc", + ]: + if key not in post_data: + errors.append(f"{key} not found in post_data") + if len(errors) == 0: + 
person = Person.objects.filter(pk=post_data.get("balloter")).first() + if person is None: + errors.append("No person found matching balloter") + doc = Document.objects.filter(name=post_data.get("docname")).first() + if doc is None: + errors.append("No document found matching docname") + if len(errors) > 0: + response = { + "success": False, + "errors": errors, + } + else: + wanted = dict() # consider named tuple instead + wanted["discuss"] = post_data.get("discuss") + wanted["comment"] = post_data.get("comment") + wanted["position_name"] = post_data.get("position") + wanted["balloter"] = person + wanted["doc"] = doc + addrs, frm, subject, body = build_position_email_from_dict(wanted) + + recipient_slugs = post_data.get("cc_choices") + # Consider refactoring gather_address_lists so this isn't duplicated from there + cc_addrs = set() + for r in Recipient.objects.filter(slug__in=recipient_slugs): + cc_addrs.update(r.gather(doc=doc)) + additional_cc = post_data.get("additional_cc") + for addr in additional_cc.split(","): + cc_addrs.add(addr.strip()) + cc_addrs.discard("") + cc_addrs = sorted(list(cc_addrs)) + + response_text = "\n".join( + [ + f"From: {decode_header_value(frm)}", + f"To: {', '.join([decode_header_value(addr) for addr in addrs.to])}", + f"Cc: {', '.join([decode_header_value(addr) for addr in cc_addrs])}", + f"Subject: {subject}", + "", + body, + ] + ) + + response = { + "success": True, + "text": response_text, + } + return HttpResponse(json.dumps(response), content_type="application/json") + +def build_position_email_from_dict(pos_dict): + doc = pos_dict["doc"] subj = [] d = "" blocking_name = "DISCUSS" - if pos.pos.blocking and pos.discuss: - d = pos.discuss - blocking_name = pos.pos.name.upper() + pos_name = BallotPositionName.objects.filter(slug=pos_dict["position_name"]).first() + if pos_name.blocking and pos_dict.get("discuss"): + d = pos_dict.get("discuss") + blocking_name = pos_name.name.upper() subj.append(blocking_name) c = "" - if pos.comment: - c = pos.comment + if pos_dict.get("comment"): + c = pos_dict.get("comment") subj.append("COMMENT") - + balloter = pos_dict.get("balloter") balloter_name_genitive = balloter.plain_name() + "'" if balloter.plain_name().endswith('s') else balloter.plain_name() + "'s" - subject = "%s %s on %s" % (balloter_name_genitive, pos.pos.name if pos.pos else "No Position", doc.name + "-" + doc.rev) + subject = "%s %s on %s" % (balloter_name_genitive, pos_name.name if pos_name else "No Position", doc.name + "-" + doc.rev) if subj: subject += ": (with %s)" % " and ".join(subj) @@ -308,101 +422,51 @@ def build_position_email(balloter, doc, pos): comment=c, balloter=balloter.plain_name(), doc=doc, - pos=pos.pos, + pos=pos_name, blocking_name=blocking_name, settings=settings)) frm = balloter.role_email("ad").formatted_email() if doc.stream_id == "irtf": addrs = gather_address_lists('irsg_ballot_saved',doc=doc) + elif doc.stream_id == "editorial": + addrs = gather_address_lists('rsab_ballot_saved',doc=doc) else: addrs = gather_address_lists('iesg_ballot_saved',doc=doc) return addrs, frm, subject, body -@role_required('Area Director','Secretariat','IRSG Member', 'RSAB Member') -def send_ballot_comment(request, name, ballot_id): - """Email document ballot position discuss/comment for Area Director.""" - doc = get_object_or_404(Document, docalias__name=name) - ballot = get_object_or_404(BallotDocEvent, type="created_ballot", pk=ballot_id, doc=doc) - - if not has_role(request.user, 'Secretariat'): - if any([ - doc.stream_id == 'ietf' and not 
has_role(request.user, 'Area Director'), - doc.stream_id == 'irtf' and not has_role(request.user, 'IRSG Member'), - doc.stream_id == 'editorial' and not has_role(request.user, 'RSAB Member'), - ]): - raise Http404 - - balloter = request.user.person - - if 'ballot_edit_return_point' in request.session: - return_to_url = request.session['ballot_edit_return_point'] - else: - return_to_url = urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name, ballot_id=ballot_id)) - - if 'HTTP_REFERER' in request.META: - back_url = request.META['HTTP_REFERER'] - else: - back_url = urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name, ballot_id=ballot_id)) - - # if we're in the Secretariat, we can select a balloter (such as an AD) to act as stand-in for - if has_role(request.user, "Secretariat"): - balloter_id = request.GET.get('balloter') - if not balloter_id: - raise Http404 - balloter = get_object_or_404(Person, pk=balloter_id) - - pos = doc.latest_event(BallotPositionDocEvent, type="changed_ballot_position", balloter=balloter, ballot=ballot) - if not pos: - raise Http404 - addrs, frm, subject, body = build_position_email(balloter, doc, pos) - - if doc.stream_id == 'irtf': - mailtrigger_slug='irsg_ballot_saved' - elif doc.stream_id == 'editorial': - mailtrigger_slug='rsab_ballot_saved' - else: - mailtrigger_slug='iesg_ballot_saved' - - if request.method == 'POST': - cc = [] - cc_select_form = CcSelectForm(data=request.POST,mailtrigger_slug=mailtrigger_slug,mailtrigger_context={'doc':doc}) - if cc_select_form.is_valid(): - cc.extend(cc_select_form.get_selected_addresses()) - extra_cc = [x.strip() for x in request.POST.get("extra_cc","").split(',') if x.strip()] - if extra_cc: - cc.extend(extra_cc) - - send_mail_text(request, addrs.to, frm, subject, body, cc=", ".join(cc)) - - return HttpResponseRedirect(return_to_url) - - else: +def build_position_email(balloter, doc, pos): - cc_select_form = CcSelectForm(mailtrigger_slug=mailtrigger_slug,mailtrigger_context={'doc':doc}) - - return render(request, 'doc/ballot/send_ballot_comment.html', - dict(doc=doc, - subject=subject, - body=body, - frm=frm, - to=addrs.as_strings().to, - balloter=balloter, - back_url=back_url, - cc_select_form = cc_select_form, - )) + pos_dict=dict() + pos_dict["doc"]=doc + pos_dict["position_name"]=pos.pos.slug + pos_dict["discuss"]=pos.discuss + pos_dict["comment"]=pos.comment + pos_dict["balloter"]=balloter + return build_position_email_from_dict(pos_dict) @role_required('Area Director','Secretariat') def clear_ballot(request, name, ballot_type_slug): """Clear all positions and discusses on every open ballot for a document.""" doc = get_object_or_404(Document, name=name) + # If there's no appropriate ballot type state, clearing would be an invalid action. 
+ # This will need to be updated if we ever allow defering IRTF ballots + if ballot_type_slug == "approve": + state_machine = "draft-iesg" + elif ballot_type_slug in ["statchg","conflrev"]: + state_machine = ballot_type_slug + else: + state_machine = None + state_slug = state_machine and doc.get_state_slug(state_machine) + if state_machine is None or state_slug is None: + raise Http404 if request.method == 'POST': by = request.user.person if close_ballot(doc, by, ballot_type_slug): create_ballot_if_not_open(request, doc, by, ballot_type_slug) - if doc.get_state('draft-iesg').slug == 'defer': + if state_slug == "defer": do_undefer_ballot(request,doc) return redirect("ietf.doc.views_doc.document_main", name=doc.name) @@ -413,7 +477,7 @@ def clear_ballot(request, name, ballot_type_slug): @role_required('Area Director','Secretariat') def defer_ballot(request, name): """Signal post-pone of ballot, notifying relevant parties.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if doc.type_id not in ('draft','conflrev','statchg'): raise Http404 interesting_state = dict(draft='draft-iesg',conflrev='conflrev',statchg='statchg') @@ -467,7 +531,7 @@ def defer_ballot(request, name): @role_required('Area Director','Secretariat') def undefer_ballot(request, name): """undo deferral of ballot ballot.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if doc.type_id not in ('draft','conflrev','statchg'): raise Http404 if doc.type_id == 'draft' and not doc.get_state("draft-iesg"): @@ -503,7 +567,7 @@ def clean_last_call_text(self): @role_required('Area Director','Secretariat') def lastcalltext(request, name): """Editing of the last call text""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if not doc.get_state("draft-iesg"): raise Http404 @@ -589,7 +653,11 @@ def clean_ballot_writeup(self): @role_required('Area Director','Secretariat') def ballot_writeupnotes(request, name): """Editing of ballot write-up and notes""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) + + if doc.stream_id is None or doc.stream_id != 'ietf': + raise Http404("The requested operation is not allowed for this document.") + prev_state = doc.get_state("draft-iesg") login = request.user.person @@ -620,7 +688,7 @@ def ballot_writeupnotes(request, name): existing.save() if "issue_ballot" in request.POST and not ballot_already_approved: - if prev_state.slug in ['watching', 'writeupw', 'goaheadw']: + if prev_state.slug in ['writeupw', 'goaheadw']: new_state = State.objects.get(used=True, type="draft-iesg", slug='iesg-eva') prev_tags = doc.tags.filter(slug__in=IESG_SUBSTATE_TAGS) doc.set_state(new_state) @@ -686,7 +754,8 @@ def ballot_writeupnotes(request, name): dict(doc=doc, back_url=doc.get_absolute_url(), ballot_issued=bool(doc.latest_event(type="sent_ballot_announcement")), - ballot_issue_danger=bool(prev_state.slug in ['ad-eval', 'lc']), + warn_lc = not doc.docevent_set.filter(lastcalldocevent__expires__date__lt=date_today(DEADLINE_TZINFO)).exists(), + warn_unexpected_state= prev_state if bool(prev_state.slug in ['ad-eval', 'lc']) else None, ballot_writeup_form=form, need_intended_status=need_intended_status, )) @@ -700,7 +769,7 @@ def clean_rfc_editor_note(self): @role_required('Area Director','Secretariat','IAB Chair','IRTF Chair','ISE') def ballot_rfceditornote(request, name): """Editing of RFC Editor Note""" - 
doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if not is_authorized_in_doc_stream(request.user, doc): permission_denied(request, "You do not have the necessary permissions to change the RFC Editor Note for this document") @@ -765,7 +834,7 @@ def clean_approval_text(self): @role_required('Area Director','Secretariat') def ballot_approvaltext(request, name): """Editing of approval text""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if not doc.get_state("draft-iesg"): raise Http404 @@ -816,7 +885,7 @@ def ballot_approvaltext(request, name): @role_required('Secretariat') def approve_ballot(request, name): """Approve ballot, sending out announcement, changing state.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if not doc.get_state("draft-iesg"): raise Http404 @@ -931,7 +1000,7 @@ def approve_ballot(request, name): class ApproveDownrefsForm(forms.Form): - checkboxes = forms.ModelMultipleChoiceField( + checkboxes = ModelMultipleChoiceField( widget = forms.CheckboxSelectMultiple, queryset = RelatedDocument.objects.none(), ) @@ -947,13 +1016,19 @@ def clean(self): @role_required('Secretariat') def approve_downrefs(request, name): """Document ballot was just approved; add the checked downwared references to the downref registry.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if not doc.get_state("draft-iesg"): raise Http404 login = request.user.person - downrefs_to_rfc = [rel for rel in doc.relateddocument_set.all() if rel.is_downref() and not rel.is_approved_downref() and rel.target.document.is_rfc()] + downrefs_to_rfc = [ + rel + for rel in doc.relateddocument_set.all() + if rel.is_downref() + and not rel.is_approved_downref() + and rel.target.type_id == "rfc" + ] downrefs_to_rfc_qs = RelatedDocument.objects.filter(pk__in=[r.pk for r in downrefs_to_rfc]) @@ -968,12 +1043,12 @@ def approve_downrefs(request, name): c = DocEvent(type="downref_approved", doc=rel.source, rev=rel.source.rev, by=login) c.desc = "Downref to RFC %s approved by Last Call for %s-%s" % ( - rel.target.document.rfc_number(), rel.source, rel.source.rev) + rel.target.rfc_number, rel.source, rel.source.rev) c.save() - c = DocEvent(type="downref_approved", doc=rel.target.document, - rev=rel.target.document.rev, by=login) + c = DocEvent(type="downref_approved", doc=rel.target, + rev=rel.target.rev, by=login) c.desc = "Downref to RFC %s approved by Last Call for %s-%s" % ( - rel.target.document.rfc_number(), rel.source, rel.source.rev) + rel.target.rfc_number, rel.source, rel.source.rev) c.save() return HttpResponseRedirect(doc.get_absolute_url()) @@ -995,7 +1070,7 @@ class MakeLastCallForm(forms.Form): @role_required('Secretariat') def make_last_call(request, name): """Make last call for Internet-Draft, sending out announcement.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if not (doc.get_state("draft-iesg") or doc.get_state("statchg")): raise Http404 @@ -1103,7 +1178,7 @@ def make_last_call(request, name): @role_required('Secretariat', 'IRTF Chair') def issue_irsg_ballot(request, name): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if doc.stream.slug != "irtf" or doc.type != DocTypeName.objects.get(slug="draft"): raise Http404 @@ -1158,7 +1233,7 @@ def issue_irsg_ballot(request, 
name): @role_required('Secretariat', 'IRTF Chair') def close_irsg_ballot(request, name): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if doc.stream.slug != "irtf" or doc.type != DocTypeName.objects.get(slug="draft"): raise Http404 @@ -1199,7 +1274,7 @@ def irsg_ballot_status(request): @role_required('Secretariat', 'RSAB Chair') def issue_rsab_ballot(request, name): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if doc.stream.slug != "editorial" or doc.type != DocTypeName.objects.get(slug="draft"): raise Http404 @@ -1248,7 +1323,7 @@ def issue_rsab_ballot(request, name): @role_required('Secretariat', 'RSAB Chair') def close_rsab_ballot(request, name): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if doc.stream.slug != "editorial" or doc.type_id != "draft": raise Http404 @@ -1283,3 +1358,27 @@ def rsab_ballot_status(request): # Possible TODO: add a menu item to show this? Maybe only if you're in rsab or an rswg chair? # There will be so few of these that the general community would follow them from the rswg docs page. # Maybe the view isn't actually needed at all... + + +def parse_ballot_edit_return_point(path, doc_name, ballot_id): + get_default_path = lambda: urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc_name, ballot_id=ballot_id)) + allowed_path_handlers = { + "ietf.community.views.view_list", + "ietf.doc.views_doc.document_ballot", + "ietf.doc.views_doc.document_irsg_ballot", + "ietf.doc.views_doc.document_rsab_ballot", + "ietf.doc.views_ballot.irsg_ballot_status", + "ietf.doc.views_ballot.rsab_ballot_status", + "ietf.doc.views_search.search", + "ietf.doc.views_search.docs_for_ad", + "ietf.doc.views_search.drafts_in_last_call", + "ietf.doc.views_search.recent_drafts", + "ietf.group.views.chartering_groups", + "ietf.group.views.group_documents", + "ietf.group.views.stream_documents", + "ietf.iesg.views.agenda", + "ietf.iesg.views.agenda_documents", + "ietf.iesg.views.discusses", + "ietf.iesg.views.past_documents", + } + return validate_return_to_path(path, get_default_path, allowed_path_handlers) diff --git a/ietf/doc/views_bofreq.py b/ietf/doc/views_bofreq.py index 92a130efb4..94e3960dfa 100644 --- a/ietf/doc/views_bofreq.py +++ b/ietf/doc/views_bofreq.py @@ -15,7 +15,7 @@ from ietf.doc.mails import (email_bofreq_title_changed, email_bofreq_editors_changed, email_bofreq_new_revision, email_bofreq_responsible_changed) -from ietf.doc.models import (Document, DocAlias, DocEvent, NewRevisionDocEvent, +from ietf.doc.models import (Document, DocEvent, NewRevisionDocEvent, BofreqEditorDocEvent, BofreqResponsibleDocEvent, State) from ietf.doc.utils import add_state_change_event from ietf.doc.utils_bofreq import bofreq_editors, bofreq_responsible @@ -91,7 +91,6 @@ def submit(request, name): by=request.user.person, rev=bofreq.rev, desc='New revision available', - time=bofreq.time, ) bofreq.save_with_history([e]) bofreq_submission = form.cleaned_data['bofreq_submission'] @@ -101,6 +100,7 @@ def submit(request, name): content = form.cleaned_data['bofreq_content'] with io.open(bofreq.get_file_name(), 'w', encoding='utf-8') as destination: destination.write(content) + bofreq.store_str(bofreq.get_base_name(), content) email_bofreq_new_revision(request, bofreq) return redirect('ietf.doc.views_doc.document_main', name=bofreq.name) @@ -168,8 +168,6 @@ def new_bof_request(request): ) 
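The recurring change in the ballot views above is the document lookup: with DocAlias going away, views fetch by the document's own name instead of joining through the alias table. A minimal illustrative sketch of the before/after query (not part of the patch; view name hypothetical):

from django.shortcuts import get_object_or_404
from ietf.doc.models import Document

def get_ballot_doc(name):
    # Before this change the lookup went through the alias table:
    #   get_object_or_404(Document, docalias__name=name)
    # Names now live directly on Document, so the join disappears:
    return get_object_or_404(Document, name=name)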
e2.editors.set([request.user.person]) bofreq.save_with_history([e1,e2]) - alias = DocAlias.objects.create(name=name) - alias.docs.set([bofreq]) bofreq_submission = form.cleaned_data['bofreq_submission'] if bofreq_submission == "upload": content = get_cleaned_text_file_content(form.cleaned_data["bofreq_file"]) @@ -177,6 +175,7 @@ def new_bof_request(request): content = form.cleaned_data['bofreq_content'] with io.open(bofreq.get_file_name(), 'w', encoding='utf-8') as destination: destination.write(content) + bofreq.store_str(bofreq.get_base_name(), content) email_bofreq_new_revision(request, bofreq) return redirect('ietf.doc.views_doc.document_main', name=bofreq.name) diff --git a/ietf/doc/views_charter.py b/ietf/doc/views_charter.py index d3173291d3..e899f59227 100644 --- a/ietf/doc/views_charter.py +++ b/ietf/doc/views_charter.py @@ -3,11 +3,12 @@ import datetime -import io import json import os import textwrap +from pathlib import Path + from django.http import HttpResponseRedirect, HttpResponseNotFound, Http404 from django.shortcuts import get_object_or_404, redirect, render from django.urls import reverse as urlreverse @@ -22,7 +23,7 @@ import debug # pyflakes:ignore -from ietf.doc.models import ( Document, DocAlias, DocHistory, State, DocEvent, +from ietf.doc.models import ( Document, DocHistory, State, DocEvent, BallotDocEvent, BallotPositionDocEvent, InitialReviewDocEvent, NewRevisionDocEvent, WriteupDocEvent, TelechatDocEvent ) from ietf.doc.utils import ( add_state_change_event, close_open_ballots, @@ -32,16 +33,17 @@ generate_ballot_writeup, generate_issue_ballot_mail, next_revision, derive_new_work_text, change_group_state_after_charter_approval, fix_charter_revision_after_approval, - split_charter_name) + split_charter_name, charter_name_for_group) from ietf.doc.mails import email_state_changed, email_charter_internal_review from ietf.group.mails import email_admin_re_charter from ietf.group.models import Group, ChangeStateGroupEvent, MilestoneGroupEvent -from ietf.group.utils import save_group_in_history, save_milestone_in_history, can_manage_all_groups_of_type -from ietf.group.views import fill_in_charter_info +from ietf.group.utils import save_group_in_history, save_milestone_in_history, can_manage_all_groups_of_type, \ + fill_in_charter_info from ietf.ietfauth.utils import has_role, role_required from ietf.name.models import GroupStateName from ietf.person.models import Person from ietf.utils.history import find_history_active_at +from ietf.utils.log import assertion, log from ietf.utils.mail import send_mail_preformatted from ietf.utils.textupload import get_cleaned_text_file_content from ietf.utils.response import permission_denied @@ -362,38 +364,41 @@ def clean_txt(self): @login_required def submit(request, name, option=None): - if not name.startswith('charter-'): - raise Http404 - + # Charters are named "charter--" charter = Document.objects.filter(type="charter", name=name).first() if charter: group = charter.group - charter_canonical_name = charter.canonical_name() + assertion("charter.name == charter_name_for_group(group)") charter_rev = charter.rev else: top_org, group_acronym = split_charter_name(name) group = get_object_or_404(Group, acronym=group_acronym) - charter_canonical_name = name + if name != charter_name_for_group(group): + raise Http404 # do not allow creation of misnamed charters charter_rev = "00-00" - if not can_manage_all_groups_of_type(request.user, group.type_id) or not group.features.has_chartering_process: + if ( + not 
can_manage_all_groups_of_type(request.user, group.type_id) + or not group.features.has_chartering_process + ): permission_denied(request, "You don't have permission to access this view.") - - path = os.path.join(settings.CHARTER_PATH, '%s-%s.txt' % (charter_canonical_name, charter_rev)) - not_uploaded_yet = charter_rev.endswith("-00") and not os.path.exists(path) + charter_filename = Path(settings.CHARTER_PATH) / f"{name}-{charter_rev}.txt" + not_uploaded_yet = charter_rev.endswith("-00") and not charter_filename.exists() if not_uploaded_yet or not charter: # this case is special - we recently chartered or rechartered and have no file yet next_rev = charter_rev else: # search history for possible collisions with abandoned efforts - prev_revs = list(charter.history_set.order_by('-time').values_list('rev', flat=True)) + prev_revs = list( + charter.history_set.order_by("-time").values_list("rev", flat=True) + ) next_rev = next_revision(charter.rev) while next_rev in prev_revs: next_rev = next_revision(next_rev) - if request.method == 'POST': + if request.method == "POST": form = UploadForm(request.POST, request.FILES) if form.is_valid(): # Also save group history so we can search for it @@ -408,9 +413,10 @@ def submit(request, name, option=None): abstract=group.name, rev=next_rev, ) - DocAlias.objects.create(name=charter.name).docs.add(charter) - charter.set_state(State.objects.get(used=True, type="charter", slug="notrev")) + charter.set_state( + State.objects.get(used=True, type="charter", slug="notrev") + ) group.charter = charter group.save() @@ -418,56 +424,88 @@ def submit(request, name, option=None): charter.rev = next_rev events = [] - e = NewRevisionDocEvent(doc=charter, by=request.user.person, type="new_revision") - e.desc = "New version available: %s-%s.txt" % (charter.canonical_name(), charter.rev) + e = NewRevisionDocEvent( + doc=charter, by=request.user.person, type="new_revision" + ) + e.desc = "New version available: %s-%s.txt" % ( + charter.name, + charter.rev, + ) e.rev = charter.rev e.save() events.append(e) # Save file on disk - filename = os.path.join(settings.CHARTER_PATH, '%s-%s.txt' % (charter.canonical_name(), charter.rev)) - with io.open(filename, 'w', encoding='utf-8') as destination: - if form.cleaned_data['txt']: - destination.write(form.cleaned_data['txt']) + charter_filename = charter_filename.with_name( + f"{name}-{charter.rev}.txt" + ) # update rev + with charter_filename.open("w", encoding="utf-8") as destination: + if form.cleaned_data["txt"]: + content=form.cleaned_data["txt"] else: - destination.write(form.cleaned_data['content']) + content=form.cleaned_data["content"] + destination.write(content) + # Also provide a copy to the legacy ftp source directory, which is served by rsync + # This replaces the hardlink copy that ghostlink has made in the past + # Still using a hardlink as long as these are on the same filesystem. 
+ ftp_filename = Path(settings.FTP_DIR) / "charter" / charter_filename.name + try: + os.link(charter_filename, ftp_filename) # os.link until we are on python>=3.10 + except IOError: + log( + "There was an error creating a hardlink at %s pointing to %s" + % (ftp_filename, charter_filename) + ) + charter.store_str(charter_filename.name, content) - if option in ['initcharter','recharter'] and charter.ad == None: - charter.ad = getattr(group.ad_role(),'person',None) + + if option in ["initcharter", "recharter"] and charter.ad == None: + charter.ad = getattr(group.ad_role(), "person", None) charter.save_with_history(events) if option: - return redirect('ietf.doc.views_charter.change_state', name=charter.name, option=option) + return redirect( + "ietf.doc.views_charter.change_state", + name=charter.name, + option=option, + ) else: return redirect("ietf.doc.views_doc.document_main", name=charter.name) else: - init = { "content": "" } + init = {"content": ""} if not_uploaded_yet and charter: # use text from last approved revision last_approved = charter.rev.split("-")[0] - h = charter.history_set.filter(rev=last_approved).order_by("-time", "-id").first() + h = ( + charter.history_set.filter(rev=last_approved) + .order_by("-time", "-id") + .first() + ) if h: - charter_canonical_name = h.canonical_name() - charter_rev = h.rev - - filename = os.path.join(settings.CHARTER_PATH, '%s-%s.txt' % (charter_canonical_name, charter_rev)) + assertion("h.name == charter_name_for_group(group)") + charter_filename = charter_filename.with_name( + f"{name}-{h.rev}.txt" + ) # update rev try: - with io.open(filename, 'r') as f: - init["content"] = f.read() + init["content"] = charter_filename.read_text() except IOError: pass form = UploadForm(initial=init) fill_in_charter_info(group) - return render(request, 'doc/charter/submit.html', { - 'form': form, - 'next_rev': next_rev, - 'group': group, - 'name': name, - }) + return render( + request, + "doc/charter/submit.html", + { + "form": form, + "next_rev": next_rev, + "group": group, + "name": name, + }, + ) class ActionAnnouncementTextForm(forms.Form): announcement_text = forms.CharField(widget=forms.Textarea, required=True, strip=False) @@ -484,7 +522,7 @@ def clean_announcement_text(self): return self.cleaned_data["announcement_text"].replace("\r", "") -@role_required('Area Director','Secretariat') +@role_required("Area Director", "Secretariat") def review_announcement_text(request, name): """Editing of review announcement text""" charter = get_object_or_404(Document, type="charter", name=name) @@ -493,7 +531,9 @@ def review_announcement_text(request, name): by = request.user.person existing = charter.latest_event(WriteupDocEvent, type="changed_review_announcement") - existing_new_work = charter.latest_event(WriteupDocEvent, type="changed_new_work_text") + existing_new_work = charter.latest_event( + WriteupDocEvent, type="changed_new_work_text" + ) if not existing: (existing, existing_new_work) = default_review_text(group, charter, by) @@ -506,19 +546,23 @@ def review_announcement_text(request, name): existing_new_work.by = by existing_new_work.type = "changed_new_work_text" existing_new_work.desc = "%s review text was changed" % group.type.name - existing_new_work.text = derive_new_work_text(existing.text,group) + existing_new_work.text = derive_new_work_text(existing.text, group) existing_new_work.time = timezone.now() - form = ReviewAnnouncementTextForm(initial=dict(announcement_text=escape(existing.text),new_work_text=escape(existing_new_work.text))) + form = 
ReviewAnnouncementTextForm( + initial=dict( + announcement_text=escape(existing.text), + new_work_text=escape(existing_new_work.text), + ) + ) - if request.method == 'POST': + if request.method == "POST": form = ReviewAnnouncementTextForm(request.POST) if "save_text" in request.POST and form.is_valid(): - now = timezone.now() events = [] - t = form.cleaned_data['announcement_text'] + t = form.cleaned_data["announcement_text"] if t != existing.text: e = WriteupDocEvent(doc=charter, rev=charter.rev) e.by = by @@ -532,11 +576,11 @@ def review_announcement_text(request, name): existing.save() events.append(existing) - t = form.cleaned_data['new_work_text'] + t = form.cleaned_data["new_work_text"] if t != existing_new_work.text: e = WriteupDocEvent(doc=charter, rev=charter.rev) e.by = by - e.type = "changed_new_work_text" + e.type = "changed_new_work_text" e.desc = "%s new work message text was changed" % (group.type.name) e.text = t e.time = now @@ -549,33 +593,71 @@ def review_announcement_text(request, name): charter.save_with_history(events) if request.GET.get("next", "") == "approve": - return redirect('ietf.doc.views_charter.approve', name=charter.canonical_name()) + return redirect( + "ietf.doc.views_charter.approve", name=charter.name + ) - return redirect('ietf.doc.views_doc.document_writeup', name=charter.canonical_name()) + return redirect( + "ietf.doc.views_doc.document_writeup", name=charter.name + ) if "regenerate_text" in request.POST: (existing, existing_new_work) = default_review_text(group, charter, by) existing.save() existing_new_work.save() - form = ReviewAnnouncementTextForm(initial=dict(announcement_text=escape(existing.text), - new_work_text=escape(existing_new_work.text))) - - if any(x in request.POST for x in ['send_annc_only','send_nw_only','send_both']) and form.is_valid(): - if any(x in request.POST for x in ['send_annc_only','send_both']): - parsed_msg = send_mail_preformatted(request, form.cleaned_data['announcement_text']) - messages.success(request, "The email To: '%s' with Subject: '%s' has been sent." % (parsed_msg["To"],parsed_msg["Subject"],)) - if any(x in request.POST for x in ['send_nw_only','send_both']): - parsed_msg = send_mail_preformatted(request, form.cleaned_data['new_work_text']) - messages.success(request, "The email To: '%s' with Subject: '%s' has been sent." % (parsed_msg["To"],parsed_msg["Subject"],)) - return redirect('ietf.doc.views_doc.document_writeup', name=charter.name) - - return render(request, 'doc/charter/review_announcement_text.html', - dict(charter=charter, - back_url=urlreverse('ietf.doc.views_doc.document_writeup', kwargs=dict(name=charter.name)), - announcement_text_form=form, - )) + form = ReviewAnnouncementTextForm( + initial=dict( + announcement_text=escape(existing.text), + new_work_text=escape(existing_new_work.text), + ) + ) + + if ( + any( + x in request.POST + for x in ["send_annc_only", "send_nw_only", "send_both"] + ) + and form.is_valid() + ): + if any(x in request.POST for x in ["send_annc_only", "send_both"]): + parsed_msg = send_mail_preformatted( + request, form.cleaned_data["announcement_text"] + ) + messages.success( + request, + "The email To: '%s' with Subject: '%s' has been sent." + % ( + parsed_msg["To"], + parsed_msg["Subject"], + ), + ) + if any(x in request.POST for x in ["send_nw_only", "send_both"]): + parsed_msg = send_mail_preformatted( + request, form.cleaned_data["new_work_text"] + ) + messages.success( + request, + "The email To: '%s' with Subject: '%s' has been sent." 
+ % ( + parsed_msg["To"], + parsed_msg["Subject"], + ), + ) + return redirect("ietf.doc.views_doc.document_writeup", name=charter.name) + + return render( + request, + "doc/charter/review_announcement_text.html", + dict( + charter=charter, + back_url=urlreverse( + "ietf.doc.views_doc.document_writeup", kwargs=dict(name=charter.name) + ), + announcement_text_form=form, + ), + ) -@role_required('Area Director','Secretariat') +@role_required("Area Director", "Secretariat") def action_announcement_text(request, name): """Editing of action announcement text""" charter = get_object_or_404(Document, type="charter", name=name) @@ -590,16 +672,18 @@ def action_announcement_text(request, name): if not existing: raise Http404 - form = ActionAnnouncementTextForm(initial=dict(announcement_text=escape(existing.text))) + form = ActionAnnouncementTextForm( + initial=dict(announcement_text=escape(existing.text)) + ) - if request.method == 'POST': + if request.method == "POST": form = ActionAnnouncementTextForm(request.POST) if "save_text" in request.POST and form.is_valid(): - t = form.cleaned_data['announcement_text'] + t = form.cleaned_data["announcement_text"] if t != existing.text: e = WriteupDocEvent(doc=charter, rev=charter.rev) e.by = by - e.type = "changed_action_announcement" + e.type = "changed_action_announcement" e.desc = "%s action text was changed" % group.type.name e.text = t e.save() @@ -607,25 +691,46 @@ def action_announcement_text(request, name): existing.save() if request.GET.get("next", "") == "approve": - return redirect('ietf.doc.views_charter.approve', name=charter.canonical_name()) + return redirect( + "ietf.doc.views_charter.approve", name=charter.name + ) - return redirect('ietf.doc.views_doc.document_writeup', name=charter.canonical_name()) + return redirect( + "ietf.doc.views_doc.document_writeup", name=charter.name + ) if "regenerate_text" in request.POST: e = default_action_text(group, charter, by) e.save() - form = ActionAnnouncementTextForm(initial=dict(announcement_text=escape(e.text))) + form = ActionAnnouncementTextForm( + initial=dict(announcement_text=escape(e.text)) + ) if "send_text" in request.POST and form.is_valid(): - parsed_msg = send_mail_preformatted(request, form.cleaned_data['announcement_text']) - messages.success(request, "The email To: '%s' with Subject: '%s' has been sent." % (parsed_msg["To"],parsed_msg["Subject"],)) - return redirect('ietf.doc.views_doc.document_writeup', name=charter.name) - - return render(request, 'doc/charter/action_announcement_text.html', - dict(charter=charter, - back_url=urlreverse('ietf.doc.views_doc.document_writeup', kwargs=dict(name=charter.name)), - announcement_text_form=form, - )) + parsed_msg = send_mail_preformatted( + request, form.cleaned_data["announcement_text"] + ) + messages.success( + request, + "The email To: '%s' with Subject: '%s' has been sent." 
+ % ( + parsed_msg["To"], + parsed_msg["Subject"], + ), + ) + return redirect("ietf.doc.views_doc.document_writeup", name=charter.name) + + return render( + request, + "doc/charter/action_announcement_text.html", + dict( + charter=charter, + back_url=urlreverse( + "ietf.doc.views_doc.document_writeup", kwargs=dict(name=charter.name) + ), + announcement_text_form=form, + ), + ) class BallotWriteupForm(forms.Form): ballot_writeup = forms.CharField(widget=forms.Textarea, required=True, strip=False) @@ -806,33 +911,37 @@ def approve(request, name): dict(charter=charter, announcement=escape(announcement))) + def charter_with_milestones_txt(request, name, rev): - charter = get_object_or_404(Document, type="charter", docalias__name=name) + charter = get_object_or_404(Document, type="charter", name=name) - revision_event = charter.latest_event(NewRevisionDocEvent, type="new_revision", rev=rev) + revision_event = charter.latest_event( + NewRevisionDocEvent, type="new_revision", rev=rev + ) if not revision_event: return HttpResponseNotFound("Revision %s not found in database" % rev) # read charter text c = find_history_active_at(charter, revision_event.time) or charter - filename = '%s-%s.txt' % (c.canonical_name(), rev) - - charter_text = "" - + filename = Path(settings.CHARTER_PATH) / f"{c.name}-{rev}.txt" try: - with io.open(os.path.join(settings.CHARTER_PATH, filename), 'r') as f: - charter_text = force_str(f.read(), errors='ignore') + with filename.open() as f: + charter_text = force_str(f.read(), errors="ignore") except IOError: - charter_text = "Error reading charter text %s" % filename + charter_text = f"Error reading charter text {filename.name}" milestones = historic_milestones_for_charter(charter, rev) # wrap the output nicely - wrapper = textwrap.TextWrapper(initial_indent="", subsequent_indent=" " * 11, width=80, break_long_words=False) + wrapper = textwrap.TextWrapper( + initial_indent="", subsequent_indent=" " * 11, width=80, break_long_words=False + ) for m in milestones: m.desc_filled = wrapper.fill(m.desc) - return render(request, 'doc/charter/charter_with_milestones.txt', - dict(charter_text=charter_text, - milestones=milestones), - content_type="text/plain; charset=%s"%settings.DEFAULT_CHARSET) + return render( + request, + "doc/charter/charter_with_milestones.txt", + dict(charter_text=charter_text, milestones=milestones), + content_type="text/plain; charset=%s" % settings.DEFAULT_CHARSET, + ) diff --git a/ietf/doc/views_conflict_review.py b/ietf/doc/views_conflict_review.py index 7d939ec984..159f1340a4 100644 --- a/ietf/doc/views_conflict_review.py +++ b/ietf/doc/views_conflict_review.py @@ -5,6 +5,7 @@ import datetime import io import os +from pathlib import Path from django import forms from django.shortcuts import render, get_object_or_404, redirect @@ -16,7 +17,7 @@ import debug # pyflakes:ignore -from ietf.doc.models import ( BallotDocEvent, BallotPositionDocEvent, DocAlias, DocEvent, +from ietf.doc.models import ( BallotDocEvent, BallotPositionDocEvent, DocEvent, Document, NewRevisionDocEvent, State ) from ietf.doc.utils import ( add_state_change_event, close_open_ballots, create_ballot_if_not_open, update_telechat ) @@ -98,7 +99,7 @@ def change_state(request, name, option=None): ok_to_publish) if new_state.slug in ["appr-reqnopub-sent", "appr-noprob-sent", "withdraw", "dead"]: - doc = review.related_that_doc("conflrev")[0].document + doc = review.related_that_doc("conflrev")[0] update_stream_state(doc, login, 'chair-w' if doc.stream_id=='irtf' else 'ise-rev', 'iesg-com') 
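The conflict-review changes above reflect that RelatedDocument.target now points at a Document rather than a DocAlias, so the trailing ".document" hop is dropped. A hedged sketch of the access pattern, assuming "review" is a conflict-review Document (illustrative only):

# Old shape: target was a DocAlias, one hop away from the Document:
#   reviewed = review.relateddocument_set.get(relationship__slug="conflrev").target.document
# New shape: target is the reviewed Document itself:
reviewed = review.relateddocument_set.get(relationship__slug="conflrev").target
# review.related_that_doc("conflrev")[0] now yields the same Document object.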
return redirect('ietf.doc.views_doc.document_main', name=review.name) @@ -123,7 +124,7 @@ def send_conflict_review_ad_changed_email(request, review, event): by = request.user.person, event = event, review = review, - reviewed_doc = review.relateddocument_set.get(relationship__slug='conflrev').target.document, + reviewed_doc = review.relateddocument_set.get(relationship__slug='conflrev').target, review_url = settings.IDTRACKER_BASE_URL+review.get_absolute_url(), ) ) @@ -138,7 +139,7 @@ def send_conflict_review_started_email(request, review): cc = addrs.cc, by = request.user.person, review = review, - reviewed_doc = review.relateddocument_set.get(relationship__slug='conflrev').target.document, + reviewed_doc = review.relateddocument_set.get(relationship__slug='conflrev').target, review_url = settings.IDTRACKER_BASE_URL+review.get_absolute_url(), ) ) @@ -147,7 +148,7 @@ def send_conflict_review_started_email(request, review): addrs = gather_address_lists('conflrev_requested_iana',doc=review).as_strings(compact=False) email_iana(request, - review.relateddocument_set.get(relationship__slug='conflrev').target.document, + review.relateddocument_set.get(relationship__slug='conflrev').target, addrs.to, msg, cc=addrs.cc) @@ -165,7 +166,7 @@ def send_conflict_eval_email(request,review): send_mail_preformatted(request,msg,override=override) addrs = gather_address_lists('ballot_issued_iana',doc=review).as_strings() email_iana(request, - review.relateddocument_set.get(relationship__slug='conflrev').target.document, + review.relateddocument_set.get(relationship__slug='conflrev').target, addrs.to, msg, addrs.cc) @@ -181,12 +182,23 @@ def clean_txt(self): return get_cleaned_text_file_content(self.cleaned_data["txt"]) def save(self, review): - filename = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (review.canonical_name(), review.rev)) - with io.open(filename, 'w', encoding='utf-8') as destination: + basename = f"{review.name}-{review.rev}.txt" + filepath = Path(settings.CONFLICT_REVIEW_PATH) / basename + with filepath.open('w', encoding='utf-8') as destination: if self.cleaned_data['txt']: - destination.write(self.cleaned_data['txt']) + content = self.cleaned_data['txt'] else: - destination.write(self.cleaned_data['content']) + content = self.cleaned_data['content'] + destination.write(content) + ftp_filepath = Path(settings.FTP_DIR) / "conflict-reviews" / basename + try: + os.link(filepath, ftp_filepath) # Path.hardlink_to is not available until 3.10 + except IOError as e: + log.log( + "There was an error creating a hardlink at %s pointing to %s: %s" + % (ftp_filepath, filepath, e) + ) + review.store_str(basename, content) #This is very close to submit on charter - can we get better reuse? 
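The form save() above now persists the review text three ways: the canonical file, a hardlink into the rsync-served FTP tree, and the blob store via store_str(). A minimal sketch of that pattern (helper name hypothetical; the real code logs failures rather than printing):

import os
from pathlib import Path

def persist_text(doc, basename, content, canonical_dir, ftp_dir):
    filepath = Path(canonical_dir) / basename
    filepath.write_text(content, encoding="utf-8")      # canonical copy on disk
    try:
        # os.link is used because Path.hardlink_to requires Python >= 3.10
        os.link(filepath, Path(ftp_dir) / basename)
    except OSError as e:
        print(f"hardlink into {ftp_dir} failed: {e}")   # stand-in for log.log(...)
    doc.store_str(basename, content)                    # blob store copy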
@role_required('Area Director','Secretariat') @@ -195,7 +207,7 @@ def submit(request, name): login = request.user.person - path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (review.canonical_name(), review.rev)) + path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (review.name, review.rev)) not_uploaded_yet = review.rev == "00" and not os.path.exists(path) if not_uploaded_yet: @@ -212,7 +224,7 @@ def submit(request, name): events = [] e = NewRevisionDocEvent(doc=review, by=login, type="new_revision") - e.desc = "New version available: %s-%s.txt" % (review.canonical_name(), review.rev) + e.desc = "New version available: %s-%s.txt" % (review.name, review.rev) e.rev = review.rev e.save() events.append(e) @@ -244,7 +256,7 @@ def submit(request, name): dict(), )) else: - filename = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (review.canonical_name(), review.rev)) + filename = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (review.name, review.rev)) try: with io.open(filename, 'r') as f: init["content"] = f.read() @@ -257,7 +269,7 @@ def submit(request, name): {'form': form, 'next_rev': next_rev, 'review' : review, - 'conflictdoc' : review.relateddocument_set.get(relationship__slug='conflrev').target.document, + 'conflictdoc' : review.relateddocument_set.get(relationship__slug='conflrev').target, }) @role_required("Area Director", "Secretariat") @@ -285,8 +297,8 @@ def edit_ad(request, name): form = AdForm(initial=init) - conflictdoc = review.relateddocument_set.get(relationship__slug='conflrev').target.document - titletext = 'the conflict review of %s-%s' % (conflictdoc.canonical_name(),conflictdoc.rev) + conflictdoc = review.relateddocument_set.get(relationship__slug='conflrev').target + titletext = 'the conflict review of %s-%s' % (conflictdoc.name,conflictdoc.rev) return render(request, 'doc/change_ad.html', {'form': form, 'doc': review, @@ -297,7 +309,7 @@ def edit_ad(request, name): def default_approval_text(review): current_text = review.text_or_error() # pyflakes:ignore - conflictdoc = review.relateddocument_set.get(relationship__slug='conflrev').target.document + conflictdoc = review.relateddocument_set.get(relationship__slug='conflrev').target if conflictdoc.stream_id=='ise': receiver = 'Independent Submissions Editor' elif conflictdoc.stream_id=='irtf': @@ -365,7 +377,7 @@ def approve_conflict_review(request, name): c.desc = "The following approval message was sent\n"+form.cleaned_data['announcement_text'] c.save() - doc = review.related_that_doc("conflrev")[0].document + doc = review.related_that_doc("conflrev")[0] update_stream_state(doc, login, 'chair-w' if doc.stream_id=='irtf' else 'ise-rev', 'iesg-com') return HttpResponseRedirect(review.get_absolute_url()) @@ -378,7 +390,7 @@ def approve_conflict_review(request, name): return render(request, 'doc/conflict_review/approve.html', dict( review = review, - conflictdoc = review.relateddocument_set.get(relationship__slug='conflrev').target.document, + conflictdoc = review.relateddocument_set.get(relationship__slug='conflrev').target, form = form, )) @@ -429,7 +441,7 @@ def start_review_sanity_check(request, name): raise Http404 # sanity check that there's not already a conflict review document for this document - if [ rel.source for alias in doc_to_review.docalias.all() for rel in alias.relateddocument_set.filter(relationship='conflrev') ]: + if [ rel.source for rel in doc_to_review.targets_related.filter(relationship='conflrev') ]: raise Http404 return doc_to_review @@ -461,11 
+473,8 @@ def build_conflict_review_document(login, doc_to_review, ad, notify, create_in_s group=iesg_group, ) conflict_review.set_state(create_in_state) - - DocAlias.objects.create( name=review_name).docs.add( conflict_review ) - - conflict_review.relateddocument_set.create(target=DocAlias.objects.get(name=doc_to_review.name),relationship_id='conflrev') + conflict_review.relateddocument_set.create(target=doc_to_review, relationship_id='conflrev') c = DocEvent(type="added_comment", doc=conflict_review, rev=conflict_review.rev, by=login) c.desc = "IETF conflict review requested" diff --git a/ietf/doc/views_doc.py b/ietf/doc/views_doc.py index 1c42bfa963..5b57a62074 100644 --- a/ietf/doc/views_doc.py +++ b/ietf/doc/views_doc.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2009-2023, All Rights Reserved +# Copyright The IETF Trust 2009-2026, All Rights Reserved # -*- coding: utf-8 -*- # # Parts Copyright (C) 2009-2010 Nokia Corporation and/or its subsidiary(-ies). @@ -35,37 +35,41 @@ import glob -import io import json import os import re -from urllib.parse import quote from pathlib import Path -from django.http import HttpResponse, Http404 +from celery.result import AsyncResult +from django.core.cache import caches +from django.core.files.base import ContentFile +from django.core.exceptions import PermissionDenied +from django.db.models import Max +from django.http import FileResponse, HttpResponse, Http404, HttpResponseBadRequest, JsonResponse from django.shortcuts import render, get_object_or_404, redirect from django.template.loader import render_to_string from django.urls import reverse as urlreverse from django.conf import settings from django import forms +from django.contrib.auth.decorators import login_required from django.contrib.staticfiles import finders - import debug # pyflakes:ignore -from ietf.doc.models import ( Document, DocAlias, DocHistory, DocEvent, BallotDocEvent, BallotType, - ConsensusDocEvent, NewRevisionDocEvent, TelechatDocEvent, WriteupDocEvent, IanaExpertDocEvent, +from ietf.doc.models import ( Document, DocHistory, DocEvent, BallotDocEvent, BallotType, + ConsensusDocEvent, NewRevisionDocEvent, StoredObject, TelechatDocEvent, WriteupDocEvent, IanaExpertDocEvent, IESG_BALLOT_ACTIVE_STATES, STATUSCHANGE_RELATIONS, DocumentActionHolder, DocumentAuthor, RelatedDocument, RelatedDocHistory) +from ietf.doc.tasks import investigate_fragment_task from ietf.doc.utils import (augment_events_with_revision, can_adopt_draft, can_unadopt_draft, get_chartering_type, get_tags_for_stream_id, - needed_ballot_positions, nice_consensus, prettify_std_name, update_telechat, has_same_ballot, + needed_ballot_positions, nice_consensus, update_telechat, has_same_ballot, get_initial_notify, make_notify_changed_event, make_rev_history, default_consensus, add_events_message_info, get_unicode_document_content, - augment_docs_and_user_with_user_info, irsg_needed_ballot_positions, add_action_holder_change_event, + augment_docs_and_person_with_person_info, irsg_needed_ballot_positions, add_action_holder_change_event, build_file_urls, update_documentauthors, fuzzy_find_documents, - bibxml_for_draft) + bibxml_for_draft, get_doc_email_aliases) from ietf.doc.utils_bofreq import bofreq_editors, bofreq_responsible from ietf.group.models import Role, Group from ietf.group.utils import can_manage_all_groups_of_type, can_manage_materials, group_features_role_filter @@ -73,20 +77,24 @@ role_required, is_individual_draft_author, can_request_rfc_publication) from ietf.name.models import StreamName, 
BallotPositionName from ietf.utils.history import find_history_active_at -from ietf.doc.forms import TelechatForm, NotifyForm, ActionHoldersForm, DocAuthorForm, DocAuthorChangeBasisForm +from ietf.doc.views_ballot import parse_ballot_edit_return_point +from ietf.doc.forms import InvestigateForm, TelechatForm, NotifyForm, ActionHoldersForm, DocAuthorForm, DocAuthorChangeBasisForm from ietf.doc.mails import email_comment, email_remind_action_holders +from ietf.doc.utils import last_ballot_doc_revision from ietf.mailtrigger.utils import gather_relevant_expansions -from ietf.meeting.models import Session +from ietf.meeting.models import Session, SessionPresentation from ietf.meeting.utils import group_sessions, get_upcoming_manageable_sessions, sort_sessions, add_event_info_to_session_qs from ietf.review.models import ReviewAssignment from ietf.review.utils import can_request_review_of_doc, review_assignments_to_list_for_docs, review_requests_to_list_for_docs from ietf.review.utils import no_review_from_teams_on_doc +from ietf.doc.storage_utils import retrieve_bytes from ietf.utils import markup_txt, log, markdown -from ietf.utils.draft import PlaintextDraft +from ietf.utils.draft import get_status_from_draft_text +from ietf.utils.meetecho import MeetechoAPIError, SlidesManager from ietf.utils.response import permission_denied from ietf.utils.text import maybe_split from ietf.utils.timezone import date_today - +from ietf.utils.unicodenormalize import normalize_for_sorting def render_document_top(request, doc, tab, name): tabs = [] @@ -154,8 +162,8 @@ def render_document_top(request, doc, tab, name): None, ) ) - - tabs.append(("Email expansions","email",urlreverse('ietf.doc.views_doc.document_email', kwargs=dict(name=name)), True, None)) + if not doc.type_id in ["bcp", "std", "fyi"]: + tabs.append(("Email expansions","email",urlreverse('ietf.doc.views_doc.document_email', kwargs=dict(name=name)), True, None)) tabs.append(("History", "history", urlreverse('ietf.doc.views_doc.document_history', kwargs=dict(name=name)), True, None)) if name.startswith("rfc"): @@ -163,7 +171,7 @@ def render_document_top(request, doc, tab, name): else: name += "-" + doc.rev - return render_to_string("doc/document_top.html", + return render_to_string("doc/document_top.html" if not doc.type_id in ["bcp", "std", "fyi"] else "doc/document_subseries_top.html", dict(doc=doc, tabs=tabs, selected=tab, @@ -180,42 +188,38 @@ def interesting_doc_relations(doc): else: raise TypeError("Expected this method to be called with a Document or DocHistory object") - that_relationships = STATUSCHANGE_RELATIONS + ('conflrev', 'replaces', 'possibly_replaces', 'updates', 'obs') + that_relationships = STATUSCHANGE_RELATIONS + ('conflrev', 'replaces', 'possibly_replaces', 'updates', 'obs', 'became_rfc') - that_doc_relationships = ('replaces', 'possibly_replaces', 'updates', 'obs') + that_doc_relationships = ('replaces', 'possibly_replaces', 'updates', 'obs', 'became_rfc') - # TODO: This returns the relationships in database order, which may not be the order we want to display them in. 
- interesting_relations_that = cls.objects.filter(target__docs=target, relationship__in=that_relationships).select_related('source') - interesting_relations_that_doc = cls.objects.filter(source=doc, relationship__in=that_doc_relationships).prefetch_related('target__docs') + interesting_relations_that = cls.objects.filter(target=target, relationship__in=that_relationships).select_related('source') + interesting_relations_that_doc = cls.objects.filter(source=doc, relationship__in=that_doc_relationships).prefetch_related('target') return interesting_relations_that, interesting_relations_that_doc def document_main(request, name, rev=None, document_html=False): - if name.startswith("rfc") and rev is not None: + + doc = get_object_or_404(Document.objects.select_related(), name=name) + + if doc.type_id == "rfc" and rev is not None: raise Http404() - doc = get_object_or_404(Document.objects.select_related(), docalias__name=name) + log.assertion('doc.type_id!="rfc" or doc.name.startswith("rfc")') # take care of possible redirections - aliases = DocAlias.objects.filter(docs=doc).values_list("name", flat=True) - if document_html is False and rev==None and doc.type_id == "draft" and not name.startswith("rfc"): - for a in aliases: - if a.startswith("rfc"): - return redirect("ietf.doc.views_doc.document_main", name=a) - - revisions = [] - for h in doc.history_set.order_by("time", "id"): - if h.rev and not h.rev in revisions: - revisions.append(h.rev) - if not doc.rev in revisions: - revisions.append(doc.rev) + if document_html is False and rev is None: + became_rfc = doc.became_rfc() + if became_rfc: + return redirect("ietf.doc.views_doc.document_main", name=became_rfc.name) + + revisions = doc.revisions_by_dochistory() latest_rev = doc.rev snapshot = False gh = None - if rev: - # find the entry in the history + if rev and rev != doc.rev: + # find the entry in the history if the rev requested is not the current rev for h in doc.history_set.order_by("-time"): if rev == h.rev: snapshot = True @@ -241,9 +245,136 @@ def document_main(request, name, rev=None, document_html=False): if telechat and (not telechat.telechat_date or telechat.telechat_date < date_today(settings.TIME_ZONE)): telechat = None - # specific document types - if doc.type_id == "draft": + if doc.type_id == "rfc": + split_content = request.COOKIES.get("full_draft", settings.USER_PREFERENCE_DEFAULTS["full_draft"]) == "off" + if request.GET.get('include_text') == "0": + split_content = True + elif request.GET.get('include_text') == "1": + split_content = False + else: + pass + + interesting_relations_that, interesting_relations_that_doc = interesting_doc_relations(doc) + + can_edit = has_role(request.user, ("Area Director", "Secretariat")) + can_edit_authors = has_role(request.user, ("Secretariat")) + + stream_slugs = StreamName.objects.values_list("slug", flat=True) + # For some reason, AnonymousUser has __iter__, but is not iterable, + # which causes problems in the filter() below. 
Work around this: + if request.user.is_authenticated: + roles = Role.objects.filter(group__acronym__in=stream_slugs, person__user=request.user) + roles = group_features_role_filter(roles, request.user.person, 'docman_roles') + else: + roles = [] + + can_change_stream = bool(can_edit or roles) + + file_urls, found_types = build_file_urls(doc) + if not request.user.is_authenticated: + file_urls = [fu for fu in file_urls if fu[0] != "pdfized"] + content = doc.text_or_error() # pyflakes:ignore + content = markup_txt.markup(maybe_split(content, split=split_content)) + + if not found_types: + content = "This RFC is not currently available online." + split_content = False + elif "txt" not in found_types: + content = "This RFC is not available in plain text format." + split_content = False + + # status changes + status_changes = [] + proposed_status_changes = [] + for r in interesting_relations_that.filter(relationship__in=STATUSCHANGE_RELATIONS): + state_slug = r.source.get_state_slug() + if state_slug in ('appr-sent', 'appr-pend'): + status_changes.append(r) + elif state_slug in ('needshep','adrev','iesgeval','defer','appr-pr'): + proposed_status_changes.append(r) + else: + pass + + presentations = doc.future_presentations() + + if request.user.is_authenticated and hasattr(request.user, "person"): + augment_docs_and_person_with_person_info([doc], request.user.person) + + exp_comment = doc.latest_event(IanaExpertDocEvent,type="comment") + iana_experts_comment = exp_comment and exp_comment.desc + + html = None + js = None + css = None + diff_revisions = None + simple_diff_revisions = None + if document_html: + diff_revisions=get_diff_revisions(request, name, doc) + simple_diff_revisions = [t[1] for t in diff_revisions if t[0] == doc.name] + simple_diff_revisions.reverse() + html = doc.html_body() + if request.COOKIES.get("pagedeps") == "inline": + js = Path(finders.find("ietf/js/document_html.js")).read_text() + css = Path(finders.find("ietf/css/document_html_inline.css")).read_text() + if html: + css += Path(finders.find("ietf/css/document_html_txt.css")).read_text() + + # submission + submission = "" + if group is None: + submission = "unknown" + elif group.type_id == "individ": + submission = "individual" + elif group.type_id == "area" and doc.stream_id == "ietf": + submission = "individual in %s area" % group.acronym + else: + if group.features.acts_like_wg and not group.type_id == "edwg": + submission = "%s %s" % (group.acronym, group.type) + else: + submission = group.acronym + submission = '
%s' % (group.about_url(), submission) + + draft = doc.came_from_draft() + mailto_name = draft.name if draft else None + + return render(request, "doc/document_rfc.html" if document_html is False else "doc/document_html.html", + dict(doc=doc, + document_html=document_html, + css=css, + js=js, + html=html, + group=group, + top=top, + name=doc.name, + content=content, + split_content=split_content, + revisions=simple_diff_revisions if document_html else revisions, + latest_rev=latest_rev, + can_edit=can_edit, + can_edit_authors=can_edit_authors, + can_change_stream=can_change_stream, + rfc_number=doc.rfc_number, + updates=interesting_relations_that_doc.filter(relationship="updates"), + updated_by=interesting_relations_that.filter(relationship="updates"), + obsoletes=interesting_relations_that_doc.filter(relationship="obs"), + obsoleted_by=interesting_relations_that.filter(relationship="obs"), + status_changes=status_changes, + proposed_status_changes=proposed_status_changes, + has_errata=doc.pk and doc.tags.filter(slug="errata"), # doc.pk == None if using a fake_history_obj + file_urls=file_urls, + rfc_editor_state=doc.get_state("draft-rfceditor"), + iana_review_state=doc.get_state("draft-iana-review"), + iana_action_state=doc.get_state("draft-iana-action"), + iana_experts_state=doc.get_state("draft-iana-experts"), + iana_experts_comment=iana_experts_comment, + presentations=presentations, + diff_revisions=diff_revisions, + submission=submission, + mailto_name=mailto_name, + )) + + elif doc.type_id == "draft": split_content = request.COOKIES.get("full_draft", settings.USER_PREFERENCE_DEFAULTS["full_draft"]) == "off" if request.GET.get('include_text') == "0": split_content = True @@ -278,46 +409,22 @@ def document_main(request, name, rev=None, document_html=False): can_edit_replaces = has_role(request.user, ("Area Director", "Secretariat", "IRTF Chair", "WG Chair", "RG Chair", "WG Secretary", "RG Secretary")) + can_edit_action_holders = can_edit or ( + request.user.is_authenticated and group.has_role(request.user, group.features.docman_roles) + ) + is_author = request.user.is_authenticated and doc.documentauthor_set.filter(person__user=request.user).exists() can_view_possibly_replaces = can_edit_replaces or is_author - rfc_number = name[3:] if name.startswith("rfc") else None - draft_name = None - for a in aliases: - if a.startswith("draft"): - draft_name = a - - rfc_aliases = [prettify_std_name(a) for a in aliases - if a.startswith("fyi") or a.startswith("std") or a.startswith("bcp")] - latest_revision = None - # Workaround to allow displaying last rev of draft that became rfc as a draft - # This should be unwound when RFCs become their own documents. - if snapshot: - doc.name = doc.doc.name - name = doc.doc.name - else: - name = doc.name - file_urls, found_types = build_file_urls(doc) - if not snapshot and doc.get_state_slug() == "rfc": - # content - content = doc.text_or_error() # pyflakes:ignore - content = markup_txt.markup(maybe_split(content, split=split_content)) - + if not request.user.is_authenticated: + file_urls = [fu for fu in file_urls if fu[0] != "pdfized"] content = doc.text_or_error() # pyflakes:ignore content = markup_txt.markup(maybe_split(content, split=split_content)) - if not snapshot and doc.get_state_slug() == "rfc": - if not found_types: - content = "This RFC is not currently available online." - split_content = False - elif "txt" not in found_types: - content = "This RFC is not available in plain text format." 
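Both the new rfc branch and the surviving draft branch of document_main use the same cookie/query-parameter precedence shown above to decide whether to render a split view of the text. A standalone sketch of that logic (function name hypothetical):

from django.conf import settings

def want_split_content(request):
    # The full_draft cookie sets the default ("off" means show the split view);
    # an explicit include_text=0/1 query parameter overrides it.
    split = request.COOKIES.get(
        "full_draft", settings.USER_PREFERENCE_DEFAULTS["full_draft"]
    ) == "off"
    if request.GET.get("include_text") == "0":
        split = True
    elif request.GET.get("include_text") == "1":
        split = False
    return split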
- split_content = False - else: - latest_revision = doc.latest_event(NewRevisionDocEvent, type="new_revision") + latest_revision = doc.latest_event(NewRevisionDocEvent, type="new_revision") # ballot iesg_ballot_summary = None @@ -390,13 +497,6 @@ def document_main(request, name, rev=None, document_html=False): can_submit_unsolicited_review_for_teams = Group.objects.filter( reviewteamsettings__isnull=False, role__person__user=request.user, role__name='secr') - # mailing list search archive - search_archive = "www.ietf.org/mail-archive/web/" - if doc.stream_id == "ietf" and group.type_id == "wg" and group.list_archive: - search_archive = group.list_archive - - search_archive = quote(search_archive, safe="~") - # conflict reviews conflict_reviews = [r.source.name for r in interesting_relations_that.filter(relationship="conflrev")] @@ -417,13 +517,17 @@ def document_main(request, name, rev=None, document_html=False): # remaining actions actions = [] - if can_adopt_draft(request.user, doc) and not doc.get_state_slug() in ["rfc"] and not snapshot: + if can_adopt_draft(request.user, doc) and doc.get_state_slug() not in ["rfc"] and not snapshot: + target = urlreverse("ietf.doc.views_draft.adopt_draft", kwargs=dict(name=doc.name)) if doc.group and doc.group.acronym != 'none': # individual submission # already adopted in one group button_text = "Switch adoption" else: button_text = "Manage adoption" - actions.append((button_text, urlreverse('ietf.doc.views_draft.adopt_draft', kwargs=dict(name=doc.name)))) + # can_adopt_draft currently returns False for Area Directors + if has_role(request.user, ["Secretariat", "WG Chair"]): + target = urlreverse("ietf.doc.views_draft.ask_about_ietf_adoption_call", kwargs=dict(name=doc.name)) + actions.append((button_text, target)) if can_unadopt_draft(request.user, doc) and not doc.get_state_slug() in ["rfc"] and not snapshot: if doc.get_state_slug('draft-iesg') == 'idexists': @@ -492,12 +596,13 @@ def document_main(request, name, rev=None, document_html=False): if doc.get_state_slug() not in ["rfc", "expired"] and doc.stream_id in ("ietf",) and not snapshot: if iesg_state_slug == 'idexists' and can_edit: actions.append(("Begin IESG Processing", urlreverse('ietf.doc.views_draft.edit_info', kwargs=dict(name=doc.name)) + "?new=1")) - elif can_edit_stream_info and (iesg_state_slug in ('idexists','watching')): + elif can_edit_stream_info and (iesg_state_slug == 'idexists'): actions.append(("Submit to IESG for Publication", urlreverse('ietf.doc.views_draft.to_iesg', kwargs=dict(name=doc.name)))) - augment_docs_and_user_with_user_info([doc], request.user) + if request.user.is_authenticated and hasattr(request.user, "person"): + augment_docs_and_person_with_person_info([doc], request.user.person) - published = doc.latest_event(type="published_rfc") + published = doc.latest_event(type="published_rfc") # todo rethink this now that published_rfc is on rfc started_iesg_process = doc.latest_event(type="started_iesg_process") review_assignments = review_assignments_to_list_for_docs([doc]).get(doc.name, []) @@ -517,12 +622,7 @@ def document_main(request, name, rev=None, document_html=False): additional_urls = doc.documenturl_set.exclude(tag_id='auth48') # Stream description and name passing test - if doc.stream != None: - stream_desc = doc.stream.desc - stream = "draft-stream-" + doc.stream.slug - else: - stream_desc = "(None)" - stream = "(None)" + stream = ("draft-stream-" + doc.stream.slug) if doc.stream != None else "(None)" html = None js = None @@ -555,13 +655,12 @@ def 
document_main(request, name, rev=None, document_html=False): html=html, group=group, top=top, - name=name, + name=doc.name, content=content, split_content=split_content, revisions=simple_diff_revisions if document_html else revisions, snapshot=snapshot, stream=stream, - stream_desc=stream_desc, latest_revision=latest_revision, latest_rev=latest_rev, can_edit=can_edit, @@ -575,12 +674,11 @@ def document_main(request, name, rev=None, document_html=False): can_edit_iana_state=can_edit_iana_state, can_edit_consensus=can_edit_consensus, can_edit_replaces=can_edit_replaces, + can_edit_action_holders=can_edit_action_holders, can_view_possibly_replaces=can_view_possibly_replaces, can_request_review=can_request_review, can_submit_unsolicited_review_for_teams=can_submit_unsolicited_review_for_teams, - rfc_number=rfc_number, - draft_name=draft_name, telechat=telechat, iesg_ballot_summary=iesg_ballot_summary, submission=submission, @@ -597,7 +695,6 @@ def document_main(request, name, rev=None, document_html=False): conflict_reviews=conflict_reviews, status_changes=status_changes, proposed_status_changes=proposed_status_changes, - rfc_aliases=rfc_aliases, has_errata=doc.pk and doc.tags.filter(slug="errata"), # doc.pk == None if using a fake_history_obj published=published, file_urls=file_urls, @@ -617,7 +714,6 @@ def document_main(request, name, rev=None, document_html=False): iana_experts_comment=iana_experts_comment, started_iesg_process=started_iesg_process, shepherd_writeup=shepherd_writeup, - search_archive=search_archive, actions=actions, presentations=presentations, review_assignments=review_assignments, @@ -627,9 +723,9 @@ def document_main(request, name, rev=None, document_html=False): diff_revisions=diff_revisions )) - if doc.type_id == "charter": + elif doc.type_id == "charter": content = doc.text_or_error() # pyflakes:ignore - content = markup_txt.markup(content) + content = markdown.markdown(content) ballot_summary = None if doc.get_state_slug() in ("intrev", "iesgrev"): @@ -664,7 +760,7 @@ def document_main(request, name, rev=None, document_html=False): can_manage=can_manage, )) - if doc.type_id == "bofreq": + elif doc.type_id == "bofreq": content = markdown.markdown(doc.text_or_error()) editors = bofreq_editors(doc) responsible = bofreq_responsible(doc) @@ -684,8 +780,8 @@ def document_main(request, name, rev=None, document_html=False): editor_can_manage=editor_can_manage, )) - if doc.type_id == "conflrev": - filename = "%s-%s.txt" % (doc.canonical_name(), doc.rev) + elif doc.type_id == "conflrev": + filename = "%s-%s.txt" % (doc.name, doc.rev) pathname = os.path.join(settings.CONFLICT_REVIEW_PATH,filename) if doc.rev == "00" and not os.path.isfile(pathname): @@ -699,7 +795,7 @@ def document_main(request, name, rev=None, document_html=False): if doc.get_state_slug() in ("iesgeval", ) and doc.active_ballot(): ballot_summary = needed_ballot_positions(doc, list(doc.active_ballot().active_balloter_positions().values())) - conflictdoc = doc.related_that_doc('conflrev')[0].document + conflictdoc = doc.related_that_doc('conflrev')[0] return render(request, "doc/document_conflict_review.html", dict(doc=doc, @@ -714,8 +810,8 @@ def document_main(request, name, rev=None, document_html=False): approved_states=('appr-reqnopub-pend','appr-reqnopub-sent','appr-noprob-pend','appr-noprob-sent'), )) - if doc.type_id == "statchg": - filename = "%s-%s.txt" % (doc.canonical_name(), doc.rev) + elif doc.type_id == "statchg": + filename = "%s-%s.txt" % (doc.name, doc.rev) pathname = 
os.path.join(settings.STATUS_CHANGE_PATH,filename) if doc.rev == "00" and not os.path.isfile(pathname): @@ -748,14 +844,14 @@ def document_main(request, name, rev=None, document_html=False): sorted_relations=sorted_relations, )) - if doc.type_id in ("slides", "agenda", "minutes", "bluesheets", "procmaterials",): + elif doc.type_id in ("slides", "agenda", "minutes", "narrativeminutes", "bluesheets", "procmaterials",): can_manage_material = can_manage_materials(request.user, doc.group) presentations = doc.future_presentations() if doc.uploaded_filename: # we need to remove the extension for the globbing below to work basename = os.path.splitext(doc.uploaded_filename)[0] else: - basename = "%s-%s" % (doc.canonical_name(), doc.rev) + basename = "%s-%s" % (doc.name, doc.rev) pathname = os.path.join(doc.get_file_path(), basename) @@ -789,6 +885,13 @@ def document_main(request, name, rev=None, document_html=False): and doc.group.features.has_nonsession_materials and doc.type_id in doc.group.features.material_types ) + + session_statusid = None + actual_doc = doc if isinstance(doc,Document) else doc.doc + if actual_doc.session_set.count() == 1: + if actual_doc.session_set.get().schedulingevent_set.exists(): + session_statusid = actual_doc.session_set.get().schedulingevent_set.order_by("-time").first().status_id + return render(request, "doc/document_material.html", dict(doc=doc, top=top, @@ -801,10 +904,11 @@ def document_main(request, name, rev=None, document_html=False): can_upload = can_upload, other_types=other_types, presentations=presentations, + session_statusid=session_statusid, )) - if doc.type_id == "review": + elif doc.type_id == "review": basename = "{}.txt".format(doc.name) pathname = os.path.join(doc.get_file_path(), basename) content = get_unicode_document_content(basename, pathname) @@ -830,11 +934,11 @@ def document_main(request, name, rev=None, document_html=False): assignments=assignments, )) - if doc.type_id in ("chatlog", "polls"): + elif doc.type_id in ("chatlog", "polls"): if isinstance(doc,DocHistory): - session = doc.doc.sessionpresentation_set.last().session + session = doc.doc.presentations.last().session else: - session = doc.sessionpresentation_set.last().session + session = doc.presentations.last().session pathname = Path(session.meeting.get_materials_path()) / doc.type_id / doc.uploaded_filename content = get_unicode_document_content(doc.name, str(pathname)) return render( @@ -851,7 +955,7 @@ def document_main(request, name, rev=None, document_html=False): ) ) - if doc.type_id == "statement": + elif doc.type_id == "statement": if doc.uploaded_filename: basename = doc.uploaded_filename.split(".")[0] # strip extension else: @@ -859,7 +963,7 @@ def document_main(request, name, rev=None, document_html=False): variants = set([match.name.split(".")[1] for match in Path(doc.get_file_path()).glob(f"{basename}.*")]) inlineable = any([ext in variants for ext in ["md", "txt"]]) if inlineable: - content = markdown.markdown(doc.text_or_error()) + content = markdown.liberal_markdown(doc.text_or_error()) else: content = "No format available to display inline" if "pdf" in variants: @@ -872,7 +976,6 @@ def document_main(request, name, rev=None, document_html=False): can_manage = has_role(request.user,["Secretariat"]) # Add IAB or IESG as appropriate interesting_relations_that, interesting_relations_that_doc = interesting_doc_relations(doc) published = doc.latest_event(type="published_statement").time - return render(request, "doc/document_statement.html", dict(doc=doc, top=top, @@ 
-885,6 +988,9 @@ def document_main(request, name, rev=None, document_html=False): replaced_by=interesting_relations_that.filter(relationship="replaces"), can_manage=can_manage, )) + elif doc.type_id in ["bcp", "std", "fyi"]: + return render(request, "doc/document_subseries.html", {"doc": doc, "top": top}) + raise Http404("Document not found: %s" % (name + ("-%s"%rev if rev else ""))) @@ -914,7 +1020,7 @@ def document_raw_id(request, name, rev=None, ext=None): for t in possible_types: if os.path.exists(base_path + t): found_types[t]=base_path+t - if ext == None: + if ext is None: ext = 'txt' if not ext in found_types: raise Http404('dont have the file for that extension') @@ -938,9 +1044,9 @@ def document_html(request, name, rev=None): doc = found.documents.get() rev = found.matched_rev - if not requested_rev and doc.is_rfc(): # Someone asked for /doc/html/8989 + if not requested_rev and doc.type_id == "rfc": # Someone asked for /doc/html/8989 if not name.startswith('rfc'): - return redirect('ietf.doc.views_doc.document_html', name=doc.canonical_name()) + return redirect('ietf.doc.views_doc.document_html', name=doc.name) if rev: doc = doc.history_set.filter(rev=rev).first() or doc.fake_history_obj(rev) @@ -948,8 +1054,15 @@ def document_html(request, name, rev=None): if not os.path.exists(doc.get_file_name()): raise Http404("File not found: %s" % doc.get_file_name()) - return document_main(request, name=doc.name if requested_rev else doc.canonical_name(), rev=doc.rev if requested_rev or not doc.is_rfc() else None, document_html=True) + return document_main( + request, + name=doc.name if requested_rev else doc.name, + rev=doc.rev if requested_rev or doc.type_id != "rfc" else None, + document_html=True, + ) + +@login_required def document_pdfized(request, name, rev=None, ext=None): found = fuzzy_find_documents(name, rev) @@ -974,41 +1087,18 @@ def document_pdfized(request, name, rev=None, ext=None): if not os.path.exists(doc.get_file_name()): raise Http404("File not found: %s" % doc.get_file_name()) - pdf = doc.pdfized() + try: + pdf = doc.pdfized() + except Exception: + return render(request, "doc/weasyprint_failed.html") if pdf: return HttpResponse(pdf,content_type='application/pdf') else: raise Http404 -def check_doc_email_aliases(): - pattern = re.compile(r'^expand-(.*?)(\..*?)?@.*? +(.*)$') - good_count = 0 - tot_count = 0 - with io.open(settings.DRAFT_VIRTUAL_PATH,"r") as virtual_file: - for line in virtual_file.readlines(): - m = pattern.match(line) - tot_count += 1 - if m: - good_count += 1 - if good_count > 50 and tot_count < 3*good_count: - return True - return False - -def get_doc_email_aliases(name): - if name: - pattern = re.compile(r'^expand-(%s)(\..*?)?@.*? +(.*)$'%name) - else: - pattern = re.compile(r'^expand-(.*?)(\..*?)?@.*? 
+(.*)$') - aliases = [] - with io.open(settings.DRAFT_VIRTUAL_PATH,"r") as virtual_file: - for line in virtual_file.readlines(): - m = pattern.match(line) - if m: - aliases.append({'doc_name':m.group(1),'alias_type':m.group(2),'expansion':m.group(3)}) - return aliases def document_email(request,name): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) top = render_document_top(request, doc, "email", name) aliases = get_doc_email_aliases(name) if doc.type_id=='draft' else None @@ -1026,6 +1116,11 @@ def document_email(request,name): def get_diff_revisions(request, name, doc): + """ Calculate what to offer for diff comparisons + + returns list of (name, rev, time, url, is_this_doc, is_previous_doc) + ordered by -time for use by forms used to get to the diff tools. + """ diffable = any( [ name.startswith(prefix) @@ -1047,18 +1142,22 @@ def get_diff_revisions(request, name, doc): diff_documents = [doc] diff_documents.extend( - Document.objects.filter( - docalias__relateddocument__source=doc, - docalias__relateddocument__relationship="replaces", - ) + [ + r.target + for r in RelatedDocument.objects.filter(source=doc, relationship="replaces") + ] ) + if doc.came_from_draft(): + diff_documents.append(doc.came_from_draft()) + + if doc.became_rfc(): + rfc = doc.became_rfc() + e = rfc.latest_event(type="published_rfc") + diff_revisions.append((rfc.name, "", e.time if e else rfc.time, rfc.name, False, False)) - if doc.get_state_slug() == "rfc": + if doc.type_id == "rfc": e = doc.latest_event(type="published_rfc") - aliases = doc.docalias.filter(name__startswith="rfc") - if aliases: - name = aliases[0].name - diff_revisions.append((name, "", e.time if e else doc.time, name)) + diff_revisions.append((name, "", e.time if e else doc.time, name, True, False)) seen = set() for e in ( @@ -1087,13 +1186,22 @@ def get_diff_revisions(request, name, doc): # rfcdiff tool has special support for IDs url = e.doc.name + "-" + e.rev - diff_revisions.append((e.doc.name, e.rev, e.time, url)) + diff_revisions.append((e.doc.name, e.rev, e.time, url, e.doc == doc and e.rev == doc.rev, False)) + + diff_revisions.sort(key=lambda t: t[2], reverse=True) + for index, t in enumerate(diff_revisions): + if t[4]: # is_this_doc + n = index+1 + if n < len(diff_revisions): + t_name, rev, time, url, _, _ = diff_revisions[n] + diff_revisions[n] = (t_name, rev, time, url, False, True) + break return diff_revisions def document_history(request, name): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) top = render_document_top(request, doc, "history", name) diff_revisions = get_diff_revisions(request, name, doc) @@ -1104,21 +1212,43 @@ def document_history(request, name): add_events_message_info(events) # figure out if the current user can add a comment to the history - if doc.type_id == "draft" and doc.group != None: - can_add_comment = bool(has_role(request.user, ("Area Director", "Secretariat", "IRTF Chair", "IANA", "RFC Editor")) or ( - request.user.is_authenticated and - Role.objects.filter(name__in=("chair", "secr"), - group__acronym=doc.group.acronym, - person__user=request.user))) + if doc.type_id in ("draft", "rfc") and doc.group is not None: + can_add_comment = bool( + has_role( + request.user, + ("Area Director", "Secretariat", "IRTF Chair", "IANA", "RFC Editor"), + ) + or ( + request.user.is_authenticated + and Role.objects.filter( + name__in=("chair", "secr"), + group__acronym=doc.group.acronym, + 
person__user=request.user, + ) + ) + ) else: - can_add_comment = has_role(request.user, ("Area Director", "Secretariat", "IRTF Chair")) - return render(request, "doc/document_history.html", - dict(doc=doc, - top=top, - diff_revisions=diff_revisions, - events=events, - can_add_comment=can_add_comment, - )) + can_add_comment = has_role( + request.user, ("Area Director", "Secretariat", "IRTF Chair") + ) + + # if the current user has balloted on this document, give them a revision hint + ballot_doc_rev = None + if request.user.is_authenticated: + ballot_doc_rev = last_ballot_doc_revision(doc, request.user.person) + + return render( + request, + "doc/document_history.html", + { + "doc": doc, + "top": top, + "diff_revisions": diff_revisions, + "events": events, + "can_add_comment": can_add_comment, + "ballot_doc_rev": ballot_doc_rev, + }, + ) def document_bibtex(request, name, rev=None): @@ -1126,7 +1256,7 @@ def document_bibtex(request, name, rev=None): raise Http404() # Make sure URL_REGEXPS did not grab too much for the rev number - if rev != None and len(rev) != 2: + if rev is not None and len(rev) != 2: mo = re.search(r"^(?P[0-9]{1,2})-(?P[0-9]{2})$", rev) if mo: name = name+"-"+mo.group(1) @@ -1135,27 +1265,29 @@ def document_bibtex(request, name, rev=None): name = name+"-"+rev rev = None - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) - latest_revision = doc.latest_event(NewRevisionDocEvent, type="new_revision") - replaced_by = [d.name for d in doc.related_that("replaces")] - published = doc.latest_event(type="published_rfc") is not None - rfc = latest_revision.doc if latest_revision and latest_revision.doc.get_state_slug() == "rfc" else None + if doc.type_id not in ["rfc", "draft"]: + raise Http404() - if rev != None and rev != doc.rev: - # find the entry in the history - for h in doc.history_set.order_by("-time"): - if rev == h.rev: - doc = h - break + doi = None + draft_became_rfc = None + replaced_by = None + latest_revision = None + if doc.type_id == "draft": + latest_revision = doc.latest_event(NewRevisionDocEvent, type="new_revision") + replaced_by = [d.name for d in doc.related_that("replaces")] + draft_became_rfc = doc.became_rfc() - if doc.is_rfc(): - # This needs to be replaced with a lookup, as the mapping may change - # over time. Probably by updating ietf/sync/rfceditor.py to add the - # as a DocAlias, and use a method on Document to retrieve it. 
- doi = "10.17487/RFC%04d" % int(doc.rfc_number()) - else: - doi = None + if rev is not None and rev != doc.rev: + # find the entry in the history + for h in doc.history_set.order_by("-time"): + if rev == h.rev: + doc = h + break + + elif doc.type_id == "rfc": + doi = doc.doi if doc.is_dochistory(): latest_event = doc.latest_event(type='new_revision', rev=rev) @@ -1165,8 +1297,7 @@ def document_bibtex(request, name, rev=None): return render(request, "doc/document_bibtex.bib", dict(doc=doc, replaced_by=replaced_by, - published=published, - rfc=rfc, + published_as=draft_became_rfc, latest_revision=latest_revision, doi=doi, ), @@ -1187,7 +1318,7 @@ def document_bibxml(request, name, rev=None): raise Http404() # Make sure URL_REGEXPS did not grab too much for the rev number - if rev != None and len(rev) != 2: + if rev is not None and len(rev) != 2: mo = re.search(r"^(?P[0-9]{1,2})-(?P[0-9]{2})$", rev) if mo: name = name+"-"+mo.group(1) @@ -1203,7 +1334,7 @@ def document_bibxml(request, name, rev=None): def document_writeup(request, name): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) top = render_document_top(request, doc, "writeup", name) def text_from_writeup(event_type): @@ -1267,7 +1398,7 @@ def text_from_writeup(event_type): )) def document_shepherd_writeup(request, name): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) lastwriteup = doc.latest_event(WriteupDocEvent,type="changed_protocol_writeup") if lastwriteup: writeup_text = lastwriteup.text @@ -1304,22 +1435,46 @@ def document_shepherd_writeup_template(request, type): def document_references(request, name): - doc = get_object_or_404(Document,docalias__name=name) + doc = get_object_or_404(Document,name=name) refs = doc.references() + if doc.type_id in ["bcp","std","fyi"]: + for rfc in doc.contains(): + refs |= rfc.references() return render(request, "doc/document_references.html",dict(doc=doc,refs=sorted(refs,key=lambda x:x.target.name),)) def document_referenced_by(request, name): - doc = get_object_or_404(Document,docalias__name=name) + """View documents that reference the named document + + The view lists both direct references to a the named document, plus references to + related other documents. For a draft that became an RFC, this will include references + to the RFC. For an RFC, this will include references to the draft it came from, if any. + For a subseries document, this will include references to any of the RFC documents it + contains. + + In the rendered output, a badge is applied to indicate the name of the document the + reference actually targeted. E.g., on the display for a draft that became RFC NNN, + references included because they point to that RFC would be shown with a tag "As RFC NNN". + The intention is to make the "Referenced By" page useful for finding related work while + accurately reflecting the actual reference relationships. 
+ """ + doc = get_object_or_404(Document,name=name) refs = doc.referenced_by() - full = ( request.GET.get('full') != None ) + if doc.came_from_draft(): + refs |= doc.came_from_draft().referenced_by() + if doc.became_rfc(): + refs |= doc.became_rfc().referenced_by() + if doc.type_id in ["bcp","std","fyi"]: + for rfc in doc.contains(): + refs |= rfc.referenced_by() + full = ( request.GET.get('full') is not None ) numdocs = refs.count() if not full and numdocs>250: refs=refs[:250] else: numdocs=None - refs=sorted(refs,key=lambda x:(['refnorm','refinfo','refunk','refold'].index(x.relationship.slug),x.source.canonical_name())) + refs=sorted(refs,key=lambda x:(['refnorm','refinfo','refunk','refold'].index(x.relationship.slug),x.source.name)) return render(request, "doc/document_referenced_by.html", - dict(alias_name=name, + dict(name=name, doc=doc, numdocs=numdocs, refs=refs, @@ -1331,7 +1486,7 @@ def document_ballot_content(request, doc, ballot_id, editable=True): augment_events_with_revision(doc, all_ballots) ballot = None - if ballot_id != None: + if ballot_id is not None: ballot_id = int(ballot_id) for b in all_ballots: if b.id == ballot_id: @@ -1355,7 +1510,7 @@ def document_ballot_content(request, doc, ballot_id, editable=True): position_groups = [] for n in BallotPositionName.objects.filter(slug__in=[p.pos_id for p in positions]).order_by('order'): g = (n, [p for p in positions if p.pos_id == n.slug]) - g[1].sort(key=lambda p: (p.is_old_pos, p.balloter.plain_name())) + g[1].sort(key=lambda p: (p.is_old_pos, normalize_for_sorting(p.balloter.plain_name()))) if n.blocking: position_groups.insert(0, g) else: @@ -1393,22 +1548,13 @@ def document_ballot_content(request, doc, ballot_id, editable=True): request=request) def document_ballot(request, name, ballot_id=None): - doc = get_object_or_404(Document, docalias__name=name) - all_ballots = list(BallotDocEvent.objects.filter(doc=doc, type="created_ballot").order_by("time")) - if not ballot_id: - if all_ballots: - ballot = all_ballots[-1] - else: - raise Http404("Ballot not found for: %s" % name) - ballot_id = ballot.id + doc = get_object_or_404(Document, name=name) + ballots = BallotDocEvent.objects.filter(doc=doc, type="created_ballot").order_by("time") + if ballot_id is not None: + ballot = ballots.filter(id=ballot_id).first() else: - ballot_id = int(ballot_id) - for b in all_ballots: - if b.id == ballot_id: - ballot = b - break - - if not ballot_id or not ballot: + ballot = ballots.last() + if not ballot: raise Http404("Ballot not found for: %s" % name) if ballot.ballot_type.slug == "irsg-approve": @@ -1418,18 +1564,16 @@ def document_ballot(request, name, ballot_id=None): top = render_document_top(request, doc, ballot_tab, name) - c = document_ballot_content(request, doc, ballot_id, editable=True) - request.session['ballot_edit_return_point'] = request.path_info + c = document_ballot_content(request, doc, ballot.id, editable=True) return render(request, "doc/document_ballot.html", dict(doc=doc, top=top, ballot_content=c, - # ballot_type_slug=ballot.ballot_type.slug, )) def document_irsg_ballot(request, name, ballot_id=None): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) top = render_document_top(request, doc, "irsgballot", name) if not ballot_id: ballot = doc.latest_event(BallotDocEvent, type="created_ballot", ballot_type__slug='irsg-approve') @@ -1438,8 +1582,6 @@ def document_irsg_ballot(request, name, ballot_id=None): c = document_ballot_content(request, doc, ballot_id, 
editable=True) - request.session['ballot_edit_return_point'] = request.path_info - return render(request, "doc/document_ballot.html", dict(doc=doc, top=top, @@ -1448,7 +1590,7 @@ def document_irsg_ballot(request, name, ballot_id=None): )) def document_rsab_ballot(request, name, ballot_id=None): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) top = render_document_top(request, doc, "rsabballot", name) if not ballot_id: ballot = doc.latest_event(BallotDocEvent, type="created_ballot", ballot_type__slug='rsab-approve') @@ -1457,8 +1599,6 @@ def document_rsab_ballot(request, name, ballot_id=None): c = document_ballot_content(request, doc, ballot_id, editable=True) - request.session['ballot_edit_return_point'] = request.path_info - return render( request, "doc/document_ballot.html", @@ -1470,20 +1610,27 @@ def document_rsab_ballot(request, name, ballot_id=None): ) def ballot_popup(request, name, ballot_id): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) c = document_ballot_content(request, doc, ballot_id=ballot_id, editable=False) ballot = get_object_or_404(BallotDocEvent,id=ballot_id) + + try: + return_to_url = parse_ballot_edit_return_point(request.GET.get('ballot_edit_return_point'), name, ballot_id) + except ValueError: + return HttpResponseBadRequest('ballot_edit_return_point is invalid') + return render(request, "doc/ballot_popup.html", dict(doc=doc, ballot_content=c, ballot_id=ballot_id, ballot_type_slug=ballot.ballot_type.slug, + ballot_edit_return_point=return_to_url, editable=True, )) def document_json(request, name, rev=None): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) def extract_name(s): return s.name if s else None @@ -1503,15 +1650,21 @@ def extract_name(s): data["expires"] = doc.expires.strftime("%Y-%m-%d %H:%M:%S") if doc.expires else None data["title"] = doc.title data["abstract"] = doc.abstract - data["aliases"] = list(doc.docalias.values_list("name", flat=True)) data["state"] = extract_name(doc.get_state()) data["intended_std_level"] = extract_name(doc.intended_std_level) data["std_level"] = extract_name(doc.std_level) + author_qs = ( + doc.rfcauthor_set + if doc.type_id == "rfc" and doc.rfcauthor_set.exists() + else doc.documentauthor_set + ).select_related("person").prefetch_related("person__email_set").order_by("order") data["authors"] = [ - dict(name=author.person.name, - email=author.email.address if author.email else None, - affiliation=author.affiliation) - for author in doc.documentauthor_set.all().select_related("person", "email").order_by("order") + { + "name": author.titlepage_name if hasattr(author, "titlepage_name") else author.person.name, + "email": author.email.address if author.email else None, + "affiliation": author.affiliation, + } + for author in author_qs ] data["shepherd"] = doc.shepherd.formatted_email() if doc.shepherd else None data["ad"] = doc.ad.role_email("ad").formatted_email() if doc.ad else None @@ -1519,7 +1672,7 @@ def extract_name(s): latest_revision = doc.latest_event(NewRevisionDocEvent, type="new_revision") data["rev_history"] = make_rev_history(latest_revision.doc if latest_revision else doc) - if doc.type_id == "draft": + if doc.type_id == "draft": # These live only on drafts data["iesg_state"] = extract_name(doc.get_state("draft-iesg")) data["rfceditor_state"] = extract_name(doc.get_state("draft-rfceditor")) data["iana_review_state"] = 
extract_name(doc.get_state("draft-iana-review")) @@ -1528,6 +1681,8 @@ def extract_name(s): if doc.stream_id in ("ietf", "irtf", "iab"): e = doc.latest_event(ConsensusDocEvent, type="changed_consensus") data["consensus"] = e.consensus if e else None + + if doc.type_id in ["draft", "rfc"]: data["stream"] = extract_name(doc.stream) return HttpResponse(json.dumps(data, indent=2), content_type='application/json') @@ -1538,18 +1693,18 @@ class AddCommentForm(forms.Form): @role_required('Area Director', 'Secretariat', 'IRTF Chair', 'WG Chair', 'RG Chair', 'WG Secretary', 'RG Secretary', 'IANA', 'RFC Editor') def add_comment(request, name): """Add comment to history of document.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) login = request.user.person - if doc.type_id == "draft" and doc.group != None: + if doc.type_id == "draft" and doc.group is not None: can_add_comment = bool(has_role(request.user, ("Area Director", "Secretariat", "IRTF Chair", "IANA", "RFC Editor")) or ( request.user.is_authenticated and Role.objects.filter(name__in=("chair", "secr"), group__acronym=doc.group.acronym, person__user=request.user))) else: - can_add_comment = has_role(request.user, ("Area Director", "Secretariat", "IRTF Chair")) + can_add_comment = has_role(request.user, ("Area Director", "Secretariat", "IRTF Chair", "RFC Editor")) if not can_add_comment: # The user is a chair or secretary, but not for this WG or RG permission_denied(request, "You need to be a chair or secretary of this group to add a comment.") @@ -1622,9 +1777,9 @@ def telechat_date(request, name): def doc_titletext(doc): if doc.type.slug=='conflrev': - conflictdoc = doc.relateddocument_set.get(relationship__slug='conflrev').target.document - return 'the conflict review of %s' % conflictdoc.canonical_name() - return doc.canonical_name() + conflictdoc = doc.relateddocument_set.get(relationship__slug='conflrev').target + return 'the conflict review of %s' % conflictdoc.name + return doc.name def edit_notify(request, name): @@ -1741,11 +1896,21 @@ def add_fields(self, form, index): }) -@role_required('Area Director', 'Secretariat') +@login_required def edit_action_holders(request, name): """Change the set of action holders for a doc""" doc = get_object_or_404(Document, name=name) - + + can_edit = has_role(request.user, ("Area Director", "Secretariat")) or ( + doc.group and doc.group.has_role(request.user, doc.group.features.docman_roles) + ) + if not can_edit: + # Keep the list of roles in this message up-to-date with the can_edit logic + message = "Restricted to roles: Area Director, Secretariat" + if doc.group and doc.group.acronym != "none": + message += f", and document managers for the {doc.group.acronym} group" + raise PermissionDenied(message) + if request.method == 'POST': form = ActionHoldersForm(request.POST) if form.is_valid(): @@ -1783,9 +1948,9 @@ def edit_action_holders(request, name): role_ids = dict() # maps role slug to list of Person IDs (assumed numeric in the JavaScript) extra_prefetch = [] # list of Person objects to prefetch for select2 field - if len(doc.authors()) > 0: + authors = doc.author_persons() + if len(authors) > 0: doc_role_labels.append(dict(slug='authors', label='Authors')) - authors = doc.authors() role_ids['authors'] = [p.pk for p in authors] extra_prefetch += authors @@ -1855,15 +2020,25 @@ class ReminderEmailForm(forms.Form): strip=True, ) -@role_required('Area Director', 'Secretariat') +@login_required def remind_action_holders(request, name): doc = 
get_object_or_404(Document, name=name) - + + can_edit = has_role(request.user, ("Area Director", "Secretariat")) or ( + doc.group and doc.group.has_role(request.user, doc.group.features.docman_roles) + ) + if not can_edit: + # Keep the list of roles in this message up-to-date with the can_edit logic + message = "Restricted to roles: Area Director, Secretariat" + if doc.group and doc.group.acronym != "none": + message += f", and document managers for the {doc.group.acronym} group" + raise PermissionDenied(message) + if request.method == 'POST': form = ReminderEmailForm(request.POST) if form.is_valid(): email_remind_action_holders(request, doc, form.cleaned_data['note']) - return redirect('ietf.doc.views_doc.document_main', name=doc.canonical_name()) + return redirect('ietf.doc.views_doc.document_main', name=doc.name) form = ReminderEmailForm() return render( @@ -1877,16 +2052,26 @@ def remind_action_holders(request, name): ) -def email_aliases(request,name=''): - doc = get_object_or_404(Document, name=name) if name else None - if not name: - # require login for the overview page, but not for the - # document-specific pages - if not request.user.is_authenticated: - return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path)) - aliases = get_doc_email_aliases(name) - - return render(request,'doc/email_aliases.html',{'aliases':aliases,'ietf_domain':settings.IETF_DOMAIN,'doc':doc}) +@login_required +def email_aliases(request): + """List of all email aliases + + This is currently slow except when cached + """ + slowcache = caches["slowpages"] + cache_key = "emailaliasesview" + aliases = slowcache.get(cache_key) + if not aliases: + aliases = get_doc_email_aliases() # gets all aliases + slowcache.set(cache_key, aliases, 3600) + return render( + request, + "doc/email_aliases.html", + { + "aliases": aliases, + "ietf_domain": settings.IETF_DOMAIN, + }, + ) class VersionForm(forms.Form): @@ -1900,7 +2085,7 @@ def __init__(self, *args, **kwargs): def edit_sessionpresentation(request,name,session_id): doc = get_object_or_404(Document, name=name) - sp = get_object_or_404(doc.sessionpresentation_set, session_id=session_id) + sp = get_object_or_404(doc.presentations, session_id=session_id) if not sp.session.can_manage_materials(request.user): raise Http404 @@ -1917,7 +2102,13 @@ def edit_sessionpresentation(request,name,session_id): if form.is_valid(): new_selection = form.cleaned_data['version'] if initial['version'] != new_selection: - doc.sessionpresentation_set.filter(pk=sp.pk).update(rev=None if new_selection=='current' else new_selection) + doc.presentations.filter(pk=sp.pk).update(rev=None if new_selection=='current' else new_selection) + if doc.type_id == "slides" and hasattr(settings, "MEETECHO_API_CONFIG"): + sm = SlidesManager(api_config=settings.MEETECHO_API_CONFIG) + try: + sm.send_update(sp.session) + except MeetechoAPIError as err: + log.log(f"Error in SlidesManager.send_update(): {err}") c = DocEvent(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person) c.desc = "Revision for session %s changed to %s" % (sp.session,new_selection) c.save() @@ -1929,7 +2120,7 @@ def edit_sessionpresentation(request,name,session_id): def remove_sessionpresentation(request,name,session_id): doc = get_object_or_404(Document, name=name) - sp = get_object_or_404(doc.sessionpresentation_set, session_id=session_id) + sp = get_object_or_404(doc.presentations, session_id=session_id) if not sp.session.can_manage_materials(request.user): raise Http404 @@ -1938,7 +2129,13 @@ def 
remove_sessionpresentation(request,name,session_id): raise Http404 if request.method == 'POST': - doc.sessionpresentation_set.filter(pk=sp.pk).delete() + doc.presentations.filter(pk=sp.pk).delete() + if doc.type_id == "slides" and hasattr(settings, "MEETECHO_API_CONFIG"): + sm = SlidesManager(api_config=settings.MEETECHO_API_CONFIG) + try: + sm.delete(sp.session, doc) + except MeetechoAPIError as err: + log.log(f"Error in SlidesManager.delete(): {err}") c = DocEvent(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person) c.desc = "Removed from session: %s" % (sp.session) c.save() @@ -1962,7 +2159,7 @@ def add_sessionpresentation(request,name): version_choices.insert(0,('current','Current at the time of the session')) sessions = get_upcoming_manageable_sessions(request.user) - sessions = sort_sessions([s for s in sessions if not s.sessionpresentation_set.filter(document=doc).exists()]) + sessions = sort_sessions([s for s in sessions if not s.presentations.filter(document=doc).exists()]) if doc.group: sessions = sorted(sessions,key=lambda x:0 if x.group==doc.group else 1) @@ -1975,7 +2172,25 @@ def add_sessionpresentation(request,name): session_id = session_form.cleaned_data['session'] version = version_form.cleaned_data['version'] rev = None if version=='current' else version - doc.sessionpresentation_set.create(session_id=session_id,rev=rev) + if doc.type_id == "slides": + max_order = SessionPresentation.objects.filter( + document__type='slides', + session__pk=session_id, + ).aggregate(Max('order'))['order__max'] or 0 + order = max_order + 1 + else: + order = 0 + sp = doc.presentations.create( + session_id=session_id, + rev=rev, + order=order, + ) + if doc.type_id == "slides" and hasattr(settings, "MEETECHO_API_CONFIG"): + sm = SlidesManager(api_config=settings.MEETECHO_API_CONFIG) + try: + sm.add(sp.session, doc, order=sp.order) + except MeetechoAPIError as err: + log.log(f"Error in SlidesManager.add(): {err}") c = DocEvent(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person) c.desc = "%s to session: %s" % ('Added -%s'%rev if rev else 'Added', Session.objects.get(pk=session_id)) c.save() @@ -2028,23 +2243,144 @@ def idnits2_rfc_status(request): def idnits2_state(request, name, rev=None): - doc = get_object_or_404(Document, docalias__name=name) - if doc.type_id!='draft': + doc = get_object_or_404(Document, name=name) + if doc.type_id not in ["draft", "rfc"]: raise Http404 - zero_revision = NewRevisionDocEvent.objects.filter(doc=doc,rev='00').first() + zero_revision = None + if doc.type_id == "rfc": + draft = doc.came_from_draft() + if draft: + zero_revision = NewRevisionDocEvent.objects.filter( + doc=draft, rev="00" + ).first() + else: + zero_revision = NewRevisionDocEvent.objects.filter(doc=doc, rev="00").first() if zero_revision: doc.created = zero_revision.time else: - doc.created = doc.docevent_set.order_by('-time').first().time + if doc.type_id == "draft": + if doc.became_rfc(): + interesting_event = ( + doc.became_rfc() + .docevent_set.filter(type="published_rfc") + .order_by("-time") + .first() + ) + else: + interesting_event = doc.docevent_set.order_by( + "-time" + ).first() # Is taking the most _recent_ instead of the oldest event correct? 
+ else: # doc.type_id == "rfc" + interesting_event = ( + doc.docevent_set.filter(type="published_rfc").order_by("-time").first() + ) + doc.created = interesting_event.time if doc.std_level: doc.deststatus = doc.std_level.name elif doc.intended_std_level: doc.deststatus = doc.intended_std_level.name else: - text = doc.text() + # 10000 is a conservative prefix on number of utf-8 encoded bytes to + # cover at least the first 10 lines of characters + text = doc.text(size=10000) if text: - parsed_draft = PlaintextDraft(text=doc.text(), source=name, name_from_source=False) - doc.deststatus = parsed_draft.get_status() + doc.deststatus = get_status_from_draft_text(text) + else: + doc.deststatus = "Unknown" + return render( + request, + "doc/idnits2-state.txt", + context={"doc": doc}, + content_type="text/plain;charset=utf-8", + ) + + +@role_required("Secretariat") +def investigate(request): + """Investigate a fragment + + A plain GET with no querystring returns the UI page. + + POST with the task_id field empty starts an async task and returns a JSON response with + the ID needed to monitor the task for results. + + GET with a querystring parameter "id" will poll the status of the async task and return "ready" + or "notready". + + POST with the task_id field set to the id of a "ready" task will return its results or an error + if the task failed or the id is invalid (expired, never exited, etc). + """ + results = None + # Start an investigation or retrieve a result on a POST + if request.method == "POST": + form = InvestigateForm(request.POST) + if form.is_valid(): + task_id = form.cleaned_data["task_id"] + if task_id: + # Ignore the rest of the form and retrieve the result + task_result = AsyncResult(task_id) + if task_result.successful(): + retval = task_result.get() + results = retval["results"] + form.data = form.data.copy() + form.data["name_fragment"] = retval[ + "name_fragment" + ] # ensure consistency + del form.data["task_id"] # do not request the task result again + else: + form.add_error( + None, + "The investigation task failed. Please try again and ask for help if this recurs.", + ) + # Falls through to the render at the end! 
+ else: + name_fragment = form.cleaned_data["name_fragment"] + task_result = investigate_fragment_task.delay(name_fragment) + return JsonResponse({"id": task_result.id}) + else: + task_id = request.GET.get("id", None) + if task_id is not None: + # Check status if we got the "id" parameter + task_result = AsyncResult(task_id) + return JsonResponse( + {"status": "ready" if task_result.ready() else "notready"} + ) else: - doc.deststatus="Unknown" - return render(request, 'doc/idnits2-state.txt', context={'doc':doc}, content_type='text/plain;charset=utf-8') + # Serve up an empty form + form = InvestigateForm() + + # If we get here, it is just a plain GET - serve the UI + return render( + request, + "doc/investigate.html", + context={ + "form": form, + "results": results, + }, + ) + +def rfcxml_notprepped(request, number): + number = int(number) + if number < settings.FIRST_V3_RFC: + raise Http404 + rfc = Document.objects.filter(type="rfc", rfc_number=number).first() + if rfc is None: + raise Http404 + name = f"notprepped/rfc{number}.notprepped.xml" + if not StoredObject.objects.filter(name=name).exists(): + raise Http404 + try: + bytes = retrieve_bytes("rfc", name) + except FileNotFoundError: + raise Http404 + return FileResponse(ContentFile(bytes, name=f"rfc{number}.notprepped.xml"), as_attachment=True) + + +def rfcxml_notprepped_wrapper(request, number): + number = int(number) + if number < settings.FIRST_V3_RFC: + raise Http404 + rfc = Document.objects.filter(type="rfc", rfc_number=number).first() + if rfc is None: + raise Http404 + return render(request, "doc/notprepped_wrapper.html", context={"rfc": rfc}) diff --git a/ietf/doc/views_downref.py b/ietf/doc/views_downref.py index 1b7b51edb0..2668baae34 100644 --- a/ietf/doc/views_downref.py +++ b/ietf/doc/views_downref.py @@ -19,7 +19,7 @@ def downref_registry(request): downref_doc_pairs = [ ] downref_relations = RelatedDocument.objects.filter(relationship_id='downref-approval') for rel in downref_relations: - downref_doc_pairs.append((rel.target.document, rel.source)) + downref_doc_pairs.append((rel.target, rel.source)) return render(request, 'doc/downref.html', { "doc_pairs": downref_doc_pairs, @@ -38,18 +38,18 @@ def downref_registry_add(request): if form.is_valid(): drafts = form.cleaned_data['drafts'] rfc = form.cleaned_data['rfc'] - for da in drafts: - RelatedDocument.objects.create(source=da.document, + for d in drafts: + RelatedDocument.objects.create(source=d, target=rfc, relationship_id='downref-approval') - c = DocEvent(type="downref_approved", doc=da.document, - rev=da.document.rev, by=login) + c = DocEvent(type="downref_approved", doc=d, + rev=d.rev, by=login) c.desc = "Downref to RFC %s approved by Last Call for %s-%s" % ( - rfc.document.rfc_number(), da.name, da.document.rev) + rfc.rfc_number, d.name, d.rev) c.save() - c = DocEvent(type="downref_approved", doc=rfc.document, - rev=rfc.document.rev, by=login) + c = DocEvent(type="downref_approved", doc=rfc, + rev=rfc.rev, by=login) c.desc = "Downref to RFC %s approved by Last Call for %s-%s" % ( - rfc.document.rfc_number(), da.name, da.document.rev) + rfc.rfc_number, d.name, d.rev) c.save() return HttpResponseRedirect(urlreverse('ietf.doc.views_downref.downref_registry')) diff --git a/ietf/doc/views_draft.py b/ietf/doc/views_draft.py index b74042ac57..c5faf1140b 100644 --- a/ietf/doc/views_draft.py +++ b/ietf/doc/views_draft.py @@ -23,7 +23,7 @@ import debug # pyflakes:ignore -from ietf.doc.models import ( Document, DocAlias, RelatedDocument, State, +from ietf.doc.models import 
( Document, RelatedDocument, State, StateType, DocEvent, ConsensusDocEvent, TelechatDocEvent, WriteupDocEvent, StateDocEvent, IanaExpertDocEvent, IESG_SUBSTATE_TAGS) from ietf.doc.mails import ( email_pulled_from_rfc_queue, email_resurrect_requested, @@ -32,13 +32,15 @@ generate_publication_request, email_adopted, email_intended_status_changed, email_iesg_processing_document, email_ad_approved_doc, email_iana_expert_review_state_changed ) +from ietf.doc.storage_utils import retrieve_bytes, store_bytes +from ietf.doc.templatetags.ietf_filters import is_doc_ietf_adoptable from ietf.doc.utils import ( add_state_change_event, can_adopt_draft, can_unadopt_draft, get_tags_for_stream_id, nice_consensus, update_action_holders, update_reminder, update_telechat, make_notify_changed_event, get_initial_notify, set_replaces_for_document, default_consensus, tags_suffix, can_edit_docextresources, update_doc_extresources ) from ietf.doc.lastcall import request_last_call -from ietf.doc.fields import SearchableDocAliasesField +from ietf.doc.fields import SearchableDocumentsField from ietf.doc.forms import ExtResourceForm from ietf.group.models import Group, Role, GroupFeatures from ietf.iesg.models import TelechatDate @@ -49,11 +51,12 @@ from ietf.name.models import IntendedStdLevelName, DocTagName, StreamName from ietf.person.fields import SearchableEmailField from ietf.person.models import Person, Email -from ietf.utils.mail import send_mail, send_mail_message, on_behalf_of +from ietf.utils.mail import send_mail, send_mail_message, on_behalf_of, send_mail_text from ietf.utils.textupload import get_cleaned_text_file_content from ietf.utils import log +from ietf.utils.fields import DatepickerDateField, ModelMultipleChoiceField, MultiEmailField from ietf.utils.response import permission_denied -from ietf.utils.timezone import datetime_today, DEADLINE_TZINFO +from ietf.utils.timezone import date_today, datetime_from_date, datetime_today, DEADLINE_TZINFO class ChangeStateForm(forms.Form): @@ -72,7 +75,7 @@ def clean(self): state = self.cleaned_data.get('state', '(None)') tag = self.cleaned_data.get('substate','') comment = self.cleaned_data['comment'].strip() # pyflakes:ignore - doc = get_object_or_404(Document, docalias__name=self.docname) + doc = get_object_or_404(Document, name=self.docname) prev = doc.get_state("draft-iesg") # tag handling is a bit awkward since the UI still works @@ -92,9 +95,10 @@ def clean(self): def change_state(request, name): """Change IESG state of Internet-Draft, notifying parties as necessary and logging the change as a comment.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) - if (not doc.latest_event(type="started_iesg_process")) or doc.get_state_slug() == "expired": + # Steer ADs towards "Begin IESG Processing" + if doc.get_state_slug("draft-iesg")=="idexists" and not has_role(request.user,"Secretariat"): raise Http404 login = request.user.person @@ -212,7 +216,7 @@ class AddIanaExpertsCommentForm(forms.Form): @role_required('Secretariat', 'IANA') def add_iana_experts_comment(request, name): - doc = get_object_or_404(Document, docalias__name = name) + doc = get_object_or_404(Document, name = name) if request.method == 'POST': form = AddIanaExpertsCommentForm(request.POST) if form.is_valid(): @@ -238,7 +242,7 @@ def __init__(self, state_type, *args, **kwargs): def change_iana_state(request, name, state_type): """Change IANA review state of Internet-Draft. 
Normally, this is done via automatic sync, but this form allows one to set it manually.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) state_type = doc.type_id + "-" + state_type @@ -278,7 +282,7 @@ class ChangeStreamForm(forms.Form): def change_stream(request, name): """Change the stream of a Document of type 'draft', notifying parties as necessary and logging the change as a comment.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if not doc.type_id=='draft': raise Http404 @@ -340,7 +344,7 @@ def change_stream(request, name): )) class ReplacesForm(forms.Form): - replaces = SearchableDocAliasesField(required=False) + replaces = SearchableDocumentsField(required=False) comment = forms.CharField(widget=forms.Textarea, required=False, strip=False) def __init__(self, *args, **kwargs): @@ -350,16 +354,16 @@ def __init__(self, *args, **kwargs): def clean_replaces(self): for d in self.cleaned_data['replaces']: - if d.document == self.doc: + if d == self.doc: raise forms.ValidationError("An Internet-Draft can't replace itself") - if d.document.type_id == "draft" and d.document.get_state_slug() == "rfc": + if d.type_id == "draft" and d.get_state_slug() == "rfc": raise forms.ValidationError("An Internet-Draft can't replace an RFC") return self.cleaned_data['replaces'] def replaces(request, name): """Change 'replaces' set of a Document of type 'draft' , notifying parties as necessary and logging the change as a comment.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if doc.type_id != 'draft': raise Http404 if not (has_role(request.user, ("Secretariat", "Area Director", "WG Chair", "RG Chair", "WG Secretary", "RG Secretary")) @@ -390,9 +394,9 @@ def replaces(request, name): )) class SuggestedReplacesForm(forms.Form): - replaces = forms.ModelMultipleChoiceField(queryset=DocAlias.objects.all(), - label="Suggestions", required=False, widget=forms.CheckboxSelectMultiple, - help_text="Select only the documents that are replaced by this document") + replaces = ModelMultipleChoiceField(queryset=Document.objects.all(), + label="Suggestions", required=False, widget=forms.CheckboxSelectMultiple, + help_text="Select only the documents that are replaced by this document") comment = forms.CharField(label="Optional comment", widget=forms.Textarea, required=False, strip=False) def __init__(self, suggested, *args, **kwargs): @@ -403,7 +407,7 @@ def __init__(self, suggested, *args, **kwargs): self.fields["replaces"].choices = [(d.pk, d.name) for d in suggested] def review_possibly_replaces(request, name): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if doc.type_id != 'draft': raise Http404 if not (has_role(request.user, ("Secretariat", "Area Director")) @@ -458,7 +462,7 @@ class ChangeIntentionForm(forms.Form): def change_intention(request, name): """Change the intended publication status of a Document of type 'draft' , notifying parties as necessary and logging the change as a comment.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if doc.type_id != 'draft': raise Http404 @@ -486,44 +490,10 @@ def change_intention(request, name): doc=doc, )) -class EditInfoForm(forms.Form): - intended_std_level = forms.ModelChoiceField(IntendedStdLevelName.objects.filter(used=True), empty_label="(None)", required=True, label="Intended RFC 
status") - area = forms.ModelChoiceField(Group.objects.filter(type="area", state="active"), empty_label="(None - individual submission)", required=False, label="Assigned to area") - ad = forms.ModelChoiceField(Person.objects.filter(role__name="ad", role__group__state="active",role__group__type='area').order_by('name'), label="Responsible AD", empty_label="(None)", required=True) - create_in_state = forms.ModelChoiceField(State.objects.filter(used=True, type="draft-iesg", slug__in=("pub-req", "watching")), empty_label=None, required=False) - notify = forms.CharField( - widget=forms.Textarea, - max_length=1023, - label="Notice emails", - help_text="Separate email addresses with commas.", - required=False, - ) - telechat_date = forms.TypedChoiceField(coerce=lambda x: datetime.datetime.strptime(x, '%Y-%m-%d').date(), empty_value=None, required=False, widget=forms.Select(attrs={'onchange':'make_bold()'})) - returning_item = forms.BooleanField(required=False) - - def __init__(self, *args, **kwargs): - super(self.__class__, self).__init__(*args, **kwargs) - - # if previous AD is now ex-AD, append that person to the list - ad_pk = self.initial.get('ad') - choices = self.fields['ad'].choices - if ad_pk and ad_pk not in [pk for pk, name in choices]: - self.fields['ad'].choices = list(choices) + [("", "-------"), (ad_pk, Person.objects.get(pk=ad_pk).plain_name())] - - # telechat choices - dates = [d.date for d in TelechatDate.objects.active().order_by('date')] - init = kwargs['initial']['telechat_date'] - if init and init not in dates: - dates.insert(0, init) - - self.fields['telechat_date'].choices = [("", "(not on agenda)")] + [(d, d.strftime("%Y-%m-%d")) for d in dates] - - # returning item is rendered non-standard - self.standard_fields = [x for x in self.visible_fields() if x.name not in ('returning_item',)] def to_iesg(request,name): """ Submit an IETF stream document to the IESG for publication """ - doc = get_object_or_404(Document, docalias__name=name, stream='ietf') + doc = get_object_or_404(Document, name=name, stream='ietf') if doc.get_state_slug('draft') == "expired" or doc.get_state_slug('draft-iesg') == 'pub-req' : raise Http404 @@ -560,22 +530,19 @@ def to_iesg(request,name): if request.method == 'POST': if request.POST.get("confirm", ""): - by = request.user.person events = [] - - changes = [] + def doc_event(type, by, doc, desc): + return DocEvent.objects.create(type=type, by=by, doc=doc, rev=doc.rev, desc=desc) if doc.get_state_slug("draft-iesg") == "idexists": - e = DocEvent() - e.type = "started_iesg_process" - e.by = by - e.doc = doc - e.rev = doc.rev - e.desc = "Document is now in IESG state %s" % target_state['iesg'].name - e.save() - events.append(e) + events.append(doc_event("started_iesg_process", by, doc, f"Document is now in IESG state {target_state['iesg'].name}")) + + # do this first, so AD becomes action holder + if not doc.ad == ad : + doc.ad = ad + events.append(doc_event("changed_document", by, doc, f"Responsible AD changed to {doc.ad}")) for state_type in ['draft-iesg','draft-stream-ietf']: prev_state=doc.get_state(state_type) @@ -587,25 +554,14 @@ def to_iesg(request,name): events.append(e) events.append(add_state_change_event(doc=doc,by=by,prev_state=prev_state,new_state=new_state)) - if not doc.ad == ad : - doc.ad = ad - changes.append("Responsible AD changed to %s" % doc.ad) - if not doc.notify == notify : doc.notify = notify - changes.append("State Change Notice email list changed to %s" % doc.notify) + events.append(doc_event("changed_document", by, doc, 
f"State Change Notice email list changed to {doc.notify}")) # Get the last available writeup previous_writeup = doc.latest_event(WriteupDocEvent,type="changed_protocol_writeup") if previous_writeup != None: - changes.append(previous_writeup.text) - - for c in changes: - e = DocEvent(doc=doc, rev=doc.rev, by=by) - e.desc = c - e.type = "changed_document" - e.save() - events.append(e) + events.append(doc_event("changed_document", by, doc, previous_writeup.text)) doc.save_with_history(events) @@ -632,16 +588,81 @@ def to_iesg(request,name): notify=notify, )) -@role_required('Area Director','Secretariat') +class EditInfoForm(forms.Form): + intended_std_level = forms.ModelChoiceField( + IntendedStdLevelName.objects.filter(used=True), + empty_label="(None)", + required=True, + label="Intended RFC status", + ) + area = forms.ModelChoiceField( + Group.objects.filter(type="area", state="active"), + empty_label="(None - individual submission)", + required=False, + label="Assigned to area", + ) + ad = forms.ModelChoiceField( + Person.objects.filter( + role__name="ad", role__group__state="active", role__group__type="area" + ).order_by("name"), + label="Responsible AD", + empty_label="(None)", + required=True, + ) + notify = forms.CharField( + widget=forms.Textarea, + max_length=1023, + label="Notice emails", + help_text="Separate email addresses with commas.", + required=False, + ) + telechat_date = forms.TypedChoiceField( + coerce=lambda x: datetime.datetime.strptime(x, "%Y-%m-%d").date(), + empty_value=None, + required=False, + widget=forms.Select(attrs={"onchange": "make_bold()"}), + ) + returning_item = forms.BooleanField(required=False) + + def __init__(self, *args, **kwargs): + super(self.__class__, self).__init__(*args, **kwargs) + + # if previous AD is now ex-AD, append that person to the list + ad_pk = self.initial.get("ad") + choices = self.fields["ad"].choices + if ad_pk and ad_pk not in [pk for pk, name in choices]: + self.fields["ad"].choices = list(choices) + [ + ("", "-------"), + (ad_pk, Person.objects.get(pk=ad_pk).plain_name()), + ] + + # telechat choices + dates = [d.date for d in TelechatDate.objects.active().order_by("date")] + init = kwargs["initial"]["telechat_date"] + if init and init not in dates: + dates.insert(0, init) + + self.fields["telechat_date"].choices = [("", "(not on agenda)")] + [ + (d, d.strftime("%Y-%m-%d")) for d in dates + ] + + # returning item is rendered non-standard + self.standard_fields = [ + x for x in self.visible_fields() if x.name not in ("returning_item",) + ] + + +@role_required("Area Director", "Secretariat") def edit_info(request, name): """Edit various Internet-Draft attributes, notifying parties as necessary and logging changes as document events.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if doc.get_state_slug() == "expired": raise Http404 new_document = False - if doc.get_state_slug("draft-iesg") == "idexists": # FIXME: should probably receive "new document" as argument to view instead of this + # FIXME: should probably receive "new document" as argument to view instead of this + if doc.get_state_slug("draft-iesg") == "idexists": new_document = True doc.notify = get_initial_notify(doc) @@ -649,34 +670,45 @@ def edit_info(request, name): initial_telechat_date = e.telechat_date if e else None initial_returning_item = bool(e and e.returning_item) - if request.method == 'POST': - form = EditInfoForm(request.POST, - initial=dict(ad=doc.ad_id, - telechat_date=initial_telechat_date)) + 
if request.method == "POST": + form = EditInfoForm( + request.POST, + initial=dict(ad=doc.ad_id, telechat_date=initial_telechat_date), + ) if form.is_valid(): by = request.user.person + pubreq_state = State.objects.get(type="draft-iesg", slug="pub-req") r = form.cleaned_data events = [] if new_document: - doc.set_state(r['create_in_state']) + doc.set_state(pubreq_state) # Is setting the WG state here too much of a hidden side-effect? - if r['create_in_state'].slug=='pub-req': - if doc.stream and doc.stream.slug=='ietf' and doc.group and doc.group.type_id == 'wg': - submitted_state = State.objects.get(type='draft-stream-ietf',slug='sub-pub') - doc.set_state(submitted_state) - e = DocEvent() - e.type = "changed_document" - e.by = by - e.doc = doc - e.rev = doc.rev - e.desc = "Working group state set to %s" % submitted_state.name - e.save() - events.append(e) + if ( + doc.stream + and doc.stream.slug == "ietf" + and doc.group + and doc.group.type_id == "wg" + ): + submitted_state = State.objects.get( + type="draft-stream-ietf", slug="sub-pub" + ) + doc.set_state(submitted_state) + e = DocEvent() + e.type = "changed_document" + e.by = by + e.doc = doc + e.rev = doc.rev + e.desc = "Working group state set to %s" % submitted_state.name + e.save() + events.append(e) - replaces = Document.objects.filter(docalias__relateddocument__source=doc, docalias__relateddocument__relationship="replaces") + replaces = Document.objects.filter( + targets_related__source=doc, + targets_related__relationship="replaces", + ) if replaces: # this should perhaps be somewhere else, e.g. the # place where the replace relationship is established? @@ -685,7 +717,10 @@ def edit_info(request, name): e.by = Person.objects.get(name="(System)") e.doc = doc e.rev = doc.rev - e.desc = "Earlier history may be found in the Comment Log for %s" % (replaces[0], replaces[0].get_absolute_url()) + e.desc = ( + 'Earlier history may be found in the Comment Log for %s' + % (replaces[0], replaces[0].get_absolute_url()) + ) e.save() events.append(e) @@ -694,7 +729,10 @@ def edit_info(request, name): e.by = by e.doc = doc e.rev = doc.rev - e.desc = "Document is now in IESG state %s" % doc.get_state("draft-iesg").name + e.desc = ( + "Document is now in IESG state %s" + % doc.get_state("draft-iesg").name + ) e.save() events.append(e) @@ -704,9 +742,9 @@ def desc(attr, new, old): entry = "%(attr)s changed to %(new)s from %(old)s" if new_document: entry = "%(attr)s changed to %(new)s" - + return entry % dict(attr=attr, new=new, old=old) - + def diff(attr, name): v = getattr(doc, attr) if r[attr] != v: @@ -714,9 +752,9 @@ def diff(attr, name): setattr(doc, attr, r[attr]) # update the attributes, keeping track of what we're doing - diff('intended_std_level', "Intended Status") - diff('ad', "Responsible AD") - diff('notify', "State Change Notice email list") + diff("intended_std_level", "Intended Status") + diff("ad", "Responsible AD") + diff("notify", "State Change Notice email list") if doc.group.type_id in ("individ", "area"): if not r["area"]: @@ -730,12 +768,16 @@ def diff(attr, name): doc.group = r["area"] for c in changes: - events.append(DocEvent.objects.create(doc=doc, rev=doc.rev, by=by, desc=c, type="changed_document")) + events.append( + DocEvent.objects.create( + doc=doc, rev=doc.rev, by=by, desc=c, type="changed_document" + ) + ) # Todo - chase this - e = update_telechat(request, doc, by, - r['telechat_date'], r['returning_item']) - + e = update_telechat( + request, doc, by, r["telechat_date"], r["returning_item"] + ) if e: 
events.append(e) @@ -743,45 +785,49 @@ def diff(attr, name): if new_document: # If we created a new doc, update the action holders as though it - # started in idexists and moved to its create_in_state. Do this + # started in idexists and moved to pub-req. Do this # after the doc has been updated so, e.g., doc.ad is set. update_action_holders( doc, - State.objects.get(type='draft-iesg', slug='idexists'), - r['create_in_state'] + State.objects.get(type="draft-iesg", slug="idexists"), + pubreq_state, ) if changes: email_iesg_processing_document(request, doc, changes) - + return HttpResponseRedirect(doc.get_absolute_url()) else: - init = dict(intended_std_level=doc.intended_std_level_id, - area=doc.group_id, - ad=doc.ad_id, - notify=doc.notify, - telechat_date=initial_telechat_date, - returning_item=initial_returning_item, - ) + init = dict( + intended_std_level=doc.intended_std_level_id, + area=doc.group_id, + ad=doc.ad_id, + notify=doc.notify, + telechat_date=initial_telechat_date, + returning_item=initial_returning_item, + ) form = EditInfoForm(initial=init) # optionally filter out some fields - if not new_document: - form.standard_fields = [x for x in form.standard_fields if x.name != "create_in_state"] if doc.group.type_id not in ("individ", "area"): form.standard_fields = [x for x in form.standard_fields if x.name != "area"] - return render(request, 'doc/draft/edit_info.html', - dict(doc=doc, - form=form, - user=request.user, - ballot_issued=doc.latest_event(type="sent_ballot_announcement"))) + return render( + request, + "doc/draft/edit_info.html", + dict( + doc=doc, + form=form, + user=request.user, + ballot_issued=doc.latest_event(type="sent_ballot_announcement"), + ), + ) @role_required('Area Director','Secretariat') def request_resurrect(request, name): """Request resurrect of expired Internet-Draft.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if doc.get_state_slug() != "expired": raise Http404 @@ -804,7 +850,7 @@ def request_resurrect(request, name): @role_required('Secretariat') def resurrect(request, name): """Resurrect expired Internet-Draft.""" - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if doc.get_state_slug() != "expired": raise Http404 @@ -845,11 +891,19 @@ def restore_draft_file(request, draft): log.log("Resurrecting %s. Moving files:" % draft.name) for file in files: try: + # ghostlinkd would keep this in the combined all archive since it would + # be sourced from a different place. 
But when ghostlinkd is removed, nothing + # new is needed here - the file will already exist in the combined archive shutil.move(file, settings.INTERNET_DRAFT_PATH) log.log(" Moved file %s to %s" % (file, settings.INTERNET_DRAFT_PATH)) except shutil.Error as ex: messages.warning(request, 'There was an error restoring the Internet-Draft file: {} ({})'.format(file, ex)) log.log(" Exception %s when attempting to move %s" % (ex, file)) + _, ext = os.path.splitext(os.path.basename(file)) + if ext: + ext = ext[1:] + blobname = f"{ext}/{basename}.{ext}" + store_bytes("active-draft", blobname, retrieve_bytes("draft", blobname)) class ShepherdWriteupUploadForm(forms.Form): @@ -1510,11 +1564,13 @@ def adopt_draft(request, name): events.append(e) due_date = None - if form.cleaned_data["weeks"] != None: + if form.cleaned_data["weeks"] is not None: due_date = datetime_today(DEADLINE_TZINFO) + datetime.timedelta(weeks=form.cleaned_data["weeks"]) update_reminder(doc, "stream-s", e, due_date) + # The following call name is very misleading - the view allows + # setting states that are _not_ the adopted state. email_adopted(request, doc, prev_state, new_state, by, comment) # comment @@ -1612,7 +1668,7 @@ class ChangeStreamStateForm(forms.Form): new_state = forms.ModelChoiceField(queryset=State.objects.filter(used=True), label='State' ) weeks = forms.IntegerField(label='Expected weeks in state',required=False) comment = forms.CharField(widget=forms.Textarea, required=False, help_text="Optional comment for the document history.", strip=False) - tags = forms.ModelMultipleChoiceField(queryset=DocTagName.objects.filter(used=True), widget=forms.CheckboxSelectMultiple, required=False) + tags = ModelMultipleChoiceField(queryset=DocTagName.objects.filter(used=True), widget=forms.CheckboxSelectMultiple, required=False) def __init__(self, *args, **kwargs): doc = kwargs.pop("doc") @@ -1628,11 +1684,14 @@ def __init__(self, *args, **kwargs): f.queryset = f.queryset.exclude(pk__in=unused_states) f.label = state_type.label if self.stream.slug == 'ietf': + help_text_items = [] if self.can_set_sub_pub: - f.help_text = "Only select 'Submitted to IESG for Publication' to correct errors. Use the document's main page to request publication." + help_text_items.append("Only select 'Submitted to IESG for Publication' to correct errors. This is not how to submit a document to the IESG.") else: f.queryset = f.queryset.exclude(slug='sub-pub') - f.help_text = "You may not set the 'Submitted to IESG for Publication' using this form - Use the document's main page to request publication." + help_text_items.append("You may not set the 'Submitted to IESG for Publication' using this form - Use the button above or the document's main page to request publication.") + help_text_items.append("Only use this form in unusual circumstances when issuing call for adoption or working group last call.") + f.help_text = " ".join(help_text_items) f = self.fields['tags'] f.queryset = f.queryset.filter(slug__in=get_tags_for_stream_id(doc.stream_id)) @@ -1643,7 +1702,7 @@ def __init__(self, *args, **kwargs): def clean_new_state(self): new_state = self.cleaned_data.get('new_state') if new_state.slug=='sub-pub' and not self.can_set_sub_pub: - raise forms.ValidationError('You may not set the %s state using this form. Use the "Submit to IESG for publication" button on the document\'s main page instead. If that button does not appear, the document may already have IESG state. 
Ask your Area Director or the Secretariat for help.'%new_state.name) + raise forms.ValidationError('You may not set the %s state using this form. Use the "Submit to IESG for Publication" button on the document\'s main page instead. If that button does not appear, the document may already have IESG state. Ask your Area Director or the Secretariat for help.'%new_state.name) return new_state @@ -1669,6 +1728,19 @@ def next_states_for_stream_state(doc, state_type, current_state): return next_states +@login_required +def offer_wg_action_helpers(request, name): + doc = get_object_or_404(Document, type="draft", name=name) + if doc.stream is None or doc.stream_id != "ietf" or doc.became_rfc() is not None: + raise Http404 + + if not is_authorized_in_doc_stream(request.user, doc): + permission_denied(request, "You don't have permission to access this page.") + + return render(request, "doc/draft/wg_action_helpers.html", + {"doc": doc, + }) + @login_required def change_stream_state(request, name, state_type): doc = get_object_or_404(Document, type="draft", name=name) @@ -1683,10 +1755,17 @@ def change_stream_state(request, name, state_type): prev_state = doc.get_state(state_type.slug) next_states = next_states_for_stream_state(doc, state_type, prev_state) + # These tell the form to allow directly setting the state to fix up errors. can_set_sub_pub = has_role(request.user,('Secretariat','Area Director')) or (prev_state and prev_state.slug=='sub-pub') if request.method == 'POST': - form = ChangeStreamStateForm(request.POST, doc=doc, state_type=state_type,can_set_sub_pub=can_set_sub_pub,stream=doc.stream) + form = ChangeStreamStateForm( + request.POST, + doc=doc, + state_type=state_type, + can_set_sub_pub=can_set_sub_pub, + stream=doc.stream, + ) if form.is_valid(): by = request.user.person events = [] @@ -1701,13 +1780,13 @@ def change_stream_state(request, name, state_type): events.append(e) due_date = None - if form.cleaned_data["weeks"] != None: + if form.cleaned_data["weeks"] is not None: due_date = datetime_today(DEADLINE_TZINFO) + datetime.timedelta(weeks=form.cleaned_data["weeks"]) update_reminder(doc, "stream-s", e, due_date) email_stream_state_changed(request, doc, prev_state, new_state, by, comment) - + # tags existing_tags = set(doc.tags.all()) new_tags = set(form.cleaned_data["tags"]) @@ -1743,8 +1822,15 @@ def change_stream_state(request, name, state_type): else: form.add_error(None, "No change in state or tags found, and no comment provided -- nothing to do.") else: - form = ChangeStreamStateForm(initial=dict(new_state=prev_state.pk if prev_state else None, tags= doc.tags.all()), - doc=doc, state_type=state_type, can_set_sub_pub = can_set_sub_pub,stream = doc.stream) + form = ChangeStreamStateForm( + initial=dict( + new_state=prev_state.pk if prev_state else None, tags=doc.tags.all() + ), + doc=doc, + state_type=state_type, + can_set_sub_pub=can_set_sub_pub, + stream=doc.stream, + ) milestones = doc.groupmilestone_set.all() @@ -1789,3 +1875,325 @@ def set_intended_status_level(request, doc, new_level, old_level, comment): msg = "\n".join(e.desc for e in events) email_intended_status_changed(request, doc, msg) + +class IssueWorkingGroupLastCallForm(forms.Form): + end_date = DatepickerDateField( + required=True, + date_format="yyyy-mm-dd", + picker_settings={ + "autoclose": "1", + }, + help_text="The date the Last Call closes. 
If you change this, review the subject and body carefully to ensure the change is captured correctly.", + ) + + to = MultiEmailField( + required=True, + help_text="Comma separated list of address to use in the To: header", + ) + cc = MultiEmailField( + required=False, help_text="Comma separated list of addresses to copy" + ) + subject = forms.CharField( + required=True, + help_text="Subject for Last Call message. If you change the date here, be sure to make a matching change in the body.", + ) + body = forms.CharField( + widget=forms.Textarea, required=True, help_text="Body for Last Call message" + ) + + def clean_end_date(self): + end_date = self.cleaned_data["end_date"] + if end_date <= date_today(DEADLINE_TZINFO): + raise forms.ValidationError("End date must be later than today") + return end_date + + def clean(self): + cleaned_data = super().clean() + end_date = cleaned_data.get("end_date") + if end_date is not None: + body = cleaned_data.get("body") + subject = cleaned_data.get("subject") + if end_date.isoformat() not in body: + self.add_error( + "body", + forms.ValidationError( + f"Last call end date ({end_date.isoformat()}) not found in body" + ), + ) + if end_date.isoformat() not in subject: + self.add_error( + "subject", + forms.ValidationError( + f"Last call end date ({end_date.isoformat()}) not found in subject" + ), + ) + return cleaned_data + + +@login_required +def issue_wg_lc(request, name): + doc = get_object_or_404(Document, name=name) + + if doc.stream_id != "ietf": + raise Http404 + if doc.type_id != "draft" or doc.group.type_id != "wg": + raise Http404 + if doc.get_state_slug("draft-stream-ietf") == "wg-lc": + raise Http404 + if doc.get_state_slug("draft") == "rfc": + raise Http404 + + if not is_authorized_in_doc_stream(request.user, doc): + permission_denied(request, "You don't have permission to access this page.") + + if request.method == "POST": + form = IssueWorkingGroupLastCallForm(request.POST) + if form.is_valid(): + # Intentionally not changing tags or adding a comment + # those things can be done with other workflows + by = request.user.person + prev_state = doc.get_state("draft-stream-ietf") + events = [] + wglc_state = State.objects.get(type="draft-stream-ietf", slug="wg-lc") + doc.set_state(wglc_state) + e = add_state_change_event(doc, by, prev_state, wglc_state) + events.append(e) + end_date = form.cleaned_data["end_date"] + update_reminder( + doc, "stream-s", e, datetime_from_date(end_date, DEADLINE_TZINFO) + ) + doc.save_with_history(events) + email_stream_state_changed(request, doc, prev_state, wglc_state, by) + send_mail_text( + request, + to = form.cleaned_data["to"], + frm = request.user.person.formatted_email(), + subject = form.cleaned_data["subject"], + txt = form.cleaned_data["body"], + cc = form.cleaned_data["cc"], + ) + return redirect("ietf.doc.views_doc.document_main", name=doc.name) + else: + end_date = date_today(DEADLINE_TZINFO) + datetime.timedelta(days=14) + subject = f"WG Last Call: {doc.name}-{doc.rev} (Ends {end_date})" + body = render_to_string( + "doc/mail/wg_last_call_issued.txt", + dict( + doc=doc, + end_date=end_date, + wg_list=doc.group.list_email, + settings=settings, + ), + ) + (to, cc) = gather_address_lists("doc_wg_last_call_issued", doc=doc) + + form = IssueWorkingGroupLastCallForm( + initial=dict( + end_date=end_date, + to=", ".join(to), + cc=", ".join(cc), + subject=subject, + body=body, + ) + ) + + return render( + request, + "doc/draft/issue_working_group_last_call.html", + dict( + doc=doc, + form=form, + ), + ) + 
+class IssueCallForAdoptionForm(forms.Form): + end_date = DatepickerDateField( + required=True, + date_format="yyyy-mm-dd", + picker_settings={ + "autoclose": "1", + }, + help_text="The date the Call for Adoption closes. If you change this, review the subject and body carefully to ensure the change is captured correctly.", + ) + + to = MultiEmailField( + required=True, + help_text="Comma separated list of address to use in the To: header", + ) + cc = MultiEmailField( + required=False, help_text="Comma separated list of addresses to copy" + ) + subject = forms.CharField( + required=True, + help_text="Subject for Call for Adoption message. If you change the date here, be sure to make a matching change in the body.", + ) + body = forms.CharField( + widget=forms.Textarea, required=True, help_text="Body for Call for Adoption message" + ) + + def clean_end_date(self): + end_date = self.cleaned_data["end_date"] + if end_date <= date_today(DEADLINE_TZINFO): + raise forms.ValidationError("End date must be later than today") + return end_date + + def clean(self): + cleaned_data = super().clean() + end_date = cleaned_data.get("end_date") + if end_date is not None: + body = cleaned_data.get("body") + subject = cleaned_data.get("subject") + if end_date.isoformat() not in body: + self.add_error( + "body", + forms.ValidationError( + f"Call for adoption end date ({end_date.isoformat()}) not found in body" + ), + ) + if end_date.isoformat() not in subject: + self.add_error( + "subject", + forms.ValidationError( + f"Call for adoption end date ({end_date.isoformat()}) not found in subject" + ), + ) + return cleaned_data + +@login_required +def issue_wg_call_for_adoption(request, name, acronym): + doc = get_object_or_404(Document, name=name) + group = Group.objects.filter(acronym=acronym, type_id="wg").first() + reject = False + if group is None or doc.type_id != "draft" or not is_doc_ietf_adoptable(doc): + reject = True + if doc.stream is None: + if not can_adopt_draft(request.user, doc): + reject = True + elif doc.stream_id != "ietf": + reject = True + else: # doc.stream_id == "ietf" + if not is_authorized_in_doc_stream(request.user, doc): + reject = True + if reject: + raise permission_denied(request, f"You can't issue a {acronym} wg call for adoption for this document.") + + if request.method == "POST": + form = IssueCallForAdoptionForm(request.POST) + if form.is_valid(): + # Intentionally not changing tags or adding a comment + # those things can be done with other workflows + by = request.user.person + + events = [] + if doc.stream_id != "ietf": + stream = StreamName.objects.get(slug="ietf") + doc.stream = stream + e = DocEvent(type="changed_stream", doc=doc, rev=doc.rev, by=by) + e.desc = f"Changed stream to {stream.name}" # Propogates embedding html in DocEvent.desc for consistency + e.save() + events.append(e) + if doc.group != group: + doc.group = group + e = DocEvent(type="changed_group", doc=doc, rev=doc.rev, by=by) + e.desc = f"Changed group to {group.name} ({group.acronym.upper()})" # Even if it makes the cats cry + e.save() + events.append(e) + prev_state = doc.get_state("draft-stream-ietf") + c_adopt_state = State.objects.get(type="draft-stream-ietf", slug="c-adopt") + doc.set_state(c_adopt_state) + e = add_state_change_event(doc, by, prev_state, c_adopt_state) + events.append(e) + end_date = form.cleaned_data["end_date"] + update_reminder( + doc, "stream-s", e, datetime_from_date(end_date, DEADLINE_TZINFO) + ) + doc.save_with_history(events) + email_stream_state_changed(request, doc, 
prev_state, c_adopt_state, by) + send_mail_text( + request, + to = form.cleaned_data["to"], + frm = request.user.person.formatted_email(), + subject = form.cleaned_data["subject"], + txt = form.cleaned_data["body"], + cc = form.cleaned_data["cc"], + ) + return redirect("ietf.doc.views_doc.document_main", name=doc.name) + else: + end_date = date_today(DEADLINE_TZINFO) + datetime.timedelta(days=14) + subject = f"Call for adoption: {doc.name}-{doc.rev} (Ends {end_date})" + body = render_to_string( + "doc/mail/wg_call_for_adoption_issued.txt", + dict( + doc=doc, + group=group, + end_date=end_date, + wg_list=doc.group.list_email, + settings=settings, + ), + ) + (to, cc) = gather_address_lists("doc_wg_call_for_adoption_issued", doc=doc) + if doc.group.acronym == "none": + to.insert(0, f"{group.acronym}-chairs@ietf.org") + to.insert(0, group.list_email) + form = IssueCallForAdoptionForm( + initial=dict( + end_date=end_date, + to=", ".join(to), + cc=", ".join(cc), + subject=subject, + body=body, + ) + ) + + return render( + request, + "doc/draft/issue_working_group_call_for_adoption.html", + dict( + doc=doc, + form=form, + ), + ) + +class GroupModelChoiceField(forms.ModelChoiceField): + def label_from_instance(self, obj): + return f"{obj.acronym} - {obj.name}" + + +class WgForm(forms.Form): + group = GroupModelChoiceField( + queryset=Group.objects.filter(type_id="wg", state="active") + .order_by("acronym") + .distinct(), + required=True, + empty_label="Select IETF Working Group", + ) + + def __init__(self, *args, **kwargs): + user = kwargs.pop("user") + super(WgForm, self).__init__(*args, **kwargs) + if not has_role(user, ["Secretariat", "Area Director"]): + self.fields["group"].queryset = self.fields["group"].queryset.filter( + role__name_id="chair", role__person=user.person + ) + + +@role_required("Secretariat", "WG Chair") +def ask_about_ietf_adoption_call(request, name): + doc = get_object_or_404(Document, name=name) + if doc.stream is not None or doc.group.acronym != "none": + raise Http404 + if request.method == "POST": + form = WgForm(request.POST, user=request.user) + if form.is_valid(): + group = form.cleaned_data["group"] + return redirect(issue_wg_call_for_adoption, name=doc.name, acronym=group.acronym) + else: + form = WgForm(initial={"group": None}, user=request.user) + return render( + request, + "doc/draft/ask_about_ietf_adoption.html", + dict( + doc=doc, + form=form, + ), + ) diff --git a/ietf/doc/views_help.py b/ietf/doc/views_help.py index 73cdcdd20f..34d29aaccb 100644 --- a/ietf/doc/views_help.py +++ b/ietf/doc/views_help.py @@ -1,5 +1,7 @@ # Copyright The IETF Trust 2013-2023, All Rights Reserved +import debug # pyflakes: ignore + from django.shortcuts import render, get_object_or_404 from django.http import Http404 @@ -7,6 +9,18 @@ from ietf.name.models import DocRelationshipName, DocTagName from ietf.doc.utils import get_tags_for_stream_id +def state_index(request): + types = StateType.objects.all() + names = [ type.slug for type in types ] + for type in types: + if "-" in type.slug and type.slug.split('-',1)[0] in names: + type.stategroups = None + else: + groups = StateType.objects.filter(slug__startswith=type.slug) + type.stategroups = [ g.slug[len(type.slug)+1:] for g in groups if not g == type ] or "" + + return render(request, 'doc/state_index.html', {"types": types}) + def state_help(request, type=None): slug, title = { "draft-iesg": ("draft-iesg", "IESG States for Internet-Drafts"), @@ -18,12 +32,36 @@ def state_help(request, type=None): "draft-stream-irtf": 
("draft-stream-irtf", "IRTF Stream States for Internet-Drafts"), "draft-stream-ise": ("draft-stream-ise", "ISE Stream States for Internet-Drafts"), "draft-stream-iab": ("draft-stream-iab", "IAB Stream States for Internet-Drafts"), + "draft-stream-editorial": ("draft-stream-editorial", "Editorial Stream States for Internet-Drafts"), "charter": ("charter", "Charter States"), "conflict-review": ("conflrev", "Conflict Review States"), "status-change": ("statchg", "RFC Status Change States"), "bofreq": ("bofreq", "BOF Request States"), "procmaterials": ("procmaterials", "Proceedings Materials States"), - "statement": {"statement", "Statement States"} + "statement": ("statement", "Statement States"), + "slides": ("slides", "Slides States"), + "minutes": ("minutes", "Minutes States"), + "liai-att": ("liai-att", "Liaison Attachment States"), + "recording": ("recording", "Recording States"), + "bluesheets": ("bluesheets", "Bluesheets States"), + "reuse_policy": ("reuse_policy", "Reuse Policy States"), + "review": ("review", "Review States"), + "liaison": ("liaison", "Liaison States"), + "shepwrit": ("shepwrit", "Shapherd Writeup States"), + "bofreq": ("bofreq", "BOF Request States"), + "procmaterials": ("procmaterials", "Proceedings Materials States"), + "chatlog": ("chatlog", "Chat Log States"), + "polls": ("polls", "Polls States"), + "statement": ("statement", "Statement States"), + "rfc": ("rfc", "RFC States"), + "bcp": ("bcp", "BCP States"), + "std": ("std", "STD States"), + "fyi": ("fyi", "FYI States"), + "narrativeminutes": ("narrativeminutes", "Narrative Minutes States"), + "draft": ("draft", "Draft States"), + "statchg": ("statchg", "Status Change States"), + "agenda": ("agenda", "Agenda States"), + "conflrev": ("conflrev", "Conflict Review States") }.get(type, (None, None)) state_type = get_object_or_404(StateType, slug=slug) diff --git a/ietf/doc/views_material.py b/ietf/doc/views_material.py index 21b93397a8..eefac0ca61 100644 --- a/ietf/doc/views_material.py +++ b/ietf/doc/views_material.py @@ -3,11 +3,12 @@ # views for managing group materials (slides, ...) 
-import io import os +from pathlib import Path import re from django import forms +from django.conf import settings from django.contrib.auth.decorators import login_required from django.http import Http404 from django.shortcuts import render, get_object_or_404, redirect @@ -16,14 +17,19 @@ import debug # pyflakes:ignore -from ietf.doc.models import Document, DocAlias, DocTypeName, DocEvent, State +from ietf.doc.models import Document, DocTypeName, DocEvent, State from ietf.doc.models import NewRevisionDocEvent from ietf.doc.utils import add_state_change_event, check_common_doc_name_rules from ietf.group.models import Group from ietf.group.utils import can_manage_materials +from ietf.meeting.utils import resolve_uploaded_material +from ietf.utils import log +from ietf.utils.decorators import ignore_view_kwargs +from ietf.utils.meetecho import MeetechoAPIError, SlidesManager from ietf.utils.response import permission_denied @login_required +@ignore_view_kwargs("group_type") def choose_material_type(request, acronym): group = get_object_or_404(Group, acronym=acronym) if not group.features.has_nonsession_materials: @@ -91,6 +97,7 @@ def clean_name(self): return name @login_required +@ignore_view_kwargs("group_type") def edit_material(request, name=None, acronym=None, action=None, doc_type=None): # the materials process is not very developed, so at the moment we # handle everything through the same view/form @@ -110,6 +117,8 @@ def edit_material(request, name=None, acronym=None, action=None, doc_type=None): valid_doctypes = ['procmaterials'] if group is not None: valid_doctypes.extend(['minutes','agenda','bluesheets']) + if group.acronym=="iesg": + valid_doctypes.append("narrativeminutes") valid_doctypes.extend(group.features.material_types) if document_type.slug not in valid_doctypes: @@ -118,6 +127,8 @@ def edit_material(request, name=None, acronym=None, action=None, doc_type=None): if not can_manage_materials(request.user, group): permission_denied(request, "You don't have permission to access this view") + sessions_with_slide_title_updates = set() + if request.method == 'POST': form = UploadMaterialForm(document_type, action, group, doc, request.POST, request.FILES) @@ -152,13 +163,26 @@ def edit_material(request, name=None, acronym=None, action=None, doc_type=None): f = form.cleaned_data["material"] file_ext = os.path.splitext(f.name)[1] - with io.open(os.path.join(doc.get_file_path(), doc.name + "-" + doc.rev + file_ext), 'wb+') as dest: + basename = f"{doc.name}-{doc.rev}{file_ext}" # Note the lack of a . 
before file_ext - see os.path.splitext + filepath = Path(doc.get_file_path()) / basename + with filepath.open('wb+') as dest: for chunk in f.chunks(): dest.write(chunk) - - if action == "new": - alias, __ = DocAlias.objects.get_or_create(name=doc.name) - alias.docs.add(doc) + f.seek(0) + doc.store_file(basename, f) + if not doc.meeting_related(): + log.assertion('doc.type_id == "slides"') + ftp_filepath = Path(settings.FTP_DIR) / doc.type_id / basename + try: + os.link(filepath, ftp_filepath) # Path.hardlink is not available until 3.10 + except IOError as ex: + log.log( + "There was an error creating a hardlink at %s pointing to %s: %s" + % (ftp_filepath, filepath, ex) + ) + else: + for meeting in set([s.meeting for s in doc.session_set.all()]): + resolve_uploaded_material(meeting=meeting, doc=doc) if prev_rev != doc.rev: e = NewRevisionDocEvent(type="new_revision", doc=doc, rev=doc.rev) @@ -174,6 +198,9 @@ def edit_material(request, name=None, acronym=None, action=None, doc_type=None): e.desc += " from %s" % prev_title e.save() events.append(e) + if doc.type_id == "slides": + for sp in doc.presentations.all(): + sessions_with_slide_title_updates.add(sp.session) if prev_abstract != doc.abstract: e = DocEvent(doc=doc, rev=doc.rev, by=request.user.person, type='changed_document') @@ -191,6 +218,16 @@ def edit_material(request, name=None, acronym=None, action=None, doc_type=None): if events: doc.save_with_history(events) + # Call Meetecho API if any session slides titles changed + if sessions_with_slide_title_updates and hasattr(settings, "MEETECHO_API_CONFIG"): + sm = SlidesManager(api_config=settings.MEETECHO_API_CONFIG) + for session in sessions_with_slide_title_updates: + try: + # SessionPresentations are unique over (session, document) so there will be no duplicates + sm.send_update(session) + except MeetechoAPIError as err: + log.log(f"Error in SlidesManager.send_update(): {err}") + return redirect("ietf.doc.views_doc.document_main", name=doc.name) else: form = UploadMaterialForm(document_type, action, group, doc) diff --git a/ietf/doc/views_review.py b/ietf/doc/views_review.py index e0e6cb05b7..1968b133ce 100644 --- a/ietf/doc/views_review.py +++ b/ietf/doc/views_review.py @@ -2,11 +2,11 @@ # -*- coding: utf-8 -*- -import io import itertools import json import os import datetime +from pathlib import Path import requests import email.utils @@ -28,7 +28,7 @@ from django.template.loader import render_to_string, TemplateDoesNotExist from django.urls import reverse as urlreverse -from ietf.doc.models import (Document, NewRevisionDocEvent, State, DocAlias, +from ietf.doc.models import (Document, NewRevisionDocEvent, State, LastCallDocEvent, ReviewRequestDocEvent, ReviewAssignmentDocEvent, DocumentAuthor) from ietf.name.models import (ReviewRequestStateName, ReviewAssignmentStateName, ReviewResultName, ReviewTypeName) @@ -52,7 +52,7 @@ from ietf.utils.textupload import get_cleaned_text_file_content from ietf.utils.mail import send_mail_message from ietf.mailtrigger.utils import gather_address_lists -from ietf.utils.fields import MultiEmailField +from ietf.utils.fields import ModelMultipleChoiceField, MultiEmailField from ietf.utils.http import is_ajax from ietf.utils.response import permission_denied from ietf.utils.timezone import date_today, DEADLINE_TZINFO @@ -68,7 +68,7 @@ def clean_doc_revision(doc, rev): return rev class RequestReviewForm(forms.ModelForm): - team = forms.ModelMultipleChoiceField(queryset=Group.objects.all(), widget=forms.CheckboxSelectMultiple) + team = 
ModelMultipleChoiceField(queryset=Group.objects.all(), widget=forms.CheckboxSelectMultiple) deadline = DatepickerDateField(date_format="yyyy-mm-dd", picker_settings={ "autoclose": "1", "start-date": "+0d" }) class Meta: @@ -117,7 +117,7 @@ def clean(self): @login_required def request_review(request, name): - doc = get_object_or_404(Document, name=name) + doc = get_object_or_404(Document, type_id="draft", name=name) if not can_request_review_of_doc(request.user, doc): permission_denied(request, "You do not have permission to perform this action") @@ -753,9 +753,7 @@ def complete_review(request, name, assignment_id=None, acronym=None): name=review_name, defaults={'type_id': 'review', 'group': team}, ) - if created: - DocAlias.objects.create(name=review_name).docs.add(review) - else: + if not created: messages.warning(request, message='Attempt to save review failed: review document already exists. This most likely occurred because the review was submitted twice in quick succession. If you intended to submit a new review, rather than update an existing one, things are probably OK. Please verify that the shown review is what you expected.') return redirect("ietf.doc.views_doc.document_main", name=review_name) @@ -805,9 +803,14 @@ def complete_review(request, name, assignment_id=None, acronym=None): else: content = form.cleaned_data['review_content'] - filename = os.path.join(review.get_file_path(), '{}.txt'.format(review.name)) - with io.open(filename, 'w', encoding='utf-8') as destination: - destination.write(content) + review_path = Path(review.get_file_path()) / f"{review.name}.txt" + review_path.write_text(content) + review.store_str(f"{review.name}.txt", content, allow_overwrite=True) # We have a bug that review revisions dont create a new version! 
+ review_ftp_path = Path(settings.FTP_DIR) / "review" / review_path.name + # See https://github.com/ietf-tools/datatracker/issues/6941 - when that's + # addressed, making this link should not be conditional + if not review_ftp_path.exists(): + os.link(review_path, review_ftp_path) # switch this to Path.hardlink when python>=3.10 is available completion_datetime = timezone.now() if "completion_date" in form.cleaned_data: @@ -894,7 +897,7 @@ def complete_review(request, name, assignment_id=None, acronym=None): if need_to_email_review: # email the review - subject = "{} {} {} of {}-{}".format(assignment.review_request.team.acronym.capitalize(),assignment.review_request.type.name.lower(),"partial review" if assignment.state_id == "part-completed" else "review", assignment.review_request.doc.name, assignment.reviewed_rev) + subject = "{}-{} {} {} {}".format(assignment.review_request.doc.name, assignment.reviewed_rev, assignment.review_request.type.name.lower(), assignment.review_request.team.acronym.capitalize(), "partial review" if assignment.state_id == "part-completed" else "review") related_groups = [ assignment.review_request.team, ] if assignment.review_request.doc.group: related_groups.append(assignment.review_request.doc.group) @@ -955,14 +958,14 @@ def complete_review(request, name, assignment_id=None, acronym=None): form = CompleteReviewForm(assignment, doc, team, is_reviewer, initial=initial) - mail_archive_query_urls = mailarch.construct_query_urls(doc, team) + mail_archive_query_data = mailarch.construct_query_data(doc, team) return render(request, 'doc/review/complete_review.html', { 'doc': doc, 'team': team, 'assignment': assignment, 'form': form, - 'mail_archive_query_urls': mail_archive_query_urls, + 'mail_archive_query_data': mail_archive_query_data, 'revising_review': revising_review, 'review_to': to, 'review_cc': cc, @@ -984,27 +987,25 @@ def search_mail_archive(request, name, acronym=None, assignment_id=None): if not (is_reviewer or can_manage_request): permission_denied(request, "You do not have permission to perform this action") - res = mailarch.construct_query_urls(doc, team, query=request.GET.get("query")) - if not res: - return JsonResponse({ "error": "Couldn't do lookup in mail archive - don't know where to look"}) - - MAX_RESULTS = 30 + query_data = mailarch.construct_query_data(doc, team, query=request.GET.get("query")) + if not query_data: + return JsonResponse({"error": "Couldn't do lookup in mail archive - don't know where to look"}) try: - res["messages"] = mailarch.retrieve_messages(res["query_data_url"])[:MAX_RESULTS] - for message in res["messages"]: + query_data["messages"] = mailarch.retrieve_messages(query_data) + for message in query_data["messages"]: try: revision_guess = message["subject"].split(name)[1].split('-')[1] message["revision_guess"] = revision_guess if revision_guess.isnumeric() else None except IndexError: pass except KeyError as e: - res["error"] = "No results found (%s)" % str(e) + query_data["error"] = "No results found (%s)" % str(e) except Exception as e: - res["error"] = "Retrieval from mail archive failed: %s" % str(e) + query_data["error"] = "Retrieval from mail archive failed: %s" % str(e) # raise # useful when debugging - return JsonResponse(res) + return JsonResponse(query_data) class EditReviewRequestCommentForm(forms.ModelForm): comment = forms.CharField(widget=forms.Textarea, strip=False) @@ -1055,7 +1056,11 @@ def edit_deadline(request, name, request_id): if form.is_valid(): if form.cleaned_data['deadline'] != old_deadline: 
form.save() - subject = "Deadline changed: {} {} review of {}-{}".format(review_req.team.acronym.capitalize(),review_req.type.name.lower(), review_req.doc.name, review_req.requested_rev) + subject = f"Deadline changed: {review_req.team.acronym.capitalize()} {review_req.type.name.lower()} review of {review_req.doc.name}" + if review_req.requested_rev: + subject += f"-{review_req.requested_rev}" + descr = "Deadline changed from {} to {}".format(old_deadline, review_req.deadline) + update_change_reason(review_req, descr) msg = render_to_string("review/deadline_changed.txt", { "review_req": review_req, "old_deadline": old_deadline, @@ -1093,7 +1098,7 @@ def save(self): @login_required def review_wish_add(request, name): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if request.method == "POST": form = ReviewWishAddForm(request.user, doc, request.POST) @@ -1110,7 +1115,7 @@ def review_wish_add(request, name): @login_required def review_wishes_remove(request, name): - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) person = get_object_or_404(Person, user=request.user) if request.method == "POST": diff --git a/ietf/doc/views_search.py b/ietf/doc/views_search.py index 4e791aea76..4232d77f6c 100644 --- a/ietf/doc/views_search.py +++ b/ietf/doc/views_search.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2009-2022, All Rights Reserved +# Copyright The IETF Trust 2009-2023, All Rights Reserved # -*- coding: utf-8 -*- # # Some parts Copyright (C) 2009-2010 Nokia Corporation and/or its subsidiary(-ies). @@ -33,11 +33,15 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- +import hashlib +import json import re import datetime +import copy +import operator from collections import defaultdict +from functools import reduce from django import forms from django.conf import settings @@ -47,27 +51,30 @@ from django.http import Http404, HttpResponseBadRequest, HttpResponse, HttpResponseRedirect, QueryDict from django.shortcuts import render from django.utils import timezone +from django.utils.html import strip_tags from django.utils.cache import _generate_cache_key # type: ignore - +from django.utils.text import slugify import debug # pyflakes:ignore -from ietf.doc.models import ( Document, DocHistory, DocAlias, State, - LastCallDocEvent, NewRevisionDocEvent, IESG_SUBSTATE_TAGS, +from ietf.doc.models import ( Document, DocHistory, State, + NewRevisionDocEvent, IESG_SUBSTATE_TAGS, IESG_BALLOT_ACTIVE_STATES, IESG_STATCHG_CONFLREV_ACTIVE_STATES, IESG_CHARTER_ACTIVE_STATES ) from ietf.doc.fields import select2_id_doc_name_json -from ietf.doc.utils import get_search_cache_key, augment_events_with_revision, needed_ballot_positions +from ietf.doc.utils import augment_events_with_revision, needed_ballot_positions from ietf.group.models import Group from ietf.idindex.index import active_drafts_index_by_group from ietf.name.models import DocTagName, DocTypeName, StreamName from ietf.person.models import Person from ietf.person.utils import get_active_ads from ietf.utils.draft_search import normalize_draftname +from ietf.utils.fields import ModelMultipleChoiceField from ietf.utils.log import log -from ietf.doc.utils_search import prepare_document_table - +from ietf.doc.utils_search import prepare_document_table, doc_type, doc_state, doc_type_name, AD_WORKLOAD +from ietf.ietfauth.utils import has_role +from ietf.utils.unicodenormalize import normalize_for_sorting class SearchForm(forms.Form): name = forms.CharField(required=False) @@ -95,7 +102,7 @@ class SearchForm(forms.Form): ("ad", "AD"), ("-ad", "AD (desc)"), ), required=False, widget=forms.HiddenInput) - doctypes = forms.ModelMultipleChoiceField(queryset=DocTypeName.objects.filter(used=True).exclude(slug__in=('draft','liai-att')).order_by('name'), required=False) + doctypes = ModelMultipleChoiceField(queryset=DocTypeName.objects.filter(used=True).exclude(slug__in=('draft', 'rfc', 'bcp', 'std', 'fyi', 'liai-att')).order_by('name'), required=False) def __init__(self, *args, **kwargs): super(SearchForm, self).__init__(*args, **kwargs) @@ -154,8 +161,11 @@ def retrieve_search_results(form, all_types=False): else: types = [] - if query['activedrafts'] or query['olddrafts'] or query['rfcs']: + if query['activedrafts'] or query['olddrafts']: types.append('draft') + + if query['rfcs']: + types.append('rfc') types.extend(query["doctypes"]) @@ -166,33 +176,76 @@ def retrieve_search_results(form, all_types=False): # name if query["name"]: - docs = docs.filter(Q(docalias__name__icontains=query["name"]) | - Q(title__icontains=query["name"])).distinct() + look_for = query["name"] + queries = [ + Q(name__icontains=look_for), + Q(title__icontains=look_for) + ] + # Check to see if this is just a search for an rfc look for a few variants + if look_for.lower()[:3] == "rfc" and look_for[3:].strip().isdigit(): + spaceless = look_for.lower()[:3]+look_for[3:].strip() + if spaceless != look_for: + queries.extend([ + Q(name__icontains=spaceless), + Q(title__icontains=spaceless) + ]) + singlespace = look_for.lower()[:3]+" "+look_for[3:].strip() + if singlespace != look_for: + queries.extend([ + Q(name__icontains=singlespace), + 
Q(title__icontains=singlespace) + ]) + + # Do a similar thing if the search is just for a subseries doc, like a bcp. + if look_for.lower()[:3] in ["bcp", "fyi", "std"] and look_for[3:].strip().isdigit() and query["rfcs"]: # Also look for rfcs contained in the subseries. + queries.extend([ + Q(targets_related__source__name__icontains=look_for, targets_related__relationship_id="contains"), + Q(targets_related__source__title__icontains=look_for, targets_related__relationship_id="contains"), + ]) + spaceless = look_for.lower()[:3]+look_for[3:].strip() + if spaceless != look_for: + queries.extend([ + Q(targets_related__source__name__icontains=spaceless, targets_related__relationship_id="contains"), + Q(targets_related__source__title__icontains=spaceless, targets_related__relationship_id="contains"), + ]) + singlespace = look_for.lower()[:3]+" "+look_for[3:].strip() + if singlespace != look_for: + queries.extend([ + Q(targets_related__source__name__icontains=singlespace, targets_related__relationship_id="contains"), + Q(targets_related__source__title__icontains=singlespace, targets_related__relationship_id="contains"), + ]) + + if query["rfcs"]: + queries.extend([Q(targets_related__source__name__icontains=look_for, targets_related__relationship_id="became_rfc")]) + + combined_query = reduce(operator.or_, queries) + docs = docs.filter(combined_query) # rfc/active/old check buttons allowed_draft_states = [] - if query["rfcs"]: - allowed_draft_states.append("rfc") if query["activedrafts"]: allowed_draft_states.append("active") if query["olddrafts"]: allowed_draft_states.extend(['repl', 'expired', 'auth-rm', 'ietf-rm']) docs = docs.filter(Q(states__slug__in=allowed_draft_states) | - ~Q(type__slug='draft')).distinct() + ~Q(type__slug='draft')) # radio choices by = query["by"] if by == "author": docs = docs.filter( Q(documentauthor__person__alias__name__icontains=query["author"]) | - Q(documentauthor__person__email__address__icontains=query["author"]) + Q(documentauthor__person__email__address__icontains=query["author"]) | + Q(rfcauthor__person__alias__name__icontains=query["author"]) | + Q(rfcauthor__person__email__address__icontains=query["author"]) | + Q(rfcauthor__titlepage_name__icontains=query["author"]) ) elif by == "group": docs = docs.filter(group__acronym__iexact=query["group"]) elif by == "area": docs = docs.filter(Q(group__type="wg", group__parent=query["area"]) | - Q(group=query["area"])).distinct() + Q(group=query["area"])) elif by == "ad": docs = docs.filter(ad=query["ad"]) elif by == "state": @@ -205,9 +258,18 @@ def retrieve_search_results(form, all_types=False): elif by == "stream": docs = docs.filter(stream=query["stream"]) + docs=docs.distinct() + return docs + def search(request): + def _get_cache_key(params): + fields = set(SearchForm.base_fields) - {'sort'} + kwargs = dict([(k, v) for (k, v) in list(params.items()) if k in fields]) + key = "doc:document:search:" + hashlib.sha512(json.dumps(kwargs, sort_keys=True).encode('utf-8')).hexdigest() + return key + if request.GET: # backwards compatibility get_params = request.GET.copy() @@ -222,7 +284,7 @@ def search(request): if not form.is_valid(): return HttpResponseBadRequest("form not valid: %s" % form.errors) - cache_key = get_search_cache_key(get_params) + cache_key = _get_cache_key(get_params) cached_val = cache.get(cache_key) if cached_val: [results, meta] = cached_val @@ -248,17 +310,17 @@ def frontpage(request): def search_for_name(request, name): def find_unique(n): - exact = DocAlias.objects.filter(name__iexact=n).first() + 
exact = Document.objects.filter(name__iexact=n).first() if exact: return exact.name - aliases = DocAlias.objects.filter(name__istartswith=n)[:2] - if len(aliases) == 1: - return aliases[0].name + startswith = Document.objects.filter(name__istartswith=n)[:2] + if len(startswith) == 1: + return startswith[0].name - aliases = DocAlias.objects.filter(name__icontains=n)[:2] - if len(aliases) == 1: - return aliases[0].name + contains = Document.objects.filter(name__icontains=n)[:2] + if len(contains) == 1: + return contains[0].name return None @@ -291,13 +353,13 @@ def cached_redirect(cache_key, url): if redirect_to: rev = rev_split.group(2) # check if we can redirect directly to the rev if it's draft, if rfc - always redirect to main page - if not redirect_to.startswith('rfc') and DocHistory.objects.filter(doc__docalias__name=redirect_to, rev=rev).exists(): + if not redirect_to.startswith('rfc') and DocHistory.objects.filter(doc__name=redirect_to, rev=rev).exists(): return cached_redirect(cache_key, urlreverse("ietf.doc.views_doc.document_main", kwargs={ "name": redirect_to, "rev": rev })) else: return cached_redirect(cache_key, urlreverse("ietf.doc.views_doc.document_main", kwargs={ "name": redirect_to })) # build appropriate flags based on string prefix - doctypenames = DocTypeName.objects.filter(used=True) + doctypenames = DocTypeName.objects.filter(used=True).exclude(slug__in=["bcp","std","fyi"]) # This would have been more straightforward if document prefixes couldn't # contain a dash. Probably, document prefixes shouldn't contain a dash ... search_args = "?name=%s" % n @@ -313,76 +375,55 @@ def cached_redirect(cache_key, url): return cached_redirect(cache_key, urlreverse('ietf.doc.views_search.search') + search_args) -def ad_dashboard_group_type(doc): - # Return group type for document for dashboard. 
- # If doc is not defined return list of all possible - # group types - if not doc: - return ('I-D', 'RFC', 'Conflict Review', 'Status Change', 'Charter') - if doc.type.slug=='draft': - if doc.get_state_slug('draft') == 'rfc': - return 'RFC' - elif doc.get_state_slug('draft') == 'active' and doc.get_state_slug('draft-iesg') and doc.get_state('draft-iesg').name =='RFC Ed Queue': - return 'RFC' - elif doc.get_state_slug('draft') == 'active' and doc.get_state_slug('draft-iesg') and doc.get_state('draft-iesg').name in ('Dead', 'I-D Exists', 'AD is watching'): - return None - elif doc.get_state('draft').name in ('Expired', 'Replaced'): - return None - else: - return 'I-D' - elif doc.type.slug=='conflrev': - return 'Conflict Review' - elif doc.type.slug=='statchg': - return 'Status Change' - elif doc.type.slug=='charter': - return "Charter" - else: - return "Document" -def ad_dashboard_group(doc): - - if doc.type.slug=='draft': - if doc.get_state_slug('draft') == 'rfc': - return 'RFC' - elif doc.get_state_slug('draft') == 'active' and doc.get_state_slug('draft-iesg'): - return '%s Internet-Draft' % doc.get_state('draft-iesg').name - else: - return '%s Internet-Draft' % doc.get_state('draft').name - elif doc.type.slug=='conflrev': - if doc.get_state_slug('conflrev') in ('appr-reqnopub-sent','appr-noprob-sent'): - return 'Approved Conflict Review' - elif doc.get_state_slug('conflrev') in ('appr-reqnopub-pend','appr-noprob-pend','appr-reqnopub-pr','appr-noprob-pr'): - return "%s Conflict Review" % State.objects.get(type__slug='draft-iesg',slug='approved') - else: - return '%s Conflict Review' % doc.get_state('conflrev') - elif doc.type.slug=='statchg': - if doc.get_state_slug('statchg') in ('appr-sent',): - return 'Approved Status Change' - if doc.get_state_slug('statchg') in ('appr-pend','appr-pr'): - return '%s Status Change' % State.objects.get(type__slug='draft-iesg',slug='approved') - else: - return '%s Status Change' % doc.get_state('statchg') - elif doc.type.slug=='charter': - if doc.get_state_slug('charter') == 'approved': - return "Approved Charter" +def get_state_name_calculator(): + """Get a function to calculate state names + + Queries the database once when called, then uses cached look-up table for name calculations. + """ + # state_lut always has at least rfc, draft, and draft-iesg keys + state_lut = defaultdict(dict, **{"rfc": {}, "draft":{}, "draft-iesg": {}}) + for state in State.objects.filter(used=True): + state_lut[state.type_id][state.slug] = state.name + state_lut = dict(state_lut) # convert to dict to freeze key changes + + def _get_state_name(doc_type, state_slug): + """Get display name for a doc type / state slug + + Note doc_type rfc here is _not_ necessarily Document.type - for some callers + it is a type derived from draft... The ad_workload view needs more rework so that + the code isn't having to shadow-box so much. 
+ """ + if doc_type == "rfc": + if state_slug == "rfc": + return "RFC" + elif state_slug in state_lut["rfc"]: + return state_lut["rfc"][state_slug] + else: + return state_lut["draft"].get( + state_slug, + state_lut["draft-iesg"][state_slug], + ) + elif doc_type == "draft" and state_slug not in ["rfc", "expired"]: + return state_lut["draft"].get( + state_slug, + state_lut["draft-iesg"][state_slug], + ) + elif doc_type == "draft" and state_slug == "rfc": + return "RFC" + elif doc_type == "conflrev" and state_slug.startswith("appr"): + return "Approved" else: - return '%s Charter' % doc.get_state('charter') - else: - return "Document" + return state_lut[doc_type][state_slug] + # return the function as a closure + return _get_state_name -def shorten_group_name(name): - for s in [ - " Internet-Draft", - " Conflict Review", - " Status Change", - " (Internal Steering Group/IAB Review) Charter", - "Charter", - ]: - if name.endswith(s): - name = name[: -len(s)] +def shorten_state_name(name): + """Get abbreviated display name for a state""" for pat, sub in [ + (r" \(Internal Steering Group/IAB Review\)", ""), ("Writeup", "Write-up"), ("Requested", "Req"), ("Evaluation", "Eval"), @@ -390,82 +431,47 @@ def shorten_group_name(name): ("Waiting", "Wait"), ("Go-Ahead", "OK"), ("Approved-", "App, "), + ("Approved No Problem", "App."), ("announcement", "ann."), ("IESG Eval - ", ""), ("Not currently under review", "Not under review"), ("External Review", "Ext. Review"), - (r"IESG Review \(Charter for Approval, Selected by Secretariat\)", "IESG Review"), + ( + r"IESG Review \(Charter for Approval, Selected by Secretariat\)", + "IESG Review", + ), ("Needs Shepherd", "Needs Shep."), ("Approved", "App."), ("Replaced", "Repl."), ("Withdrawn", "Withd."), ("Chartering/Rechartering", "Charter"), - (r"\(Message to Community, Selected by Secretariat\)", "") + (r"\(Message to Community, Selected by Secretariat\)", ""), ]: name = re.sub(pat, sub, name) - return name.strip() -def ad_dashboard_sort_key(doc): - - if doc.type.slug=='draft' and doc.get_state_slug('draft') == 'rfc': - return "21%04d" % int(doc.rfc_number()) - if doc.type.slug=='statchg' and doc.get_state_slug('statchg') == 'appr-sent': - return "22%d" % 0 # TODO - get the date of the transition into this state here - if doc.type.slug=='conflrev' and doc.get_state_slug('conflrev') in ('appr-reqnopub-sent','appr-noprob-sent'): - return "23%d" % 0 # TODO - get the date of the transition into this state here - if doc.type.slug=='charter' and doc.get_state_slug('charter') == 'approved': - return "24%d" % 0 # TODO - get the date of the transition into this state here - - seed = ad_dashboard_group(doc) - - if doc.type.slug=='conflrev' and doc.get_state_slug('conflrev') == 'adrev': - state = State.objects.get(type__slug='draft-iesg',slug='ad-eval') - return "1%d%s" % (state.order,seed) - - if doc.type.slug=='charter' and doc.get_state_slug('charter') != 'replaced': - if doc.get_state_slug('charter') in ('notrev','infrev'): - return "100%s" % seed - elif doc.get_state_slug('charter') == 'intrev': - state = State.objects.get(type__slug='draft-iesg',slug='ad-eval') - return "1%d%s" % (state.order,seed) - elif doc.get_state_slug('charter') == 'extrev': - state = State.objects.get(type__slug='draft-iesg',slug='lc') - return "1%d%s" % (state.order,seed) - elif doc.get_state_slug('charter') == 'iesgrev': - state = State.objects.get(type__slug='draft-iesg',slug='iesg-eva') - return "1%d%s" % (state.order,seed) - - if doc.type.slug=='statchg' and doc.get_state_slug('statchg') 
== 'adrev': - state = State.objects.get(type__slug='draft-iesg',slug='ad-eval') - return "1%d%s" % (state.order,seed) - - if seed.startswith('Needs Shepherd'): - return "100%s" % seed - if seed.endswith(' Document'): - seed = seed[:-9] - elif seed.endswith(' Internet-Draft'): - seed = seed[:-15] - elif seed.endswith(' Conflict Review'): - seed = seed[:-16] - elif seed.endswith(' Status Change'): - seed = seed[:-14] - state = State.objects.filter(type__slug='draft-iesg',name=seed) - if state: - ageseconds = 0 - changetime= doc.latest_event(type='changed_document') - if changetime: - ad = (timezone.now()-doc.latest_event(type='changed_document').time) - ageseconds = (ad.microseconds + (ad.seconds + ad.days * 24 * 3600) * 10**6) / 10**6 - return "1%d%s%s%010d" % (state[0].order,seed,doc.type.slug,ageseconds) - - return "3%s" % seed +def date_to_bucket(date, now, num_buckets): + return num_buckets - int((now.date() - date.date()).total_seconds() / 60 / 60 / 24) def ad_workload(request): - delta = datetime.timedelta(days=120) - right_now = timezone.now() + _calculate_state_name = get_state_name_calculator() + IESG_STATES = State.objects.filter(type="draft-iesg").values_list("name", flat=True) + STATE_SLUGS = { + dt: {_calculate_state_name(dt, ds): ds for ds in AD_WORKLOAD[dt]} # type: ignore + for dt in AD_WORKLOAD.keys() + } + + def _state_to_doc_type(state): + for dt in STATE_SLUGS: + if state in STATE_SLUGS[dt]: + return dt + return None + + # number of days (= buckets) to show in the graphs + days = 120 if has_role(request.user, ["Area Director", "Secretariat"]) else 1 + now = timezone.now() ads = [] responsible = Document.objects.values_list("ad", flat=True).distinct() @@ -479,211 +485,174 @@ def ad_workload(request): ).distinct(): if p in get_active_ads(): ads.append(p) + ads.sort(key=lambda p: normalize_for_sorting(p.plain_name())) - doctypes = list( - DocTypeName.objects.filter(used=True) - .exclude(slug__in=("draft", "liai-att")) - .values_list("pk", flat=True) - ) - - up_is_good = {} - group_types = ad_dashboard_group_type(None) - groups = {g: {} for g in group_types} - group_names = {g: [] for g in group_types} - - # Prefill groups in preferred sort order - # FIXME: This should really use the database states instead of replicating the logic - for id, (g, uig) in enumerate( - [ - ("Publication Requested Internet-Draft", False), - ("AD Evaluation Internet-Draft", False), - ("Last Call Requested Internet-Draft", True), - ("In Last Call Internet-Draft", True), - ("Waiting for Writeup Internet-Draft", False), - ("IESG Evaluation - Defer Internet-Draft", False), - ("IESG Evaluation Internet-Draft", True), - ("Waiting for AD Go-Ahead Internet-Draft", False), - ("Approved-announcement to be sent Internet-Draft", True), - ("Approved-announcement sent Internet-Draft", True), - ] - ): - groups["I-D"][g] = id - group_names["I-D"].append(g) - up_is_good[g] = uig - - for id, g in enumerate(["RFC Ed Queue Internet-Draft", "RFC"]): - groups["RFC"][g] = id - group_names["RFC"].append(g) - up_is_good[g] = True - - for id, (g, uig) in enumerate( - [ - ("AD Review Conflict Review", False), - ("Needs Shepherd Conflict Review", False), - ("IESG Evaluation Conflict Review", True), - ("Approved Conflict Review", True), - ("Withdrawn Conflict Review", None), - ] - ): - groups["Conflict Review"][g] = id - group_names["Conflict Review"].append(g) - up_is_good[g] = uig - - for id, (g, uig) in enumerate( - [ - ("Publication Requested Status Change", False), - ("AD Evaluation Status Change", False), - ("Last Call 
Requested Status Change", True), - ("In Last Call Status Change", True), - ("Waiting for Writeup Status Change", False), - ("IESG Evaluation Status Change", True), - ("Waiting for AD Go-Ahead Status Change", False), - ] - ): - groups["Status Change"][g] = id - group_names["Status Change"].append(g) - up_is_good[g] = uig - - for id, (g, uig) in enumerate( - [ - ("Not currently under review Charter", None), - ("Draft Charter Charter", None), - ("Start Chartering/Rechartering (Internal Steering Group/IAB Review) Charter", False), - ("External Review (Message to Community, Selected by Secretariat) Charter", True), - ("IESG Review (Charter for Approval, Selected by Secretariat) Charter", True), - ("Approved Charter", True), - ("Replaced Charter", None), - ] - ): - groups["Charter"][g] = id - group_names["Charter"].append(g) - up_is_good[g] = uig + bucket_template = { + dt: {state: [[] for _ in range(days)] for state in STATE_SLUGS[dt].values()} + for dt in STATE_SLUGS + } + sums = copy.deepcopy(bucket_template) for ad in ads: - form = SearchForm( - { - "by": "ad", - "ad": ad.id, - "rfcs": "on", - "activedrafts": "on", - "olddrafts": "on", - "doctypes": doctypes, - } - ) - ad.dashboard = urlreverse( "ietf.doc.views_search.docs_for_ad", kwargs=dict(name=ad.full_name_as_key()) ) - ad.counts = defaultdict(list) - ad.prev = defaultdict(list) - ad.doc_now = defaultdict(list) - ad.doc_prev = defaultdict(list) - - for doc in retrieve_search_results(form): - group_type = ad_dashboard_group_type(doc) - if group_type and group_type in groups: - # Right now, anything with group_type "Document", such as a bofreq is not handled. - group = ad_dashboard_group(doc) - if group not in groups[group_type]: - groups[group_type][group] = len(groups[group_type]) - group_names[group_type].append(group) - - inc = len(groups[group_type]) - len(ad.counts[group_type]) - if inc > 0: - ad.counts[group_type].extend([0] * inc) - ad.prev[group_type].extend([0] * inc) - ad.doc_now[group_type].extend(set() for _ in range(inc)) - ad.doc_prev[group_type].extend(set() for _ in range(inc)) - - ad.counts[group_type][groups[group_type][group]] += 1 - ad.doc_now[group_type][groups[group_type][group]].add(doc) - - last_state_event = ( - doc.docevent_set.filter( - Q(type="started_iesg_process") | Q(type="changed_state") - ) - .order_by("-time") - .first() - ) - if (last_state_event is not None) and (right_now - last_state_event.time) > delta: - ad.prev[group_type][groups[group_type][group]] += 1 - ad.doc_prev[group_type][groups[group_type][group]].add(doc) + ad.buckets = copy.deepcopy(bucket_template) + + # https://github.com/ietf-tools/datatracker/issues/4577 + docs_via_group_ad = Document.objects.exclude( + group__acronym="none" + ).filter( + group__role__name="ad", + group__role__person=ad + ).filter( + states__type="draft-stream-ietf", + states__slug__in=["wg-doc","wg-lc","waiting-for-implementation","chair-w","writeupw"] + ) - for ad in ads: - ad.doc_diff = defaultdict(list) - for gt in group_types: - inc = len(groups[gt]) - len(ad.counts[gt]) - if inc > 0: - ad.counts[gt].extend([0] * inc) - ad.prev[gt].extend([0] * inc) - ad.doc_now[gt].extend([set()] * inc) - ad.doc_prev[gt].extend([set()] * inc) - - ad.doc_diff[gt].extend([set()] * len(groups[gt])) - for idx, g in enumerate(group_names[gt]): - ad.doc_diff[gt][idx] = ad.doc_prev[gt][idx] ^ ad.doc_now[gt][idx] - - # Shorten the names of groups - for gt in group_types: - for idx, g in enumerate(group_names[gt]): - group_names[gt][idx] = ( - shorten_group_name(g), - g, - 
up_is_good[g] if g in up_is_good else None, - ) + doc_for_ad = Document.objects.filter(ad=ad) - workload = [ - dict( - group_type=gt, - group_names=group_names[gt], - counts=[ - ( - ad, - [ - ( - group_names[gt][index], - ad.counts[gt][index], - ad.prev[gt][index], - ad.doc_diff[gt][index], - ) - for index in range(len(group_names[gt])) - ], - ) - for ad in ads - ], - sums=[ - ( - group_names[gt][index], - sum([ad.counts[gt][index] for ad in ads]), - sum([ad.prev[gt][index] for ad in ads]), - ) - for index in range(len(group_names[gt])) + ad.pre_pubreq = (docs_via_group_ad | doc_for_ad).filter( + type="draft" + ).filter( + states__type="draft", + states__slug="active" + ).filter( + states__type="draft-iesg", + states__slug="idexists" + ).distinct().count() + + for doc in Document.objects.exclude(type_id="rfc").filter(ad=ad): + dt = doc_type(doc) + state = doc_state(doc) + + state_events = doc.docevent_set.filter( + type__in=["started_iesg_process", "changed_state", "closed_ballot"] + ) + if doc.became_rfc(): + state_events = state_events | doc.became_rfc().docevent_set.filter(type="published_rfc") + state_events = state_events.order_by("-time") + + # compute state history for drafts + last = now + for e in state_events: + to_state = None + if dt == "charter": + if e.type == "closed_ballot": + to_state = _calculate_state_name(dt, state) + elif e.desc.endswith("has been replaced"): + # stop tracking + last = e.time + break + + if not to_state: + # get the state name this event changed the doc into + match = re.search( + r"(RFC) published|[Ss]tate changed to (.*?)(?:::.*)? from (.*?)(?=::|$)", + strip_tags(e.desc), + flags=re.MULTILINE, + ) + if not match: + # some irrelevant state change for the AD dashboard, ignore it + continue + to_state = match.group(1) or match.group(2) + + # fix up some states that have been renamed + if dt == "conflrev" and to_state.startswith("Approved"): + to_state = "Approved" + elif dt == "charter" and to_state.startswith( + "Start Chartering/Rechartering" + ): + to_state = "Start Chartering/Rechartering (Internal Steering Group/IAB Review)" + elif to_state == "RFC Published": + to_state = "RFC" + + if dt == "rfc": + new_dt = _state_to_doc_type(to_state) + if new_dt is not None and new_dt != dt: + dt = new_dt + + if to_state not in STATE_SLUGS[dt].keys() or to_state == "Replaced": + # change into a state the AD dashboard doesn't display + if to_state in IESG_STATES or to_state == "Replaced": + # if it's an IESG state we don't display, record it's time + last = e.time + # keep going with next event + continue + + sn = STATE_SLUGS[dt][to_state] + buckets_start = date_to_bucket(e.time, now, days) + buckets_end = date_to_bucket(last, now, days) + + if dt == "charter" and to_state == "Approved" and buckets_start < 0: + # don't count old charter approvals + break + + if buckets_start <= 0: + if buckets_end >= 0: + for b in range(0, buckets_end): + ad.buckets[dt][sn][b].append(doc.name) + sums[dt][sn][b].append(doc.name) + last = e.time + break + + # record doc state in the indicated buckets + for b in range(buckets_start, buckets_end): + ad.buckets[dt][sn][b].append(doc.name) + sums[dt][sn][b].append(doc.name) + last = e.time + + metadata = [ + { + "type": (dt, doc_type_name(dt)), + "states": [ + (state, shorten_state_name(_calculate_state_name(dt, state))) for state in ad.buckets[dt] ], - ) - for gt in group_types + "ads": ads, + } + for dt in AD_WORKLOAD ] - return render(request, "doc/ad_list.html", {"workload": workload, "delta": delta}) + data = { + dt: 
{slugify(ad): ad.buckets[dt] for ad in ads} | {"sum": sums[dt]} + for dt in AD_WORKLOAD + } + + return render( + request, + "doc/ad_list.html", + {"metadata": metadata, "data": data, "delta": days}, + ) + def docs_for_ad(request, name): + def sort_key(doc): + dt = doc_type(doc) + dt_key = list(AD_WORKLOAD.keys()).index(dt) + ds = doc_state(doc) + ds_key = AD_WORKLOAD[dt].index(ds) if ds in AD_WORKLOAD[dt] else 99 + return dt_key * 100 + ds_key + ad = None - responsible = Document.objects.values_list('ad', flat=True).distinct() - for p in Person.objects.filter(Q(role__name__in=("pre-ad", "ad"), - role__group__type="area", - role__group__state="active") - | Q(pk__in=responsible)).distinct(): + responsible = Document.objects.values_list("ad", flat=True).distinct() + for p in Person.objects.filter( + Q( + role__name__in=("pre-ad", "ad"), + role__group__type="area", + role__group__state="active", + ) + | Q(pk__in=responsible) + ).distinct(): if name == p.full_name_as_key(): ad = p break if not ad: raise Http404 - form = SearchForm({'by':'ad','ad': ad.id, - 'rfcs':'on', 'activedrafts':'on', 'olddrafts':'on', - 'sort': 'status', - 'doctypes': list(DocTypeName.objects.filter(used=True).exclude(slug__in=('draft','liai-att')).values_list("pk", flat=True))}) - results, meta = prepare_document_table(request, retrieve_search_results(form), form.data, max_results=500) - results.sort(key=ad_dashboard_sort_key) - del meta["headers"][-1] + + results, meta = prepare_document_table( + request, Document.objects.filter(ad=ad), max_results=500, show_ad_and_shepherd=False + ) + results.sort(key=lambda d: sort_key(d)) # filter out some results results = [ @@ -701,33 +670,45 @@ def docs_for_ad(request, name): and ( r.get_state_slug("draft-iesg") == "dead" or r.get_state_slug("draft") == "repl" + or r.get_state_slug("draft") == "rfc" + or (r.get_state_slug("draft") == "expired" and r.get_state_slug("draft-iesg") == "idexists") ) ) ] + _calculate_state_name = get_state_name_calculator() for d in results: - d.search_heading = ad_dashboard_group(d) + dt = d.type.slug + d.search_heading = _calculate_state_name(dt, doc_state(d)) + if d.search_heading != "RFC": + d.search_heading += f" {doc_type_name(dt)}" # Additional content showing docs with blocking positions by this AD, # and docs that the AD hasn't balloted on that are lacking ballot positions to progress blocked_docs = [] not_balloted_docs = [] if ad in get_active_ads(): - iesg_docs = Document.objects.filter(Q(states__type="draft-iesg", - states__slug__in=IESG_BALLOT_ACTIVE_STATES) | - Q(states__type="charter", - states__slug__in=IESG_CHARTER_ACTIVE_STATES) | - Q(states__type__in=("statchg", "conflrev"), - states__slug__in=IESG_STATCHG_CONFLREV_ACTIVE_STATES)).distinct() - possible_docs = iesg_docs.filter(docevent__ballotpositiondocevent__pos__blocking=True, - docevent__ballotpositiondocevent__balloter=ad) + iesg_docs = Document.objects.filter( + Q(states__type="draft-iesg", states__slug__in=IESG_BALLOT_ACTIVE_STATES) + | Q(states__type="charter", states__slug__in=IESG_CHARTER_ACTIVE_STATES) + | Q( + states__type__in=("statchg", "conflrev"), + states__slug__in=IESG_STATCHG_CONFLREV_ACTIVE_STATES, + ) + ).distinct() + possible_docs = iesg_docs.filter( + docevent__ballotpositiondocevent__pos__blocking=True, + docevent__ballotpositiondocevent__balloter=ad, + ) for doc in possible_docs: ballot = doc.active_ballot() if not ballot: continue blocking_positions = [p for p in ballot.all_positions() if p.pos.blocking] - if not blocking_positions or not any( p.balloter==ad 
for p in blocking_positions ): + if not blocking_positions or not any( + p.balloter == ad for p in blocking_positions + ): continue augment_events_with_revision(doc, blocking_positions) @@ -739,7 +720,12 @@ def docs_for_ad(request, name): # latest first if blocked_docs: - blocked_docs.sort(key=lambda d: min(p.time for p in d.blocking_positions if p.balloter==ad), reverse=True) + blocked_docs.sort( + key=lambda d: min( + p.time for p in d.blocking_positions if p.balloter == ad + ), + reverse=True, + ) possible_docs = iesg_docs.exclude( Q(docevent__ballotpositiondocevent__balloter=ad) @@ -750,7 +736,7 @@ def docs_for_ad(request, name): not ballot or doc.get_state_slug("draft") == "repl" or doc.get_state_slug("draft-iesg") == "defer" - or (doc.telechat_date() and doc.telechat_date() > timezone.now().date()) + or not doc.previous_telechat_date() ): continue @@ -760,9 +746,103 @@ def docs_for_ad(request, name): if re.search(r"\bNeeds\s+\d+", iesg_ballot_summary): not_balloted_docs.append(doc) - return render(request, 'doc/drafts_for_ad.html', { - 'form':form, 'docs':results, 'meta':meta, 'ad_name': ad.plain_name(), 'blocked_docs': blocked_docs, 'not_balloted_docs': not_balloted_docs - }) + return render( + request, + "doc/drafts_for_ad.html", + { + "docs": results, + "meta": meta, + "ad": ad, + "blocked_docs": blocked_docs, + "not_balloted_docs": not_balloted_docs, + }, + ) + + +def docs_for_iesg(request): + def sort_key(doc): + dt = doc_type(doc) + dt_key = list(AD_WORKLOAD.keys()).index(dt) + ds = doc_state(doc) + ds_key = AD_WORKLOAD[dt].index(ds) if ds in AD_WORKLOAD[dt] else 99 + return dt_key * 100 + ds_key + + results, meta = prepare_document_table( + request, + Document.objects.filter( + ad__in=Person.objects.filter( + Q( + role__name__in=("pre-ad", "ad"), + role__group__type="area", + role__group__state="active", + ) + ) + ).exclude( + type_id="rfc", + ).exclude( + type_id="draft", + states__type="draft", + states__slug__in=["repl", "rfc"], + ).exclude( + type_id="draft", + states__type="draft-iesg", + states__slug__in=["idexists", "rfcqueue"], + ).exclude( + type_id="conflrev", + states__type="conflrev", + states__slug__in=["appr-noprob-sent", "appr-reqnopub-sent", "withdraw", "dead"], + ).exclude( + type_id="statchg", + states__type="statchg", + states__slug__in=["appr-sent", "dead"], + ).exclude( + type_id="charter", + states__type="charter", + states__slug__in=["notrev", "infrev", "approved", "replaced"], + ), + max_results=1000, + show_ad_and_shepherd=True, + ) + results.sort(key=lambda d: sort_key(d)) + + # filter out some results + results = [ + r + for r in results + if not ( + r.type_id == "charter" + and ( + r.group.state_id == "abandon" + or r.get_state_slug("charter") == "replaced" + ) + ) + and not ( + r.type_id == "draft" + and ( + r.get_state_slug("draft-iesg") == "dead" + or r.get_state_slug("draft") == "repl" + or r.get_state_slug("draft") == "rfc" + ) + ) + ] + + _calculate_state_name = get_state_name_calculator() + for d in results: + dt = d.type.slug + d.search_heading = _calculate_state_name(dt, doc_state(d)) + if d.search_heading != "RFC": + d.search_heading += f" {doc_type_name(dt)}" + + return render( + request, + "doc/drafts_for_iesg.html", + { + "docs": results, + "meta": meta, + }, + ) + + def drafts_in_last_call(request): lc_state = State.objects.get(type="draft-iesg", slug="lc").pk form = SearchForm({'by':'state','state': lc_state, 'rfcs':'on', 'activedrafts':'on'}) @@ -775,31 +855,6 @@ def drafts_in_last_call(request): 'form':form, 'docs':results, 
'meta':meta, 'pages':pages }) -def drafts_in_iesg_process(request): - states = State.objects.filter(type="draft-iesg").exclude(slug__in=('idexists', 'pub', 'dead', 'watching', 'rfcqueue')) - title = "Documents in IESG process" - - grouped_docs = [] - - for s in states.order_by("order"): - docs = Document.objects.filter(type="draft", states=s).distinct().order_by("time").select_related("ad", "group", "group__parent") - if docs: - if s.slug == "lc": - for d in docs: - e = d.latest_event(LastCallDocEvent, type="sent_last_call") - # If we don't have an event, use an arbitrary date in the past (but not datetime.datetime.min, - # which causes problems with timezone conversions) - d.lc_expires = e.expires if e else datetime.datetime(1950, 1, 1) - docs = list(docs) - docs.sort(key=lambda d: d.lc_expires) - - grouped_docs.append((s, docs)) - - return render(request, 'doc/drafts_in_iesg_process.html', { - "grouped_docs": grouped_docs, - "title": title, - }) - def recent_drafts(request, days=7): slowcache = caches['slowpages'] cache_key = f'recentdraftsview{days}' @@ -824,47 +879,50 @@ def recent_drafts(request, days=7): }) -def index_all_drafts(request): +def index_all_drafts(request): # Should we rename this # try to be efficient since this view returns a lot of data categories = [] - for s in ("active", "rfc", "expired", "repl", "auth-rm", "ietf-rm"): + # Gather drafts + for s in ("active", "expired", "repl", "auth-rm", "ietf-rm"): state = State.objects.get(type="draft", slug=s) - if state.slug == "rfc": - heading = "RFCs" - elif state.slug in ("ietf-rm", "auth-rm"): + if state.slug in ("ietf-rm", "auth-rm"): heading = "Internet-Drafts %s" % state.name else: heading = "%s Internet-Drafts" % state.name - draft_names = DocAlias.objects.filter(docs__states=state).values_list("name", "docs__name") - - names = [] - names_to_skip = set() - for name, doc in draft_names: - sort_key = name - if name != doc: - if not name.startswith("rfc"): - name, doc = doc, name - names_to_skip.add(doc) - - if name.startswith("rfc"): - name = name.upper() - sort_key = '%09d' % (100000000-int(name[3:])) - - names.append((name, sort_key)) + drafts = Document.objects.filter(type_id="draft", states=state).order_by("name") - names.sort(key=lambda t: t[1]) - - names = [f'{n}' - for n, __ in names if n not in names_to_skip] + names = [ + f'{doc.name}' + for doc in drafts + ] categories.append((state, heading, len(names), "
".join(names) )) + + # gather RFCs + rfcs = Document.objects.filter(type_id="rfc").order_by('-rfc_number') + names = [ + f'{rfc.name.upper()}' + for rfc in rfcs + ] + + state = State.objects.get(type_id="rfc", slug="published") + + categories.append((state, + "RFCs", + len(names), + "
".join(names) + )) + + # Return to the previous section ordering + categories = categories[0:1]+categories[5:]+categories[1:5] + return render(request, 'doc/index_all_drafts.html', { "categories": categories }) def index_active_drafts(request): @@ -876,27 +934,42 @@ def index_active_drafts(request): slowcache.set(cache_key, groups, 15*60) return render(request, "doc/index_active_drafts.html", { 'groups': groups }) -def ajax_select2_search_docs(request, model_name, doc_type): - if model_name == "docalias": - model = DocAlias - else: - model = Document +def ajax_select2_search_docs(request, model_name, doc_type): # TODO - remove model_name argument... + """Get results for a select2 search field + + doc_type can be "draft", "rfc", or "all", to search for only docs of type "draft", only docs of + type "rfc", or docs of type "draft" or "rfc" or any of the subseries ("bcp", "std", ...). + + If a need arises for searching _only_ for draft or rfc, without including the subseries, then an + additional option or options will be needed. + """ + model = Document # Earlier versions allowed searching over DocAlias which no longer exists q = [w.strip() for w in request.GET.get('q', '').split() if w.strip()] if not q: objs = model.objects.none() else: - qs = model.objects.all() - - if model == Document: - qs = qs.filter(type=doc_type) - elif model == DocAlias: - qs = qs.filter(docs__type=doc_type) - + if doc_type == "draft": + types = ["draft"] + elif doc_type == "rfc": + types = ["rfc"] + elif doc_type == "all": + types = ("draft", "rfc", "bcp", "fyi", "std") + else: + return HttpResponseBadRequest("Invalid document type") + qs = model.objects.filter(type__in=[t.strip() for t in types]) for t in q: qs = qs.filter(name__icontains=t) objs = qs.distinct().order_by("name")[:20] return HttpResponse(select2_id_doc_name_json(model, objs), content_type='application/json') + +def index_subseries(request, type_id): + docs = sorted(Document.objects.filter(type_id=type_id),key=lambda o: int(o.name[3:])) + if len(docs)>0: + type = docs[0].type + else: + type = DocTypeName.objects.get(slug=type_id) + return render(request, "doc/index_subseries.html", {"type": type, "docs": docs}) diff --git a/ietf/doc/views_statement.py b/ietf/doc/views_statement.py index 7f10af3562..4b85c81d83 100644 --- a/ietf/doc/views_statement.py +++ b/ietf/doc/views_statement.py @@ -1,4 +1,5 @@ -# Copyright The IETF Trust 2023, All Rights Reserved +# Copyright The IETF Trust 2023-2025, All Rights Reserved +from django.contrib import messages import debug # pyflakes: ignore @@ -6,14 +7,17 @@ from django import forms from django.conf import settings -from django.http import FileResponse, Http404 +from django.http import FileResponse, Http404, HttpResponseRedirect from django.views.decorators.cache import cache_control from django.shortcuts import get_object_or_404, render, redirect from django.template.loader import render_to_string + +from ietf.doc.forms import ChangeStatementStateForm +from ietf.doc.utils import add_state_change_event from ietf.utils import markdown from django.utils.html import escape -from ietf.doc.models import Document, DocAlias, DocEvent, NewRevisionDocEvent, State +from ietf.doc.models import Document, DocEvent, NewRevisionDocEvent, State from ietf.group.models import Group from ietf.ietfauth.utils import role_required from ietf.utils.text import xslugify @@ -94,7 +98,7 @@ def require_field(f): ) if markdown_content != "": try: - _ = markdown.markdown(markdown_content) + _ = markdown.liberal_markdown(markdown_content) 
except Exception as e: raise forms.ValidationError(f"Markdown processing failed: {e}") @@ -137,12 +141,15 @@ def submit(request, name): mode="wb" if writing_pdf else "w" ) as destination: if writing_pdf: - for chunk in form.cleaned_data["statement_file"].chunks(): + f = form.cleaned_data["statement_file"] + for chunk in f.chunks(): destination.write(chunk) + f.seek(0) + statement.store_file(statement.uploaded_filename, f) else: destination.write(markdown_content) + statement.store_str(statement.uploaded_filename, markdown_content) return redirect("ietf.doc.views_doc.document_main", name=statement.name) - else: if statement.uploaded_filename.endswith("pdf"): text = CONST_PDF_REV_NOTICE @@ -242,8 +249,6 @@ def new_statement(request): time=statement.time, ) statement.save_with_history([e1, e2]) - alias = DocAlias.objects.create(name=name) - alias.docs.set([statement]) markdown_content = "" if statement_submission == "upload": if not writing_pdf: @@ -256,10 +261,14 @@ def new_statement(request): mode="wb" if writing_pdf else "w" ) as destination: if writing_pdf: - for chunk in form.cleaned_data["statement_file"].chunks(): + f = form.cleaned_data["statement_file"] + for chunk in f.chunks(): destination.write(chunk) + f.seek(0) + statement.store_file(statement.uploaded_filename, f) else: destination.write(markdown_content) + statement.store_str(statement.uploaded_filename, markdown_content) return redirect("ietf.doc.views_doc.document_main", name=statement.name) else: @@ -273,3 +282,40 @@ def new_statement(request): } form = NewStatementForm(initial=init) return render(request, "doc/statement/new_statement.html", {"form": form}) + + +@role_required("Secretariat") +def change_statement_state(request, name): + """Change state of a statement Document""" + statement = get_object_or_404( + Document.objects.filter(type_id="statement"), + name=name, + ) + if request.method == "POST": + form = ChangeStatementStateForm(request.POST) + if form.is_valid(): + new_state = form.cleaned_data["state"] + prev_state = statement.get_state() + if new_state == prev_state: + messages.info(request, f"State not changed, remains {prev_state}.") + else: + statement.set_state(new_state) + e = add_state_change_event( + statement, + request.user.person, + prev_state, + new_state, + ) + statement.save_with_history([e]) + messages.success(request, f"State changed to {new_state}.") + return HttpResponseRedirect(statement.get_absolute_url()) + else: + form = ChangeStatementStateForm(initial={"state": statement.get_state()}) + return render( + request, + "doc/statement/change_statement_state.html", + { + "form": form, + "statement": statement, + }, + ) diff --git a/ietf/doc/views_stats.py b/ietf/doc/views_stats.py index cefc7e152b..028573b338 100644 --- a/ietf/doc/views_stats.py +++ b/ietf/doc/views_stats.py @@ -4,26 +4,21 @@ import datetime from django.conf import settings -from django.core.cache import cache -from django.urls import reverse as urlreverse from django.db.models.aggregates import Count from django.db.models.functions import TruncDate -from django.http import JsonResponse, HttpResponseBadRequest -from django.shortcuts import render +from django.http import JsonResponse from django.views.decorators.cache import cache_page import debug # pyflakes:ignore from ietf.doc.models import DocEvent from ietf.doc.templatetags.ietf_filters import comma_separated_list -from ietf.doc.utils import get_search_cache_key -from ietf.doc.views_search import SearchForm, retrieve_search_results from ietf.name.models import DocTypeName 
from ietf.person.models import Person from ietf.utils.timezone import date_today -epochday = datetime.datetime.utcfromtimestamp(0).date().toordinal() +epochday = datetime.datetime.fromtimestamp(0, datetime.UTC).date().toordinal() def dt(s): @@ -40,13 +35,13 @@ def model_to_timeline_data(model, field='time', **kwargs): assert field in [ f.name for f in model._meta.get_fields() ] objects = ( model.objects.filter(**kwargs) - .annotate(date=TruncDate(field, tzinfo=datetime.timezone.utc)) + .annotate(date=TruncDate(field, tzinfo=datetime.UTC)) .order_by('date') .values('date') .annotate(count=Count('id'))) if objects.exists(): obj_list = list(objects) - today = date_today(datetime.timezone.utc) + today = date_today(datetime.UTC) if not obj_list[-1]['date'] == today: obj_list += [ {'date': today, 'count': 0} ] data = [ ((e['date'].toordinal()-epochday)*1000*60*60*24, e['count']) for e in obj_list ] @@ -113,49 +108,6 @@ def make_title(queryargs): title += ' with name matching "%s"' % name return title -def chart_newrevisiondocevent(request): - return render(request, "doc/stats/highstock.html", { - "title": "Document Statistics", - "confurl": urlreverse("ietf.doc.views_stats.chart_conf_newrevisiondocevent"), - "dataurl": urlreverse("ietf.doc.views_stats.chart_data_newrevisiondocevent"), - "queryargs": request.GET.urlencode(), - } - ) - -#@cache_page(60*15) -def chart_data_newrevisiondocevent(request): - queryargs = request.GET - if queryargs: - cache_key = get_search_cache_key(queryargs) - results = cache.get(cache_key) - if not results: - form = SearchForm(queryargs) - if not form.is_valid(): - return HttpResponseBadRequest("form not valid: %s" % form.errors) - results = retrieve_search_results(form) - if results.exists(): - cache.set(cache_key, results) - if results.exists(): - data = model_to_timeline_data(DocEvent, doc__in=results, type='new_revision') - else: - data = [] - else: - data = [] - return JsonResponse(data, safe=False) - - -@cache_page(60*15) -def chart_conf_newrevisiondocevent(request): - queryargs = request.GET - if queryargs: - conf = copy.deepcopy(settings.CHART_TYPE_COLUMN_OPTIONS) - conf['title']['text'] = make_title(queryargs) - conf['series'][0]['name'] = "Submitted %s" % get_doctypes(queryargs, pluralize=True).lower(), - else: - conf = {} - return JsonResponse(conf) - - @cache_page(60*15) def chart_conf_person_drafts(request, id): person = Person.objects.filter(id=id).first() diff --git a/ietf/doc/views_status_change.py b/ietf/doc/views_status_change.py index 6db4338f42..2bccc213c4 100644 --- a/ietf/doc/views_status_change.py +++ b/ietf/doc/views_status_change.py @@ -5,6 +5,7 @@ import datetime import io import os +from pathlib import Path import re from typing import Dict # pyflakes:ignore @@ -21,7 +22,7 @@ import debug # pyflakes:ignore from ietf.doc.mails import email_ad_approved_status_change -from ietf.doc.models import ( Document, DocAlias, State, DocEvent, BallotDocEvent, +from ietf.doc.models import ( Document, State, DocEvent, BallotDocEvent, BallotPositionDocEvent, NewRevisionDocEvent, WriteupDocEvent, STATUSCHANGE_RELATIONS ) from ietf.doc.forms import AdForm from ietf.doc.lastcall import request_last_call @@ -33,6 +34,7 @@ from ietf.mailtrigger.utils import gather_address_lists from ietf.name.models import DocRelationshipName, StdLevelName from ietf.person.models import Person +from ietf.utils.log import log from ietf.utils.mail import send_mail_preformatted from ietf.utils.textupload import get_cleaned_text_file_content from ietf.utils.timezone import 
date_today, DEADLINE_TZINFO @@ -104,8 +106,8 @@ def change_state(request, name, option=None): relationship__slug__in=STATUSCHANGE_RELATIONS ) related_doc_info = [ - dict(title=rel_doc.target.document.title, - canonical_name=rel_doc.target.document.canonical_name(), + dict(title=rel_doc.target.title, + name=rel_doc.target.name, newstatus=newstatus(rel_doc)) for rel_doc in related_docs ] @@ -154,12 +156,23 @@ def clean_txt(self): return get_cleaned_text_file_content(self.cleaned_data["txt"]) def save(self, doc): - filename = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev)) - with io.open(filename, 'w', encoding='utf-8') as destination: - if self.cleaned_data['txt']: - destination.write(self.cleaned_data['txt']) - else: - destination.write(self.cleaned_data['content']) + basename = f"{doc.name}-{doc.rev}.txt" + filename = Path(settings.STATUS_CHANGE_PATH) / basename + with io.open(filename, 'w', encoding='utf-8') as destination: + if self.cleaned_data['txt']: + content = self.cleaned_data['txt'] + else: + content = self.cleaned_data['content'] + destination.write(content) + doc.store_str(basename, content) + try: + ftp_filename = Path(settings.FTP_DIR) / "status-changes" / basename + os.link(filename, ftp_filename) # Path.hardlink is not available until 3.10 + except IOError as ex: + log( + "There was an error creating a hardlink at %s pointing to %s: %s" + % (ftp_filename, filename, ex) + ) #This is very close to submit on charter - can we get better reuse? @role_required('Area Director','Secretariat') @@ -168,7 +181,7 @@ def submit(request, name): login = request.user.person - path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev)) + path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.name, doc.rev)) not_uploaded_yet = doc.rev == "00" and not os.path.exists(path) if not_uploaded_yet: @@ -185,7 +198,7 @@ def submit(request, name): events = [] e = NewRevisionDocEvent(doc=doc, by=login, type="new_revision") - e.desc = "New version available: %s-%s.txt" % (doc.canonical_name(), doc.rev) + e.desc = "New version available: %s-%s.txt" % (doc.name, doc.rev) e.rev = doc.rev e.save() events.append(e) @@ -217,7 +230,7 @@ def submit(request, name): dict(), ) else: - filename = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev)) + filename = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.name, doc.rev)) try: with io.open(filename, 'r') as f: init["content"] = f.read() @@ -259,7 +272,7 @@ def edit_title(request, name): init = { "title" : status_change.title } form = ChangeTitleForm(initial=init) - titletext = '%s-%s.txt' % (status_change.canonical_name(),status_change.rev) + titletext = '%s-%s.txt' % (status_change.name,status_change.rev) return render(request, 'doc/change_title.html', {'form': form, 'doc': status_change, @@ -290,7 +303,7 @@ def edit_ad(request, name): init = { "ad" : status_change.ad_id } form = AdForm(initial=init) - titletext = '%s-%s.txt' % (status_change.canonical_name(),status_change.rev) + titletext = '%s-%s.txt' % (status_change.name,status_change.rev) return render(request, 'doc/change_ad.html', {'form': form, 'doc': status_change, @@ -315,7 +328,7 @@ def default_approval_text(status_change,relateddoc): current_text = status_change.text_or_error() # pyflakes:ignore - if relateddoc.target.document.std_level_id in ('std','ps','ds','bcp',): + if relateddoc.target.std_level_id in ('std','ps','ds','bcp',): action = "Protocol Action" else: 
action = "Document Action" @@ -326,7 +339,7 @@ def default_approval_text(status_change,relateddoc): dict(status_change=status_change, status_change_url = settings.IDTRACKER_BASE_URL+status_change.get_absolute_url(), relateddoc= relateddoc, - relateddoc_url = settings.IDTRACKER_BASE_URL+relateddoc.target.document.get_absolute_url(), + relateddoc_url = settings.IDTRACKER_BASE_URL+relateddoc.target.get_absolute_url(), approved_text = current_text, action=action, newstatus=newstatus(relateddoc), @@ -394,7 +407,7 @@ def approve(request, name): for rel in status_change.relateddocument_set.filter(relationship__slug__in=STATUSCHANGE_RELATIONS): # Add a document event to each target - c = DocEvent(type="added_comment", doc=rel.target.document, rev=rel.target.document.rev, by=login) + c = DocEvent(type="added_comment", doc=rel.target, rev=rel.target.rev, by=login) c.desc = "New status of %s approved by the IESG\n%s%s" % (newstatus(rel), settings.IDTRACKER_BASE_URL,reverse('ietf.doc.views_doc.document_main', kwargs={'name': status_change.name})) c.save() @@ -405,7 +418,7 @@ def approve(request, name): init = [] for rel in status_change.relateddocument_set.filter(relationship__slug__in=STATUSCHANGE_RELATIONS): init.append({"announcement_text" : escape(default_approval_text(status_change,rel)), - "label": "Announcement text for %s to %s"%(rel.target.document.canonical_name(),newstatus(rel)), + "label": "Announcement text for %s to %s"%(rel.target.name,newstatus(rel)), }) formset = AnnouncementFormSet(initial=init) for form in formset.forms: @@ -445,7 +458,7 @@ def clean_helper(form, formtype): if not re.match(r'(?i)rfc\d{1,4}',key): errors.append(key+" is not a valid RFC - please use the form RFCn\n") - elif not DocAlias.objects.filter(name=key): + elif not Document.objects.filter(name=key): errors.append(key+" does not exist\n") if new_relations[key] not in STATUSCHANGE_RELATIONS: @@ -543,7 +556,7 @@ def start_rfc_status_change(request, name=None): if name: if not re.match("(?i)rfc[0-9]{1,4}",name): raise Http404 - seed_rfc = get_object_or_404(Document, type="draft", docalias__name=name) + seed_rfc = get_object_or_404(Document, type="rfc", name=name) login = request.user.person @@ -566,14 +579,11 @@ def start_rfc_status_change(request, name=None): group=iesg_group, ) status_change.set_state(form.cleaned_data['create_in_state']) - - DocAlias.objects.create( name= 'status-change-'+form.cleaned_data['document_name']).docs.add(status_change) for key in form.cleaned_data['relations']: - status_change.relateddocument_set.create(target=DocAlias.objects.get(name=key), + status_change.relateddocument_set.create(target=Document.objects.get(name=key), relationship_id=form.cleaned_data['relations'][key]) - tc_date = form.cleaned_data['telechat_date'] if tc_date: update_telechat(request, status_change, login, tc_date) @@ -583,9 +593,9 @@ def start_rfc_status_change(request, name=None): init = {} if name: init['title'] = "%s to CHANGETHIS" % seed_rfc.title - init['document_name'] = "%s-to-CHANGETHIS" % seed_rfc.canonical_name() + init['document_name'] = "%s-to-CHANGETHIS" % seed_rfc.name relations={} - relations[seed_rfc.canonical_name()]=None + relations[seed_rfc.name]=None init['relations'] = relations form = StartStatusChangeForm(initial=init) @@ -611,11 +621,11 @@ def edit_relations(request, name): old_relations={} for rel in status_change.relateddocument_set.filter(relationship__slug__in=STATUSCHANGE_RELATIONS): - old_relations[rel.target.document.canonical_name()]=rel.relationship.slug + 
old_relations[rel.target.name]=rel.relationship.slug new_relations=form.cleaned_data['relations'] status_change.relateddocument_set.filter(relationship__slug__in=STATUSCHANGE_RELATIONS).delete() for key in new_relations: - status_change.relateddocument_set.create(target=DocAlias.objects.get(name=key), + status_change.relateddocument_set.create(target=Document.objects.get(name=key), relationship_id=new_relations[key]) c = DocEvent(type="added_comment", doc=status_change, rev=status_change.rev, by=login) c.desc = "Affected RFC list changed.\nOLD:" @@ -632,7 +642,7 @@ def edit_relations(request, name): else: relations={} for rel in status_change.relateddocument_set.filter(relationship__slug__in=STATUSCHANGE_RELATIONS): - relations[rel.target.document.canonical_name()]=rel.relationship.slug + relations[rel.target.name]=rel.relationship.slug init = { "relations":relations, } form = EditStatusChangeForm(initial=init) @@ -659,8 +669,8 @@ def generate_last_call_text(request, doc): settings=settings, requester=requester, expiration_date=expiration_date.strftime("%Y-%m-%d"), - changes=['%s from %s to %s\n (%s)'%(rel.target.name.upper(),rel.target.document.std_level.name,newstatus(rel),rel.target.document.title) for rel in doc.relateddocument_set.filter(relationship__slug__in=STATUSCHANGE_RELATIONS)], - urls=[rel.target.document.get_absolute_url() for rel in doc.relateddocument_set.filter(relationship__slug__in=STATUSCHANGE_RELATIONS)], + changes=['%s from %s to %s\n (%s)'%(rel.target.name.upper(),rel.target.std_level.name,newstatus(rel),rel.target.title) for rel in doc.relateddocument_set.filter(relationship__slug__in=STATUSCHANGE_RELATIONS)], + urls=[rel.target.get_absolute_url() for rel in doc.relateddocument_set.filter(relationship__slug__in=STATUSCHANGE_RELATIONS)], cc=cc ) ) diff --git a/ietf/group/admin.py b/ietf/group/admin.py index 5095b6b24d..685c10aeea 100644 --- a/ietf/group/admin.py +++ b/ietf/group/admin.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2010-2020, All Rights Reserved +# Copyright The IETF Trust 2010-2024, All Rights Reserved # -*- coding: utf-8 -*- import re @@ -26,14 +26,15 @@ MilestoneGroupEvent, GroupExtResource, Appeal, AppealArtifact ) from ietf.name.models import GroupTypeName -from ietf.utils.validators import validate_external_resource_value +from ietf.utils.admin import SaferTabularInline from ietf.utils.response import permission_denied +from ietf.utils.validators import validate_external_resource_value -class RoleInline(admin.TabularInline): +class RoleInline(SaferTabularInline): model = Role raw_id_fields = ["person", "email"] -class GroupURLInline(admin.TabularInline): +class GroupURLInline(SaferTabularInline): model = GroupURL class GroupForm(forms.ModelForm): @@ -72,6 +73,12 @@ def clean(self): 'Acronym is invalid. For groups that create documents, the acronym must be at least ' 'two characters and only contain lowercase letters and numbers starting with a letter.' ) + elif self.cleaned_data['type'].pk == 'sdo': + valid_re = r'^[a-z0-9][a-z0-9-]*[a-z0-9]$' + error_msg = ( + 'Acronym is invalid. It must be at least two characters and only contain lowercase ' + 'letters and numbers. It may contain hyphens, but that is discouraged.' 
+ ) else: valid_re = r'^[a-z][a-z0-9-]*[a-z0-9]$' error_msg = ( diff --git a/ietf/group/management/commands/generate_group_aliases.py b/ietf/group/management/commands/generate_group_aliases.py deleted file mode 100755 index 630f35c441..0000000000 --- a/ietf/group/management/commands/generate_group_aliases.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright The IETF Trust 2012-2021, All Rights Reserved -# -*- coding: utf-8 -*- - -# This was written as a script by Markus Stenberg . -# It was turned into a management command by Russ Housley . - -import datetime -import io -import os -import shutil -import stat -import time - -from tempfile import mkstemp - -from django.conf import settings -from django.core.management.base import BaseCommand -from django.utils import timezone - -import debug # pyflakes:ignore - -from ietf.group.models import Group -from ietf.group.utils import get_group_ad_emails, get_group_role_emails, get_child_group_role_emails -from ietf.name.models import GroupTypeName -from ietf.utils.aliases import dump_sublist - -DEFAULT_YEARS = 5 -ACTIVE_STATES=['active', 'bof', 'proposed'] -GROUP_TYPES=['wg', 'rg', 'rag', 'dir', 'team', 'review', 'program', 'rfcedtyp', 'edappr', 'edwg'] # This should become groupfeature driven... -NO_AD_GROUP_TYPES=['rg', 'rag', 'team', 'program', 'rfcedtyp', 'edappr', 'edwg'] -IETF_DOMAIN=['ietf.org', ] -IRTF_DOMAIN=['irtf.org', ] -IAB_DOMAIN=['iab.org', ] - -class Command(BaseCommand): - help = ('Generate the group-aliases and group-virtual files for Internet-Draft ' - 'mail aliases, placing them in the file configured in ' - 'settings.GROUP_ALIASES_PATH and settings.GROUP_VIRTUAL_PATH, ' - 'respectively. The generation includes aliases for groups that ' - 'have seen activity in the last %s years.' % (DEFAULT_YEARS)) - - def handle(self, *args, **options): - show_since = timezone.now() - datetime.timedelta(DEFAULT_YEARS*365) - - date = time.strftime("%Y-%m-%d_%H:%M:%S") - signature = '# Generated by %s at %s\n' % (os.path.abspath(__file__), date) - - ahandle, aname = mkstemp() - os.close(ahandle) - afile = io.open(aname,"w") - - vhandle, vname = mkstemp() - os.close(vhandle) - vfile = io.open(vname,"w") - - afile.write(signature) - vfile.write(signature) - vfile.write("%s anything\n" % settings.GROUP_VIRTUAL_DOMAIN) - - # Loop through each group type and build -ads and -chairs entries - for g in GROUP_TYPES: - domains = [] - domains += IETF_DOMAIN - if g in ('rg', 'rag'): - domains += IRTF_DOMAIN - if g == 'program': - domains += IAB_DOMAIN - - entries = Group.objects.filter(type=g).all() - active_entries = entries.filter(state__in=ACTIVE_STATES) - inactive_recent_entries = entries.exclude(state__in=ACTIVE_STATES).filter(time__gte=show_since) - interesting_entries = active_entries | inactive_recent_entries - - for e in interesting_entries.distinct().iterator(): - name = e.acronym - - # Research groups, teams, and programs do not have -ads lists - if not g in NO_AD_GROUP_TYPES: - dump_sublist(afile, vfile, name+'-ads', domains, settings.GROUP_VIRTUAL_DOMAIN, get_group_ad_emails(e)) - # All group types have -chairs lists - dump_sublist(afile, vfile, name+'-chairs', domains, settings.GROUP_VIRTUAL_DOMAIN, get_group_role_emails(e, ['chair', 'secr'])) - - # The area lists include every chair in active working groups in the area - areas = Group.objects.filter(type='area').all() - active_areas = areas.filter(state__in=ACTIVE_STATES) - for area in active_areas: - name = area.acronym - area_ad_emails = get_group_role_emails(area, ['pre-ad', 'ad', 'chair']) - 
dump_sublist(afile, vfile, name+'-ads', IETF_DOMAIN, settings.GROUP_VIRTUAL_DOMAIN, area_ad_emails) - dump_sublist(afile, vfile, name+'-chairs', IETF_DOMAIN, settings.GROUP_VIRTUAL_DOMAIN, (get_child_group_role_emails(area, ['chair', 'secr']) | area_ad_emails)) - - # Other groups with chairs that require Internet-Draft submission approval - gtypes = GroupTypeName.objects.values_list('slug', flat=True) - special_groups = Group.objects.filter(type__features__req_subm_approval=True, acronym__in=gtypes, state='active') - for group in special_groups: - dump_sublist(afile, vfile, group.acronym+'-chairs', IETF_DOMAIN, settings.GROUP_VIRTUAL_DOMAIN, get_group_role_emails(group, ['chair', 'delegate'])) - - afile.close() - vfile.close() - - os.chmod(aname, stat.S_IWUSR|stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH) - os.chmod(vname, stat.S_IWUSR|stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH) - - shutil.move(aname, settings.GROUP_ALIASES_PATH) - shutil.move(vname, settings.GROUP_VIRTUAL_PATH) diff --git a/ietf/group/migrations/0003_iabworkshops.py b/ietf/group/migrations/0003_iabworkshops.py new file mode 100644 index 0000000000..c8d175a100 --- /dev/null +++ b/ietf/group/migrations/0003_iabworkshops.py @@ -0,0 +1,34 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.db import migrations + +def forward(apps, schema_editor): + GroupFeatures = apps.get_model("group", "GroupFeatures") + GroupTypeName = apps.get_model("name", "GroupTypeName") + + iabworkshop = GroupFeatures.objects.create( + type_id="iabworkshop", + need_parent=True, + default_parent="iab", + has_documents=True, + has_session_materials=True, + has_meetings=True, + has_default_chat=True, + session_purposes='["regular"]', + ) + iabworkshop.parent_types.add(GroupTypeName.objects.get(slug="ietf")) + + +def reverse(apps, schema_editor): + GroupFeatures = apps.get_model("group", "GroupFeatures") + GroupFeatures.objects.filter(type="iabworkshop").delete() + +class Migration(migrations.Migration): + dependencies = [ + ("group", "0002_appeal"), + ("name", "0009_iabworkshops"), + ] + + operations = [ + migrations.RunPython(forward, reverse) + ] diff --git a/ietf/group/migrations/0004_modern_list_archive.py b/ietf/group/migrations/0004_modern_list_archive.py new file mode 100644 index 0000000000..91c2fd23e2 --- /dev/null +++ b/ietf/group/migrations/0004_modern_list_archive.py @@ -0,0 +1,24 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.conf import settings +from django.db import migrations +from django.db.models import Value +from django.db.models.functions import Replace + + +def forward(apps, schema_editor): + Group = apps.get_model("group", "Group") + old_pattern = f"{settings.MAILING_LIST_ARCHIVE_URL}/arch/search/?email_list=" + new_pattern = f"{settings.MAILING_LIST_ARCHIVE_URL}/arch/browse/" + + Group.objects.filter(list_archive__startswith=old_pattern).update( + list_archive=Replace("list_archive", Value(old_pattern), Value(new_pattern)) + ) + + +class Migration(migrations.Migration): + dependencies = [ + ("group", "0003_iabworkshops"), + ] + + operations = [migrations.RunPython(forward)] diff --git a/ietf/group/migrations/0005_remove_sdo_authorized_individuals.py b/ietf/group/migrations/0005_remove_sdo_authorized_individuals.py new file mode 100644 index 0000000000..77fe25b467 --- /dev/null +++ b/ietf/group/migrations/0005_remove_sdo_authorized_individuals.py @@ -0,0 +1,192 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from collections import defaultdict + +from django.db import migrations + 
+from ietf.person.name import plain_name + + +def get_plain_name(person): + return person.plain or plain_name(person.name) + + +def forward(apps, schema_editor): + """Remove any 'auth' Role objects for groups of type 'sdo' + + The IAB has decided that the Authorized Individual concept for + authorizing entry or management of liaison statments hasn't worked + well - the roles for the groups are not being maintained, Instead, + the concept will be removed and the liaison managers or secretariat + (and soon the liaison coordinators) will operate the liaison tool + on their behalf. + """ + Role = apps.get_model("group", "Role") + GroupEvent = apps.get_model("group", "GroupEvent") + groups = defaultdict(list) + role_qs = Role.objects.filter(name_id="auth", group__type_id="sdo") + for role in role_qs: + groups[role.group].append(role) + for group in groups: + desc = f"Removed Authorized Persons: {', '.join([get_plain_name(role.person) for role in groups[group]])}" + GroupEvent.objects.create( + group=group, + by_id=1, # (System) + desc=desc, + ) + role_qs.delete() + + +def reverse(apps, schema_editor): + """Intentionally does nothing""" + pass + + +class Migration(migrations.Migration): + dependencies = [ + ("group", "0004_modern_list_archive"), + ] + + operations = [migrations.RunPython(forward, reverse)] + + +# At the time this migration was created, it would have removed these Role objects: +# { "authorized_individuals" : [ +# {"person_id": 107937, "group_id": 56, "email": "hannu.hietalahti@nokia.com" }, # Hannu Hietalahti is Authorized Individual in 3gpp +# {"person_id": 107943, "group_id": 56, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Authorized Individual in 3gpp +# {"person_id": 112807, "group_id": 56, "email": "Paolo.Usai@etsi.org" }, # Paolo Usai is Authorized Individual in 3gpp +# {"person_id": 105859, "group_id": 56, "email": "atle.monrad@ericsson.com" }, # Atle Monrad is Authorized Individual in 3gpp +# {"person_id": 116149, "group_id": 1907, "email": "tsgsx_chair@3GPP2.org" }, # Xiaowu Zhao is Authorized Individual in 3gpp2-tsg-sx +# {"person_id": 120914, "group_id": 1902, "email": "ozgur.oyman@intel.com" }, # Ozgur Oyman is Authorized Individual in 3gpp-tsgsa-sa4 +# {"person_id": 107943, "group_id": 1902, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Authorized Individual in 3gpp-tsgsa-sa4 +# {"person_id": 119203, "group_id": 1902, "email": "fanyanping@huawei.com" }, # Yanping Fan is Authorized Individual in 3gpp-tsgsa-sa4 +# {"person_id": 112977, "group_id": 1902, "email": "tomas.frankkila@ericsson.com" }, # Tomas Frankkila is Authorized Individual in 3gpp-tsgsa-sa4 +# {"person_id": 120240, "group_id": 2019, "email": "CM8655@att.com" }, # Peter Musgrove is Authorized Individual in atis-eloc-tf +# {"person_id": 120241, "group_id": 2019, "email": "Christian.Militeau@intrado.com" }, # Christian Militeau is Authorized Individual in atis-eloc-tf +# {"person_id": 120243, "group_id": 2019, "email": "ablasgen@atis.org" }, # Alexandra Blasgen is Authorized Individual in atis-eloc-tf +# {"person_id": 114696, "group_id": 67, "email": "KEN.KO@adtran.com" }, # Ken Ko is Authorized Individual in broadband-forum +# {"person_id": 119494, "group_id": 67, "email": "michael.fargano@centurylink.com" }, # Michael Fargano is Authorized Individual in broadband-forum +# {"person_id": 124318, "group_id": 67, "email": "joey.boyd@adtran.com" }, # Joey Boyd is Authorized Individual in broadband-forum +# {"person_id": 114762, "group_id": 67, "email": "bwelch@juniper.net" }, # Bill 
Welch is Authorized Individual in broadband-forum +# {"person_id": 112837, "group_id": 67, "email": "christophe.alter@orange.com" }, # Christophe Alter is Authorized Individual in broadband-forum +# {"person_id": 141083, "group_id": 2407, "email": "dan.middleton@intel.com" }, # Dan Middleton is Authorized Individual in confidential-computing-consortium +# {"person_id": 117421, "group_id": 1933, "email": "chairman@dmtf.org" }, # Winston Bumpus is Authorized Individual in dmtf +# {"person_id": 116529, "group_id": 1919, "email": "istvan@ecma-international.org" }, # Istvan Sebestyen is Authorized Individual in ecma-tc39 +# {"person_id": 116363, "group_id": 1915, "email": "e2nasupport@etsi.org" }, # Sonia Compans is Authorized Individual in etsi-e2na +# {"person_id": 116862, "group_id": 2003, "email": "latif@ladid.lu" }, # Latif Ladid is Authorized Individual in etsi-isg-ip6 +# {"person_id": 116283, "group_id": 2198, "email": "adrian.neal@vodafone.com" }, # Adrian Neal is Authorized Individual in etsi-isg-mec +# {"person_id": 119412, "group_id": 2004, "email": "jkfernic@uwaterloo.ca" }, # Jennifer Fernick is Authorized Individual in etsi-isg-qsc +# {"person_id": 122406, "group_id": 2165, "email": "d.lake@surrey.ac.uk" }, # David Lake is Authorized Individual in etsi-ngp +# {"person_id": 122407, "group_id": 2165, "email": "andy.sutton@ee.co.uk" }, # Andy Sutton is Authorized Individual in etsi-ngp +# {"person_id": 112609, "group_id": 2165, "email": "richard.li@futurewei.com" }, # Richard Li is Authorized Individual in etsi-ngp +# {"person_id": 122406, "group_id": 2177, "email": "d.lake@surrey.ac.uk" }, # David Lake is Authorized Individual in etsi-ngp-isp +# {"person_id": 112609, "group_id": 2177, "email": "richard.li@futurewei.com" }, # Richard Li is Authorized Individual in etsi-ngp-isp +# {"person_id": 122407, "group_id": 2177, "email": "andy.sutton@ee.co.uk" }, # Andy Sutton is Authorized Individual in etsi-ngp-isp +# {"person_id": 118527, "group_id": 1986, "email": "luca.pesando@telecomitalia.it" }, # Luca Pesando is Authorized Individual in etsi-ntech +# {"person_id": 118526, "group_id": 1986, "email": "NTECHsupport@etsi.org" }, # Sylwia Korycinska is Authorized Individual in etsi-ntech +# {"person_id": 116052, "group_id": 1904, "email": "Beniamino.gorini@alcatel-lucent.com" }, # Gorini Beniamino is Authorized Individual in etsi-tc-ee +# {"person_id": 19651, "group_id": 63, "email": "glenn.parsons@ericsson.com" }, # Glenn Parsons is Authorized Individual in ieee-802-1 +# {"person_id": 107599, "group_id": 63, "email": "tony@jeffree.co.uk" }, # Tony Jeffree is Authorized Individual in ieee-802-1 +# {"person_id": 117415, "group_id": 1862, "email": "Adrian.P.Stephens@intel.com" }, # Adrian Stephens is Authorized Individual in ieee-802-11 +# {"person_id": 106284, "group_id": 1862, "email": "dstanley@arubanetworks.com" }, # Dorothy Stanley is Authorized Individual in ieee-802-11 +# {"person_id": 114106, "group_id": 1871, "email": "r.b.marks@ieee.org" }, # Roger Marks is Authorized Individual in ieee-802-16 +# {"person_id": 101753, "group_id": 1885, "email": "max.riegel@ieee.org" }, # Max Riegel is Authorized Individual in ieee-802-ec-omniran +# {"person_id": 113810, "group_id": 1859, "email": "jehrig@inventures.com" }, # John Ehrig is Authorized Individual in imtc +# {"person_id": 123010, "group_id": 48, "email": "Emil.Kowalczyk@orange.com" }, # Emil Kowalczyk is Authorized Individual in iso-iec-jtc1-sc2 +# {"person_id": 11182, "group_id": 48, "email": "paf@netnod.se" }, # Patrik Fältström is 
Authorized Individual in iso-iec-jtc1-sc2 +# {"person_id": 117429, "group_id": 1939, "email": "krystyna.passia@din.de" }, # Krystyna Passia is Authorized Individual in iso-iec-jtc1-sc27 +# {"person_id": 117428, "group_id": 1939, "email": "walter.fumy@bdr.de" }, # Walter Fumy is Authorized Individual in iso-iec-jtc1-sc27 +# {"person_id": 114435, "group_id": 74, "email": "watanabe@itscj.ipsj.or.jp" }, # Shinji Watanabe is Authorized Individual in iso-iec-jtc1-sc29-wg11 +# {"person_id": 112106, "group_id": 49, "email": "jooran@kisi.or.kr" }, # Jooran Lee is Authorized Individual in iso-iec-jtc1-sc6 +# {"person_id": 17037, "group_id": 49, "email": "dykim@comsun.chungnnam.ac.kr" }, # Dae Kim is Authorized Individual in iso-iec-jtc1-sc6 +# {"person_id": 117426, "group_id": 1938, "email": "chair@jtc1-sc7.org" }, # Francois Coallier is Authorized Individual in iso-iec-jtc1-sc7 +# {"person_id": 117427, "group_id": 1938, "email": "secretariat@jtc1-sc7.org" }, # Witold Suryn is Authorized Individual in iso-iec-jtc1-sc7 +# {"person_id": 118769, "group_id": 2144, "email": "alexandre.petrescu@gmail.com" }, # Alexandre Petrescu is Authorized Individual in isotc204 +# {"person_id": 115544, "group_id": 1890, "email": "sergio.buonomo@itu.int" }, # Sergio Buonomo is Authorized Individual in itu-r +# {"person_id": 122111, "group_id": 2157, "email": "h.mazar@atdi.com" }, # Haim Mazar is Authorized Individual in itu-r-wp-5c +# {"person_id": 115544, "group_id": 2157, "email": "sergio.buonomo@itu.int" }, # Sergio Buonomo is Authorized Individual in itu-r-wp-5c +# {"person_id": 112105, "group_id": 51, "email": "Malcolm.Johnson@itu.int" }, # Malcom Johnson is Authorized Individual in itu-t +# {"person_id": 113911, "group_id": 1860, "email": "martin.adolph@itu.int" }, # Martin Adolph is Authorized Individual in itu-t-fg-dist +# {"person_id": 122779, "group_id": 2180, "email": "Leo.Lehmann@bakom.admin.ch" }, # Leo Lehmann is Authorized Individual in itu-t-fg-imt-2020 +# {"person_id": 103383, "group_id": 2180, "email": "peter.ashwoodsmith@huawei.com" }, # Peter Ashwood-Smith is Authorized Individual in itu-t-fg-imt-2020 +# {"person_id": 107300, "group_id": 1872, "email": "tatiana.kurakova@itu.int" }, # Tatiana Kurakova is Authorized Individual in itu-t-jca-cloud +# {"person_id": 106224, "group_id": 1872, "email": "mmorrow@cisco.com" }, # Monique Morrow is Authorized Individual in itu-t-jca-cloud +# {"person_id": 105714, "group_id": 1874, "email": "martin.euchner@itu.int" }, # Martin Euchner is Authorized Individual in itu-t-jca-cop +# {"person_id": 106475, "group_id": 2170, "email": "khj@etri.re.kr" }, # Hyoung-Jun Kim is Authorized Individual in itu-t-jca-iot-scc +# {"person_id": 122491, "group_id": 2170, "email": "tsbjcaiot@itu.int" }, # ITU Tsb is Authorized Individual in itu-t-jca-iot-scc +# {"person_id": 122490, "group_id": 2170, "email": "fabio.bigi@virgilio.it" }, # Fabio Bigi is Authorized Individual in itu-t-jca-iot-scc +# {"person_id": 116952, "group_id": 1927, "email": "chengying10@chinaunicom.cn" }, # Ying Cheng is Authorized Individual in itu-t-jca-sdn +# {"person_id": 111205, "group_id": 1927, "email": "t-egawa@ct.jp.nec.com" }, # Takashi Egawa is Authorized Individual in itu-t-jca-sdn +# {"person_id": 107298, "group_id": 2178, "email": "tsbsg11@itu.int" }, # Arshey Odedra is Authorized Individual in itu-tsbsg-11 +# {"person_id": 107300, "group_id": 77, "email": "tatiana.kurakova@itu.int" }, # Tatiana Kurakova is Authorized Individual in itu-t-sg-11 +# {"person_id": 112573, "group_id": 77, "email": 
"stefano.polidori@itu.int" }, # Stefano Polidori is Authorized Individual in itu-t-sg-11 +# {"person_id": 115401, "group_id": 84, "email": "spennock@rim.com" }, # Scott Pennock is Authorized Individual in itu-t-sg-12 +# {"person_id": 114255, "group_id": 84, "email": "hiroshi.ota@itu.int" }, # Hiroshi Ota is Authorized Individual in itu-t-sg-12 +# {"person_id": 113032, "group_id": 84, "email": "catherine.quinquis@orange.com" }, # Catherine Quinquis is Authorized Individual in itu-t-sg-12 +# {"person_id": 113031, "group_id": 84, "email": "gunilla.berndtsson@ericsson.com" }, # Gunilla Berndtsson is Authorized Individual in itu-t-sg-12 +# {"person_id": 113672, "group_id": 84, "email": "sarah.scott@itu.int" }, # Sarah Scott is Authorized Individual in itu-t-sg-12 +# {"person_id": 122459, "group_id": 81, "email": "chan@etri.re.kr" }, # Kangchan Lee is Authorized Individual in itu-t-sg-13 +# {"person_id": 107300, "group_id": 81, "email": "tatiana.kurakova@itu.int" }, # Tatiana Kurakova is Authorized Individual in itu-t-sg-13 +# {"person_id": 109145, "group_id": 62, "email": "lihan@chinamobile.com" }, # Han Li is Authorized Individual in itu-t-sg-15 +# {"person_id": 115875, "group_id": 62, "email": "mark.jones@xtera.com" }, # Mark Jones is Authorized Individual in itu-t-sg-15 +# {"person_id": 115846, "group_id": 62, "email": "peter.stassar@huawei.com" }, # Peter Stassar is Authorized Individual in itu-t-sg-15 +# {"person_id": 123452, "group_id": 62, "email": "sshew@ciena.com" }, # Stephen Shew is Authorized Individual in itu-t-sg-15 +# {"person_id": 109312, "group_id": 62, "email": "huubatwork@gmail.com" }, # Huub van Helvoort is Authorized Individual in itu-t-sg-15 +# {"person_id": 115874, "group_id": 62, "email": "tom.huber@tellabs.com" }, # Tom Huber is Authorized Individual in itu-t-sg-15 +# {"person_id": 110799, "group_id": 62, "email": "koike.yoshinori@lab.ntt.co.jp" }, # Yoshinori Koike is Authorized Individual in itu-t-sg-15 +# {"person_id": 110831, "group_id": 62, "email": "kam.lam@nokia.com" }, # Hing-Kam Lam is Authorized Individual in itu-t-sg-15 +# {"person_id": 114255, "group_id": 62, "email": "hiroshi.ota@itu.int" }, # Hiroshi Ota is Authorized Individual in itu-t-sg-15 +# {"person_id": 115874, "group_id": 62, "email": "tom.huber@coriant.com" }, # Tom Huber is Authorized Individual in itu-t-sg-15 +# {"person_id": 123014, "group_id": 62, "email": "jessy.rouyer@nokia.com" }, # Jessy Rouyer is Authorized Individual in itu-t-sg-15 +# {"person_id": 111160, "group_id": 62, "email": "ryoo@etri.re.kr" }, # Jeong-dong Ryoo is Authorized Individual in itu-t-sg-15 +# {"person_id": 107296, "group_id": 62, "email": "greg.jones@itu.int" }, # Greg Jones is Authorized Individual in itu-t-sg-15 +# {"person_id": 118539, "group_id": 72, "email": "rosa.angelesleondev@itu.int" }, # Rosa De Vivero is Authorized Individual in itu-t-sg-16 +# {"person_id": 123169, "group_id": 72, "email": "garysull@microsoft.com" }, # Gary Sullivan is Authorized Individual in itu-t-sg-16 +# {"person_id": 107746, "group_id": 72, "email": "hiwasaki.yusuke@lab.ntt.co.jp" }, # Yusuke Hiwasaki is Authorized Individual in itu-t-sg-16 +# {"person_id": 108160, "group_id": 1987, "email": "Christian.Groves@nteczone.com" }, # Christian Groves is Authorized Individual in itu-t-sg-16-q3 +# {"person_id": 118539, "group_id": 1987, "email": "rosa.angelesleondev@itu.int" }, # Rosa De Vivero is Authorized Individual in itu-t-sg-16-q3 +# {"person_id": 124354, "group_id": 76, "email": "jhbaek@kisa.or.kr" }, # Jonghyun Baek is Authorized 
Individual in itu-t-sg-17 +# {"person_id": 12898, "group_id": 1937, "email": "youki-k@is.aist-nara.ac.jp" }, # Youki Kadobayashi is Authorized Individual in itu-t-sg-17-q4 +# {"person_id": 113593, "group_id": 79, "email": "maite.comasbarnes@itu.int" }, # Maite Barnes is Authorized Individual in itu-t-sg-3 +# {"person_id": 122983, "group_id": 2000, "email": "cristina.bueti@itu.int" }, # Cristina Bueti is Authorized Individual in itu-t-sg-5 +# {"person_id": 112573, "group_id": 2072, "email": "stefano.polidori@itu.int" }, # Stefano Polidori is Authorized Individual in itu-t-sg-9 +# {"person_id": 113101, "group_id": 82, "email": "steve.trowbridge@alcatel-lucent.com" }, # Stephen Trowbridge is Authorized Individual in itu-t-tsag +# {"person_id": 20783, "group_id": 82, "email": "reinhard.scholl@itu.int" }, # Reinhard Scholl is Authorized Individual in itu-t-tsag +# {"person_id": 107300, "group_id": 1846, "email": "tatiana.kurakova@itu.int" }, # Tatiana Kurakova is Authorized Individual in itu-t-wp-5-13 +# {"person_id": 112107, "group_id": 69, "email": "michael.oreirdan@maawg.org" }, # Michael O'Reirdan is Authorized Individual in maawg +# {"person_id": 121870, "group_id": 75, "email": "liaisons@mef.net" }, # Liaison Mef is Authorized Individual in mef +# {"person_id": 112510, "group_id": 75, "email": "nan@mef.net" }, # Nan Chen is Authorized Individual in mef +# {"person_id": 124306, "group_id": 75, "email": "jason.wolfe@bell.ca" }, # WOLFE Jason is Authorized Individual in mef +# {"person_id": 114454, "group_id": 75, "email": "mike.bencheck@siamasystems.com" }, # Mike Bencheck is Authorized Individual in mef +# {"person_id": 115327, "group_id": 1888, "email": "klaus.moschner@ngmn.org" }, # Klaus Moschner is Authorized Individual in ngmn +# {"person_id": 123305, "group_id": 1888, "email": "office@ngmn.org" }, # Office Ngmn is Authorized Individual in ngmn +# {"person_id": 115160, "group_id": 1888, "email": "jminlee@sk.com" }, # Jongmin Lee is Authorized Individual in ngmn +# {"person_id": 117424, "group_id": 1936, "email": "patrick.gallagher@nist.gov" }, # Patrick Gallagher is Authorized Individual in nist +# {"person_id": 117431, "group_id": 1941, "email": "chet.ensign@xn--oasis-open-vt6e.org" }, # Chet Ensign is Authorized Individual in oasis +# {"person_id": 120913, "group_id": 2142, "email": "james.walker@tatacommunications.com" }, # James Walker is Authorized Individual in occ +# {"person_id": 6699, "group_id": 2142, "email": "dromasca@gmail.com" }, # Dan Romascanu is Authorized Individual in occ +# {"person_id": 118403, "group_id": 2142, "email": "richard.schell@verizon.com" }, # Rick Schell is Authorized Individual in occ +# {"person_id": 109676, "group_id": 83, "email": "Jonathan.Sadler@tellabs.com" }, # Jonathan Sadler is Authorized Individual in oif +# {"person_id": 122843, "group_id": 2122, "email": "tzhang@omaorg.org" }, # Tiffany Zhang is Authorized Individual in oma +# {"person_id": 116967, "group_id": 1947, "email": "JMudge@omaorg.org" }, # John Mudge is Authorized Individual in oma-architecture-wg +# {"person_id": 117423, "group_id": 1935, "email": "soley@omg.org" }, # Richard Soley is Authorized Individual in omg +# {"person_id": 110831, "group_id": 1858, "email": "kam.lam@nokia.com" }, # Hing-Kam Lam is Authorized Individual in onf +# {"person_id": 113674, "group_id": 1858, "email": "dan.pitt@opennetworking.org" }, # Dan Pitt is Authorized Individual in onf +# {"person_id": 118348, "group_id": 1984, "email": "dave.hood@ericsson.com" }, # Dave Hood is Authorized Individual in 
onf-arch-wg +# {"person_id": 116967, "group_id": 60, "email": "JMudge@omaorg.org" }, # John Mudge is Authorized Individual in open-mobile-alliance +# {"person_id": 112613, "group_id": 60, "email": "jerry.shih@att.com" }, # Jerry Shih is Authorized Individual in open-mobile-alliance +# {"person_id": 113067, "group_id": 60, "email": "laurent.goix@econocom.com" }, # Laurent Goix is Authorized Individual in open-mobile-alliance +# {"person_id": 112772, "group_id": 60, "email": "zhiyuan.hu@alcatel-sbell.com.cn" }, # Hu Zhiyuan is Authorized Individual in open-mobile-alliance +# {"person_id": 113064, "group_id": 60, "email": "thierry.berisot@telekom.de" }, # Thierry Berisot is Authorized Individual in open-mobile-alliance +# {"person_id": 124276, "group_id": 2212, "email": "jmisener@qti.qualcomm.com" }, # Jim Misener is Authorized Individual in sae-cell-v2x +# {"person_id": 124278, "group_id": 2212, "email": "Keith.Wilson@sae.org" }, # Keith Wilson is Authorized Individual in sae-cell-v2x +# {"person_id": 124277, "group_id": 2212, "email": "Elizabeth.Perry@sae.org" }, # Elizabeth Perry is Authorized Individual in sae-cell-v2x +# {"person_id": 117430, "group_id": 1940, "email": "admin@trustedcomputinggroup.org" }, # Lindsay Adamson is Authorized Individual in tcg +# {"person_id": 117422, "group_id": 1934, "email": "j.hietala@opengroup.org" }, # Jim Hietala is Authorized Individual in the-open-group +# {"person_id": 112104, "group_id": 53, "email": "rick@unicode.org" }, # Rick McGowan is Authorized Individual in unicode +# {"person_id": 112103, "group_id": 54, "email": "plh@w3.org" }, # Philippe Le Hégaret is Authorized Individual in w3c +# {"person_id": 120261, "group_id": 54, "email": "wendy@seltzer.org" }, # Wendy Seltzer is Authorized Individual in w3c +# {"person_id": 118020, "group_id": 1955, "email": "tiago@wballiance.com" }, # Tiago Rodrigues is Authorized Individual in wba +# {"person_id": 125489, "group_id": 1955, "email": "bruno@wballiance.com" }, # Bruno Tomas is Authorized Individual in wba +# {"person_id": 109129, "group_id": 70, "email": "smccammon@amsl.com" }, # Stephanie McCammon is Authorized Individual in zigbee-alliance +# ]} diff --git a/ietf/group/migrations/0006_remove_liason_contacts.py b/ietf/group/migrations/0006_remove_liason_contacts.py new file mode 100644 index 0000000000..13afd1a53e --- /dev/null +++ b/ietf/group/migrations/0006_remove_liason_contacts.py @@ -0,0 +1,270 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from collections import defaultdict + +from django.db import migrations + +from ietf.person.name import plain_name + + +def get_plain_name(person): + return person.plain or plain_name(person.name) + + +def forward(apps, schema_editor): + """Removes liaison_contact and liaison_cc_contact roles from all groups + + The IAB has decided to remove the liaison_contact and liaison_cc_contact + role concept from the datatracker as the roles are not well understood + and have not been maintained. 
+ """ + Role = apps.get_model("group", "Role") + GroupEvent = apps.get_model("group", "GroupEvent") + for role_name in ["liaison_contact", "liaison_cc_contact"]: + groups = defaultdict(list) + role_qs = Role.objects.filter(name_id=role_name) + for role in role_qs: + groups[role.group].append(role) + for group in groups: + desc = f"Removed {role_name}: {', '.join([get_plain_name(role.person) for role in groups[group]])}" + GroupEvent.objects.create( + group=group, + by_id=1, # (System) + desc=desc, + ) + role_qs.delete() + + +def reverse(apps, schema_editor): + """Intentionally does nothing""" + pass + + +class Migration(migrations.Migration): + dependencies = [ + ("group", "0005_remove_sdo_authorized_individuals"), + ] + + operations = [ + migrations.RunPython(forward, reverse), + ] + + +# At the time this migration was created, it would remove these objects +# {"liaison_contacts":[ +# { "role_name": "liaison_contact", "person_id": 107943, "group_id": 56, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Liaison Contact in 3gpp +# { "role_name": "liaison_contact", "person_id": 107737, "group_id": 56, "email": "lionel.morand@orange.com" }, # Lionel Morand is Liaison Contact in 3gpp +# { "role_name": "liaison_contact", "person_id": 127959, "group_id": 57, "email": "mahendra@qualcomm.com" }, # Mahendran Ac is Liaison Contact in 3gpp2 +# { "role_name": "liaison_contact", "person_id": 111440, "group_id": 2026, "email": "georg.mayer.huawei@gmx.com" }, # Georg Mayer is Liaison Contact in 3gpp-tsgct +# { "role_name": "liaison_contact", "person_id": 107943, "group_id": 2026, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Liaison Contact in 3gpp-tsgct +# { "role_name": "liaison_contact", "person_id": 107943, "group_id": 2027, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Liaison Contact in 3gpp-tsgct-ct1 +# { "role_name": "liaison_contact", "person_id": 107737, "group_id": 2027, "email": "lionel.morand@orange.com" }, # Lionel Morand is Liaison Contact in 3gpp-tsgct-ct1 +# { "role_name": "liaison_contact", "person_id": 107737, "group_id": 2410, "email": "lionel.morand@orange.com" }, # Lionel Morand is Liaison Contact in 3gpp-tsgct-ct3 +# { "role_name": "liaison_contact", "person_id": 107943, "group_id": 2410, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Liaison Contact in 3gpp-tsgct-ct3 +# { "role_name": "liaison_contact", "person_id": 107737, "group_id": 2028, "email": "lionel.morand@orange.com" }, # Lionel Morand is Liaison Contact in 3gpp-tsgct-ct4 +# { "role_name": "liaison_contact", "person_id": 107943, "group_id": 2028, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Liaison Contact in 3gpp-tsgct-ct4 +# { "role_name": "liaison_contact", "person_id": 107943, "group_id": 2029, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Liaison Contact in 3gpp-tsgran +# { "role_name": "liaison_contact", "person_id": 111440, "group_id": 2029, "email": "georg.mayer.huawei@gmx.com" }, # Georg Mayer is Liaison Contact in 3gpp-tsgran +# { "role_name": "liaison_contact", "person_id": 107737, "group_id": 2030, "email": "lionel.morand@orange.com" }, # Lionel Morand is Liaison Contact in 3gpp-tsgran-ran2 +# { "role_name": "liaison_contact", "person_id": 107943, "group_id": 2030, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Liaison Contact in 3gpp-tsgran-ran2 +# { "role_name": "liaison_contact", "person_id": 107943, "group_id": 2023, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Liaison Contact in 3gpp-tsgsa +# { "role_name": 
"liaison_contact", "person_id": 111440, "group_id": 2023, "email": "georg.mayer.huawei@gmx.com" }, # Georg Mayer is Liaison Contact in 3gpp-tsgsa +# { "role_name": "liaison_contact", "person_id": 107943, "group_id": 2024, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Liaison Contact in 3gpp-tsgsa-sa2 +# { "role_name": "liaison_contact", "person_id": 107737, "group_id": 2024, "email": "lionel.morand@orange.com" }, # Lionel Morand is Liaison Contact in 3gpp-tsgsa-sa2 +# { "role_name": "liaison_contact", "person_id": 107943, "group_id": 2025, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Liaison Contact in 3gpp-tsgsa-sa3 +# { "role_name": "liaison_contact", "person_id": 107737, "group_id": 2025, "email": "lionel.morand@orange.com" }, # Lionel Morand is Liaison Contact in 3gpp-tsgsa-sa3 +# { "role_name": "liaison_contact", "person_id": 107737, "group_id": 1902, "email": "lionel.morand@orange.com" }, # Lionel Morand is Liaison Contact in 3gpp-tsgsa-sa4 +# { "role_name": "liaison_contact", "person_id": 107943, "group_id": 1902, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Liaison Contact in 3gpp-tsgsa-sa4 +# { "role_name": "liaison_contact", "person_id": 107943, "group_id": 2031, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Liaison Contact in 3gpp-tsgt-wg2 +# { "role_name": "liaison_contact", "person_id": 107737, "group_id": 2031, "email": "lionel.morand@orange.com" }, # Lionel Morand is Liaison Contact in 3gpp-tsgt-wg2 +# { "role_name": "liaison_contact", "person_id": 106345, "group_id": 1396, "email": "Menachem.Dodge@ecitele.com" }, # Menachem Dodge is Liaison Contact in adslmib +# { "role_name": "liaison_contact", "person_id": 108054, "group_id": 1956, "email": "shengjiang@bupt.edu.cn" }, # Sheng Jiang is Liaison Contact in anima +# { "role_name": "liaison_contact", "person_id": 11834, "group_id": 1956, "email": "tte@cs.fau.de" }, # Toerless Eckert is Liaison Contact in anima +# { "role_name": "liaison_contact", "person_id": 21684, "group_id": 1805, "email": "barryleiba@computer.org" }, # Barry Leiba is Liaison Contact in appsawg +# { "role_name": "liaison_contact", "person_id": 102154, "group_id": 1805, "email": "alexey.melnikov@isode.com" }, # Alexey Melnikov is Liaison Contact in appsawg +# { "role_name": "liaison_contact", "person_id": 107279, "group_id": 1805, "email": "yaojk@cnnic.cn" }, # Jiankang Yao is Liaison Contact in appsawg +# { "role_name": "liaison_contact", "person_id": 100754, "group_id": 941, "email": "tom.taylor@rogers.com" }, # Tom Taylor is Liaison Contact in avt +# { "role_name": "liaison_contact", "person_id": 105873, "group_id": 941, "email": "ron.even.tlv@gmail.com" }, # Roni Even is Liaison Contact in avt +# { "role_name": "liaison_contact", "person_id": 105097, "group_id": 1813, "email": "keith.drage@alcatel-lucent.com" }, # Keith Drage is Liaison Contact in avtext +# { "role_name": "liaison_contact", "person_id": 101923, "group_id": 1813, "email": "jonathan@vidyo.com" }, # Jonathan Lennox is Liaison Contact in avtext +# { "role_name": "liaison_contact", "person_id": 108279, "group_id": 1960, "email": "martin.vigoureux@alcatel-lucent.com" }, # Martin Vigoureux is Liaison Contact in bess +# { "role_name": "liaison_contact", "person_id": 109666, "group_id": 66, "email": "g.white@cablelabs.com" }, # Greg White is Liaison Contact in cablelabs +# { "role_name": "liaison_contact", "person_id": 117421, "group_id": 1933, "email": "chairman@dmtf.org" }, # Winston Bumpus is Liaison Contact in dmtf +# { "role_name": 
"liaison_contact", "person_id": 127961, "group_id": 1739, "email": "statements@ietf.org" }, # statements@ietf.org is Liaison Contact in drinks +# { "role_name": "liaison_contact", "person_id": 109505, "group_id": 1787, "email": "bernie@ietf.hoeneisen.ch" }, # Bernie Hoeneisen is Liaison Contact in e2md +# { "role_name": "liaison_contact", "person_id": 109059, "group_id": 1787, "email": "ray.bellis@nominet.org.uk" }, # Ray Bellis is Liaison Contact in e2md +# { "role_name": "liaison_contact", "person_id": 116529, "group_id": 1919, "email": "istvan@ecma-interational.org" }, # Istvan Sebestyen is Liaison Contact in ecma-tc39 +# { "role_name": "liaison_contact", "person_id": 127964, "group_id": 1919, "email": "johnneumann.openstrat@gmail.com" }, # John Neuman is Liaison Contact in ecma-tc39 +# { "role_name": "liaison_contact", "person_id": 106012, "group_id": 1643, "email": "marc.linsner@cisco.com" }, # Marc Linsner is Liaison Contact in ecrit +# { "role_name": "liaison_contact", "person_id": 107084, "group_id": 1643, "email": "rmarshall@telecomsys.com" }, # Roger Marshall is Liaison Contact in ecrit +# { "role_name": "liaison_contact", "person_id": 116363, "group_id": 1915, "email": "e2nasupport@etsi.org" }, # Sonia Compans is Liaison Contact in etsi-e2na +# { "role_name": "liaison_contact", "person_id": 126473, "group_id": 2261, "email": "isgsupport@etsi.org" }, # Sonia Compan is Liaison Contact in etsi-isg-sai +# { "role_name": "liaison_contact", "person_id": 128316, "group_id": 2301, "email": "GSMALiaisons@gsma.com" }, # David Pollington is Liaison Contact in gsma-ztc +# { "role_name": "liaison_contact", "person_id": 3056, "group_id": 1875, "email": "shares@ndzh.com" }, # Susan Hares is Liaison Contact in i2rs +# { "role_name": "liaison_contact", "person_id": 105046, "group_id": 1875, "email": "jhaas@pfrc.org" }, # Jeffrey Haas is Liaison Contact in i2rs +# { "role_name": "liaison_contact", "person_id": 120845, "group_id": 61, "email": "tale@dd.org" }, # David Lawrence is Liaison Contact in icann-board-of-directors +# { "role_name": "liaison_contact", "person_id": 112851, "group_id": 2105, "email": "pthaler@broadcom.com" }, # Patricia Thaler is Liaison Contact in ieee-802 +# { "role_name": "liaison_contact", "person_id": 127968, "group_id": 2105, "email": "p.nikolich@ieee.org" }, # Paul Nikolich is Liaison Contact in ieee-802 +# { "role_name": "liaison_contact", "person_id": 19651, "group_id": 63, "email": "glenn.parsons@ericsson.com" }, # Glenn Parsons is Liaison Contact in ieee-802-1 +# { "role_name": "liaison_contact", "person_id": 123875, "group_id": 63, "email": "JMessenger@advaoptical.com" }, # John Messenger is Liaison Contact in ieee-802-1 +# { "role_name": "liaison_contact", "person_id": 127968, "group_id": 63, "email": "p.nikolich@ieee.org" }, # Paul Nikolich is Liaison Contact in ieee-802-1 +# { "role_name": "liaison_contact", "person_id": 117415, "group_id": 1862, "email": "Adrian.P.Stephens@intel.com" }, # Adrian Stephens is Liaison Contact in ieee-802-11 +# { "role_name": "liaison_contact", "person_id": 106284, "group_id": 1862, "email": "dstanley@agere.com" }, # Dorothy Stanley is Liaison Contact in ieee-802-11 +# { "role_name": "liaison_contact", "person_id": 128345, "group_id": 2302, "email": "liaison@iowngf.org" }, # Forum Iown is Liaison Contact in iown-global-forum +# { "role_name": "liaison_contact", "person_id": 117428, "group_id": 1939, "email": "walter.fumy@bdr.de" }, # Walter Fumy is Liaison Contact in iso-iec-jtc1-sc27 +# { "role_name": "liaison_contact", 
"person_id": 117429, "group_id": 1939, "email": "krystyna.passia@din.de" }, # Krystyna Passia is Liaison Contact in iso-iec-jtc1-sc27 +# { "role_name": "liaison_contact", "person_id": 151289, "group_id": 50, "email": "koike@itscj.ipsj.or.jp" }, # Mayumi Koike is Liaison Contact in iso-iec-jtc1-sc29 +# { "role_name": "liaison_contact", "person_id": 151289, "group_id": 2110, "email": "koike@itscj.ipsj.or.jp" }, # Mayumi Koike is Liaison Contact in iso-iec-jtc1-sc29-wg1 +# { "role_name": "liaison_contact", "person_id": 114435, "group_id": 74, "email": "watanabe@itscj.ipsj.or.jp" }, # Shinji Watanabe is Liaison Contact in iso-iec-jtc1-sc29-wg11 +# { "role_name": "liaison_contact", "person_id": 112106, "group_id": 49, "email": "jooran@kisi.or.kr" }, # Jooran Lee is Liaison Contact in iso-iec-jtc1-sc6 +# { "role_name": "liaison_contact", "person_id": 113587, "group_id": 49, "email": "dykim@cnu.kr" }, # Chungnam University is Liaison Contact in iso-iec-jtc1-sc6 +# { "role_name": "liaison_contact", "person_id": 117427, "group_id": 1938, "email": "secretariat@jtc1-sc7.org" }, # Witold Suryn is Liaison Contact in iso-iec-jtc1-sc7 +# { "role_name": "liaison_contact", "person_id": 117426, "group_id": 1938, "email": "chair@jtc1-sc7.org" }, # Francois Coallier is Liaison Contact in iso-iec-jtc1-sc7 +# { "role_name": "liaison_contact", "person_id": 127971, "group_id": 68, "email": "sabine.donnardcusse@afnor.org" }, # sabine.donnardcusse@afnor.org is Liaison Contact in isotc46 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2057, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 1890, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-r +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2058, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-r-wp5a +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2059, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-r-wp5d +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2060, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-r-wp8a +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2061, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-r-wp8f +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 51, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2063, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-fg-cloud +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 1860, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-fg-dist +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2064, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-fg-iptv +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2065, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-fg-ngnm +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2062, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-ipv6-group +# { "role_name": "liaison_cc_contact", "person_id": 127958, 
"group_id": 1872, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-jca-cloud +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 1874, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-jca-cop +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2066, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-jca-idm +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 1927, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-jca-sdn +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 65, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-mpls +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 52, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-ngn +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2067, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-ngnmfg +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 77, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-11 +# { "role_name": "liaison_contact", "person_id": 128236, "group_id": 77, "email": "denis.andreev@itu.int" }, # Denis ANDREEV is Liaison Contact in itu-t-sg-11 +# { "role_name": "liaison_contact", "person_id": 107300, "group_id": 77, "email": "tatiana.kurakova@itu.int" }, # Tatiana Kurakova is Liaison Contact in itu-t-sg-11 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2074, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-11-q5 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2075, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-11-wp2 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 84, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-12 +# { "role_name": "liaison_contact", "person_id": 102900, "group_id": 84, "email": "acmorton@att.com" }, # Al Morton is Liaison Contact in itu-t-sg-12 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2076, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-12-q12 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2077, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-12-q17 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2082, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-13-q11 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2078, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-13-q3 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2079, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-13-q5 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2080, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-13-q7 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2081, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-13-q9 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2083, 
"email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-13-wp3 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2084, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-13-wp4 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2085, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-13-wp5 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2086, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-14 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 62, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-15 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2087, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-15-q1 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2092, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-15-q10 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2093, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-15-q11 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2094, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-15-q12 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2095, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-15-q14 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2096, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-15-q15 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2088, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-15-q3 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2089, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-15-q4 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2090, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-15-q6 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2091, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-15-q9 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2097, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-15-wp1 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2098, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-15-wp3 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 72, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-16 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2101, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-16-q10 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 1987, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-16-q3 +# { "role_name": "liaison_contact", "person_id": 118539, "group_id": 1987, "email": "rosa.angelesleondev@itu.int" }, # Rosa De Vivero is Liaison Contact in itu-t-sg-16-q3 +# { "role_name": "liaison_cc_contact", 
"person_id": 127958, "group_id": 2099, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-16-q8 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2100, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-16-q9 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 76, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-17 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2102, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-17-q2 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 1937, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-17-q4 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 1954, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-17-tsb +# { "role_name": "liaison_contact", "person_id": 12898, "group_id": 1954, "email": "youki-k@is.aist-nara.ac.jp" }, # Youki Kadobayashi is Liaison Contact in itu-t-sg-17-tsb +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 78, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-2 +# { "role_name": "liaison_contact", "person_id": 127962, "group_id": 78, "email": "dr.guinena@ntra.gov.eg" }, # dr.guinena@ntra.gov.eg is Liaison Contact in itu-t-sg-2 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2103, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-20 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2073, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-2-q1 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 79, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-3 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2068, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-4 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2000, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-5 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2069, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-6 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2070, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-7 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2071, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-8 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 2072, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-sg-9 +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 82, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-tsag +# { "role_name": "liaison_contact", "person_id": 127957, "group_id": 82, "email": "tsbtsag@itu.int" }, # Bilel Jamoussi is Liaison Contact in itu-t-tsag +# { "role_name": "liaison_cc_contact", "person_id": 127958, "group_id": 1846, "email": "itu-t-liaison@iab.org" }, # itu-t liaison is Liaison CC Contact in itu-t-wp-5-13 +# { "role_name": "liaison_contact", "person_id": 10083, "group_id": 
1882, "email": "paul.hoffman@vpnc.org" }, # Paul Hoffman is Liaison Contact in json +# { "role_name": "liaison_contact", "person_id": 111178, "group_id": 1882, "email": "mamille2@cisco.com" }, # Matthew Miller is Liaison Contact in json +# { "role_name": "liaison_contact", "person_id": 106881, "group_id": 1593, "email": "vach.kompella@alcatel.com" }, # Vach Kompella is Liaison Contact in l2vpn +# { "role_name": "liaison_contact", "person_id": 19987, "group_id": 1593, "email": "danny@arbor.net" }, # Danny McPherson is Liaison Contact in l2vpn +# { "role_name": "liaison_contact", "person_id": 2329, "group_id": 1593, "email": "stbryant@cisco.com" }, # Stewart Bryant is Liaison Contact in l2vpn +# { "role_name": "liaison_contact", "person_id": 101552, "group_id": 1593, "email": "Shane.Amante@Level3.com" }, # Shane Amante is Liaison Contact in l2vpn +# { "role_name": "liaison_contact", "person_id": 110305, "group_id": 1877, "email": "jason.weil@twcable.com" }, # Jason Weil is Liaison Contact in lmap +# { "role_name": "liaison_contact", "person_id": 6699, "group_id": 1877, "email": "dromasca@avaya.com" }, # Dan Romascanu is Liaison Contact in lmap +# { "role_name": "liaison_contact", "person_id": 127969, "group_id": 69, "email": "madkins@fb.com" }, # Mike Adkins is Liaison Contact in maawg +# { "role_name": "liaison_contact", "person_id": 127970, "group_id": 69, "email": "technical-chair@mailman.m3aawg.org" }, # technical-chair@mailman.m3aawg.org is Liaison Contact in maawg +# { "role_name": "liaison_contact", "person_id": 112512, "group_id": 75, "email": "rraghu@ciena.com" }, # Raghu Ranganathan is Liaison Contact in mef +# { "role_name": "liaison_contact", "person_id": 119947, "group_id": 1755, "email": "mrw@lilacglade.org" }, # Margaret Cullen is Liaison Contact in mif +# { "role_name": "liaison_contact", "person_id": 109884, "group_id": 1755, "email": "denghui02@hotmail.com" }, # Hui Deng is Liaison Contact in mif +# { "role_name": "liaison_contact", "person_id": 128292, "group_id": 1936, "email": "james.olthoff@nist.gov" }, # James Olthoff is Liaison Contact in nist +# { "role_name": "liaison_contact", "person_id": 104183, "group_id": 1537, "email": "john.loughney@nokia.com" }, # John Loughney is Liaison Contact in nsis +# { "role_name": "liaison_contact", "person_id": 105786, "group_id": 1840, "email": "matthew.bocci@nokia.com" }, # Matthew Bocci is Liaison Contact in nvo3 +# { "role_name": "liaison_contact", "person_id": 112438, "group_id": 1840, "email": "bensons@queuefull.net" }, # Benson Schliesser is Liaison Contact in nvo3 +# { "role_name": "liaison_contact", "person_id": 107943, "group_id": 2296, "email": "3GPPLiaison@etsi.org" }, # Susanna Kooistra is Liaison Contact in o3gpptsgran3 +# { "role_name": "liaison_contact", "person_id": 127966, "group_id": 1941, "email": "chet.ensign@oasis-open.org" }, # chet.ensign@oasis-open.org is Liaison Contact in oasis +# { "role_name": "liaison_contact", "person_id": 117423, "group_id": 1935, "email": "soley@omg.org" }, # Richard Soley is Liaison Contact in omg +# { "role_name": "liaison_contact", "person_id": 127963, "group_id": 1858, "email": "dan.pitt@opennetworkingfoundation.org" }, # dan.pitt@opennetworkingfoundation.org is Liaison Contact in onf +# { "role_name": "liaison_contact", "person_id": 108304, "group_id": 1599, "email": "gunter.van_de_velde@nokia.com" }, # Gunter Van de Velde is Liaison Contact in opsec +# { "role_name": "liaison_contact", "person_id": 111647, "group_id": 1599, "email": "kk@google.com" }, # Chittimaneni Kk is 
Liaison Contact in opsec +# { "role_name": "liaison_contact", "person_id": 111656, "group_id": 1599, "email": "warren@kumari.net" }, # Warren Kumari is Liaison Contact in opsec +# { "role_name": "liaison_contact", "person_id": 106471, "group_id": 1188, "email": "dbrungard@att.com" }, # Deborah Brungard is Liaison Contact in ospf +# { "role_name": "liaison_contact", "person_id": 104198, "group_id": 1188, "email": "adrian@olddog.co.uk" }, # Adrian Farrel is Liaison Contact in ospf +# { "role_name": "liaison_contact", "person_id": 104816, "group_id": 1188, "email": "akr@cisco.com" }, # Abhay Roy is Liaison Contact in ospf +# { "role_name": "liaison_contact", "person_id": 10784, "group_id": 1188, "email": "acee@redback.com" }, # Acee Lindem is Liaison Contact in ospf +# { "role_name": "liaison_contact", "person_id": 108123, "group_id": 1819, "email": "Gabor.Bajko@nokia.com" }, # Gabor Bajko is Liaison Contact in paws +# { "role_name": "liaison_contact", "person_id": 106987, "group_id": 1819, "email": "br@brianrosen.net" }, # Brian Rosen is Liaison Contact in paws +# { "role_name": "liaison_cc_contact", "person_id": 122823, "group_id": 1630, "email": "ketant.ietf@gmail.com" }, # Ketan Talaulikar is Liaison CC Contact in pce +# { "role_name": "liaison_contact", "person_id": 125031, "group_id": 1630, "email": "andrew.stone@nokia.com" }, # Andrew Stone is Liaison Contact in pce +# { "role_name": "liaison_contact", "person_id": 108213, "group_id": 1630, "email": "julien.meuric@orange.com" }, # Julien Meuric is Liaison Contact in pce +# { "role_name": "liaison_contact", "person_id": 111477, "group_id": 1630, "email": "dd@dhruvdhody.com" }, # Dhruv Dhody is Liaison Contact in pce +# { "role_name": "liaison_contact", "person_id": 112773, "group_id": 1701, "email": "lars.eggert@nokia.com" }, # Lars Eggert is Liaison Contact in pcn +# { "role_name": "liaison_contact", "person_id": 12671, "group_id": 1437, "email": "adamson@itd.nrl.navy.mil" }, # Brian Adamson is Liaison Contact in rmt +# { "role_name": "liaison_contact", "person_id": 100609, "group_id": 1437, "email": "lorenzo@vicisano.net" }, # Lorenzo Vicisano is Liaison Contact in rmt +# { "role_name": "liaison_contact", "person_id": 115213, "group_id": 1730, "email": "maria.ines.robles@ericsson.com" }, # Ines Robles is Liaison Contact in roll +# { "role_name": "liaison_contact", "person_id": 110721, "group_id": 1820, "email": "ted.ietf@gmail.com" }, # Ted Hardie is Liaison Contact in rtcweb +# { "role_name": "liaison_contact", "person_id": 104294, "group_id": 1820, "email": "magnus.westerlund@ericsson.com" }, # Magnus Westerlund is Liaison Contact in rtcweb +# { "role_name": "liaison_contact", "person_id": 105791, "group_id": 1820, "email": "fluffy@iii.ca" }, # Cullen Jennings is Liaison Contact in rtcweb +# { "role_name": "liaison_contact", "person_id": 105906, "group_id": 1910, "email": "james.n.guichard@futurewei.com" }, # Jim Guichard is Liaison Contact in sfc +# { "role_name": "liaison_contact", "person_id": 3862, "group_id": 1910, "email": "jmh@joelhalpern.com" }, # Joel Halpern is Liaison Contact in sfc +# { "role_name": "liaison_contact", "person_id": 127960, "group_id": 1462, "email": "sipcore@ietf.org" }, # sipcore@ietf.org is Liaison Contact in sip +# { "role_name": "liaison_contact", "person_id": 103769, "group_id": 1762, "email": "adam@nostrum.com" }, # Adam Roach is Liaison Contact in sipcore +# { "role_name": "liaison_contact", "person_id": 108554, "group_id": 1762, "email": "pkyzivat@alum.mit.edu" }, # Paul Kyzivat is Liaison Contact 
in sipcore +# { "role_name": "liaison_contact", "person_id": 103539, "group_id": 1542, "email": "gonzalo.camarillo@ericsson.com" }, # Gonzalo Camarillo is Liaison Contact in sipping +# { "role_name": "liaison_contact", "person_id": 103612, "group_id": 1542, "email": "jf.mule@cablelabs.com" }, # Jean-Francois Mule is Liaison Contact in sipping +# { "role_name": "liaison_contact", "person_id": 3862, "group_id": 1905, "email": "jmh@joelhalpern.com" }, # Joel Halpern is Liaison Contact in spring +# { "role_name": "liaison_contact", "person_id": 109802, "group_id": 1905, "email": "aretana.ietf@gmail.com" }, # Alvaro Retana is Liaison Contact in spring +# { "role_name": "liaison_contact", "person_id": 107172, "group_id": 1905, "email": "bruno.decraene@orange.com" }, # Bruno Decraene is Liaison Contact in spring +# { "role_name": "liaison_contact", "person_id": 5376, "group_id": 1899, "email": "housley@vigilsec.com" }, # Russ Housley is Liaison Contact in stir +# { "role_name": "liaison_contact", "person_id": 103961, "group_id": 1899, "email": "rjsparks@nostrum.com" }, # Robert Sparks is Liaison Contact in stir +# { "role_name": "liaison_contact", "person_id": 117430, "group_id": 1940, "email": "admin@trustedcomputinggroup.org" }, # Lindsay Adamson is Liaison Contact in tcg +# { "role_name": "liaison_contact", "person_id": 110932, "group_id": 1985, "email": "oscar.gonzalezdedios@telefonica.com" }, # Oscar de Dios is Liaison Contact in teas +# { "role_name": "liaison_contact", "person_id": 10064, "group_id": 1985, "email": "lberger@labn.net" }, # Lou Berger is Liaison Contact in teas +# { "role_name": "liaison_contact", "person_id": 114351, "group_id": 1985, "email": "vbeeram@juniper.net" }, # Vishnu Beeram is Liaison Contact in teas +# { "role_name": "liaison_contact", "person_id": 117422, "group_id": 1934, "email": "j.hietala@opengroup.org" }, # Jim Hietala is Liaison Contact in the-open-group +# { "role_name": "liaison_contact", "person_id": 106414, "group_id": 1709, "email": "yaakovjstein@gmail.com" }, # Yaakov Stein is Liaison Contact in tictoc +# { "role_name": "liaison_contact", "person_id": 4857, "group_id": 1709, "email": "kodonog@pobox.com" }, # Karen O'Donoghue is Liaison Contact in tictoc +# { "role_name": "liaison_contact", "person_id": 144713, "group_id": 2420, "email": "liaisons@tmforum.org" }, # liaisons@tmforum.org is Liaison Contact in tmforum +# { "role_name": "liaison_contact", "person_id": 112773, "group_id": 1324, "email": "lars@eggert.org" }, # Lars Eggert is Liaison Contact in tsv +# { "role_name": "liaison_contact", "person_id": 112104, "group_id": 53, "email": "rick@unicode.org" }, # Rick McGowan is Liaison Contact in unicode +# { "role_name": "liaison_contact", "person_id": 105907, "group_id": 1864, "email": "stpeter@stpeter.im" }, # Peter Saint-Andre is Liaison Contact in videocodec +# { "role_name": "liaison_contact", "person_id": 120261, "group_id": 54, "email": "wseltzer@w3.org" }, # Wendy Seltzer is Liaison Contact in w3c +# { "role_name": "liaison_contact", "person_id": 112103, "group_id": 54, "email": "plh@w3.org" }, # Philippe Le Hégaret is Liaison Contact in w3c +# { "role_name": "liaison_contact", "person_id": 107520, "group_id": 1957, "email": "shida@ntt-at.com" }, # Shida Schubert is Liaison Contact in webpush +# { "role_name": "liaison_contact", "person_id": 110049, "group_id": 1957, "email": "jhildebr@cisco.com" }, # Joe Hildebrand is Liaison Contact in webpush +# { "role_name": "liaison_contact", "person_id": 103769, "group_id": 1601, "email": 
"adam@nostrum.com" }, # Adam Roach is Liaison Contact in xcon +# { "role_name": "liaison_contact", "person_id": 107520, "group_id": 1815, "email": "shida@ntt-at.com" }, # Shida Schubert is Liaison Contact in xrblock +# { "role_name": "liaison_contact", "person_id": 6699, "group_id": 1815, "email": "dromasca@avaya.com" }, # Dan Romascanu is Liaison Contact in xrblock +# ]} diff --git a/ietf/group/migrations/0007_used_roles.py b/ietf/group/migrations/0007_used_roles.py new file mode 100644 index 0000000000..0dfa79fa03 --- /dev/null +++ b/ietf/group/migrations/0007_used_roles.py @@ -0,0 +1,49 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations + + +def forward(apps, schema_editor): + Group = apps.get_model("group", "Group") + GroupFeatures = apps.get_model("group", "GroupFeatures") + iab = Group.objects.get(acronym="iab") + iab.used_roles = [ + "chair", + "delegate", + "exofficio", + "liaison", + "liaison_coordinator", + "member", + ] + iab.save() + GroupFeatures.objects.filter(type_id="ietf").update( + default_used_roles=[ + "ad", + "member", + "comdir", + "delegate", + "execdir", + "recman", + "secr", + "chair", + ] + ) + + +def reverse(apps, schema_editor): + Group = apps.get_model("group", "Group") + iab = Group.objects.get(acronym="iab") + iab.used_roles = [] + iab.save() + # Intentionally not putting trac-* back into grouptype ietf default_used_roles + + +class Migration(migrations.Migration): + dependencies = [ + ("group", "0006_remove_liason_contacts"), + ("name", "0018_alter_rolenames"), + ] + + operations = [ + migrations.RunPython(forward, reverse), + ] diff --git a/ietf/group/migrations/0008_alter_group_used_roles_and_more.py b/ietf/group/migrations/0008_alter_group_used_roles_and_more.py new file mode 100644 index 0000000000..28f345df00 --- /dev/null +++ b/ietf/group/migrations/0008_alter_group_used_roles_and_more.py @@ -0,0 +1,107 @@ +# Generated by Django 4.2.23 on 2025-08-15 16:46 + +from django.db import migrations, models +import ietf.group.models +import ietf.name.models +import ietf.utils.db +import ietf.utils.validators + + +class Migration(migrations.Migration): + + dependencies = [ + ("group", "0007_used_roles"), + ] + + operations = [ + migrations.AlterField( + model_name="group", + name="used_roles", + field=models.JSONField( + blank=True, + default=list, + help_text="Leave an empty list to get the group_type's default used roles", + max_length=256, + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="admin_roles", + field=ietf.utils.db.EmptyAwareJSONField( + default=ietf.group.models.default_admin_roles, max_length=64 + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="default_used_roles", + field=ietf.utils.db.EmptyAwareJSONField(default=list, max_length=256), + ), + migrations.AlterField( + model_name="groupfeatures", + name="docman_roles", + field=ietf.utils.db.EmptyAwareJSONField( + default=ietf.group.models.default_docman_roles, max_length=128 + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="groupman_authroles", + field=ietf.utils.db.EmptyAwareJSONField( + default=ietf.group.models.default_groupman_authroles, max_length=128 + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="groupman_roles", + field=ietf.utils.db.EmptyAwareJSONField( + default=ietf.group.models.default_groupman_roles, max_length=128 + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="material_types", + field=ietf.utils.db.EmptyAwareJSONField( + 
default=ietf.group.models.default_material_types, max_length=64 + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="matman_roles", + field=ietf.utils.db.EmptyAwareJSONField( + default=ietf.group.models.default_matman_roles, max_length=128 + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="role_order", + field=ietf.utils.db.EmptyAwareJSONField( + default=ietf.group.models.default_role_order, + help_text="The order in which roles are shown, for instance on photo pages. Enter valid JSON.", + max_length=128, + ), + ), + migrations.AlterField( + model_name="groupfeatures", + name="session_purposes", + field=ietf.utils.db.EmptyAwareJSONField( + default=list, + help_text="Allowed session purposes for this group type", + max_length=256, + validators=[ + ietf.utils.validators.JSONForeignKeyListValidator( + ietf.name.models.SessionPurposeName + ) + ], + ), + ), + migrations.AlterField( + model_name="grouphistory", + name="used_roles", + field=models.JSONField( + blank=True, + default=list, + help_text="Leave an empty list to get the group_type's default used roles", + max_length=256, + ), + ), + ] diff --git a/ietf/group/milestones.py b/ietf/group/milestones.py index 64ebb389e2..52f2eaebee 100644 --- a/ietf/group/milestones.py +++ b/ietf/group/milestones.py @@ -369,7 +369,7 @@ def save_milestone_form(f): email_milestones_changed(request, group, changes, states) if milestone_set == "charter": - return redirect('ietf.doc.views_doc.document_main', name=group.charter.canonical_name()) + return redirect('ietf.doc.views_doc.document_main', name=group.charter.name) else: return HttpResponseRedirect(group.about_url()) else: @@ -399,7 +399,7 @@ def save_milestone_form(f): can_change_uses_milestone_dates=can_change_uses_milestone_dates)) @login_required -def reset_charter_milestones(request, group_type, acronym): +def reset_charter_milestones(request, acronym, group_type=None): """Reset charter milestones to the currently in-use milestones.""" group = get_group_or_404(acronym, group_type) if not group.features.has_milestones: diff --git a/ietf/group/models.py b/ietf/group/models.py index 52549e8cc1..a7e3c6616e 100644 --- a/ietf/group/models.py +++ b/ietf/group/models.py @@ -3,7 +3,6 @@ import email.utils -import jsonfield import os import re @@ -21,7 +20,7 @@ AgendaTypeName, AgendaFilterTypeName, ExtResourceName, SessionPurposeName, AppealArtifactTypeName ) from ietf.person.models import Email, Person -from ietf.utils.db import IETFJSONField +from ietf.utils.db import EmptyAwareJSONField from ietf.utils.mail import formataddr, send_mail_text from ietf.utils import log from ietf.utils.models import ForeignKey, OneToOneField @@ -46,7 +45,7 @@ class GroupInfo(models.Model): unused_states = models.ManyToManyField('doc.State', help_text="Document states that have been disabled for the group.", blank=True) unused_tags = models.ManyToManyField(DocTagName, help_text="Document tags that have been disabled for the group.", blank=True) - used_roles = jsonfield.JSONField(max_length=256, blank=True, default=[], help_text="Leave an empty list to get the group_type's default used roles") + used_roles = models.JSONField(max_length=256, blank=True, default=list, help_text="Leave an empty list to get the group_type's default used roles") uses_milestone_dates = models.BooleanField(default=True) @@ -112,6 +111,9 @@ def active_wgs(self): def closed_wgs(self): return self.wgs().exclude(state__in=Group.ACTIVE_STATE_IDS) + def areas(self): + return self.get_queryset().filter(type="area") + 
def with_meetings(self): return self.get_queryset().filter(type__features__has_meetings=True) @@ -235,6 +237,36 @@ def chat_archive_url(self): ) +# JSONFields need callable defaults that work with migrations to avoid sharing +# data structures between instances. These helpers provide that. +def default_material_types(): + return ["slides"] + + +def default_admin_roles(): + return ["chair"] + + +def default_docman_roles(): + return ["ad", "chair", "delegate", "secr"] + + +def default_groupman_roles(): + return ["ad", "chair"] + + +def default_groupman_authroles(): + return ["Secretariat"] + + +def default_matman_roles(): + return ["ad", "chair", "delegate", "secr"] + + +def default_role_order(): + return ["chair", "secr", "member"] + + class GroupFeatures(models.Model): type = OneToOneField(GroupTypeName, primary_key=True, null=False, related_name='features') #history = HistoricalRecords() @@ -268,16 +300,16 @@ class GroupFeatures(models.Model): agenda_type = models.ForeignKey(AgendaTypeName, null=True, default="ietf", on_delete=CASCADE) about_page = models.CharField(max_length=64, blank=False, default="ietf.group.views.group_about" ) default_tab = models.CharField(max_length=64, blank=False, default="ietf.group.views.group_about" ) - material_types = IETFJSONField(max_length=64, accepted_empty_values=[[], {}], blank=False, default=["slides"]) - default_used_roles = IETFJSONField(max_length=256, accepted_empty_values=[[], {}], blank=False, default=[]) - admin_roles = IETFJSONField(max_length=64, accepted_empty_values=[[], {}], blank=False, default=["chair"]) # Trac Admin - docman_roles = IETFJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=["ad","chair","delegate","secr"]) - groupman_roles = IETFJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=["ad","chair",]) - groupman_authroles = IETFJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=["Secretariat",]) - matman_roles = IETFJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=["ad","chair","delegate","secr"]) - role_order = IETFJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=["chair","secr","member"], - help_text="The order in which roles are shown, for instance on photo pages. 
Enter valid JSON.") - session_purposes = IETFJSONField(max_length=256, accepted_empty_values=[[], {}], blank=False, default=[], + material_types = EmptyAwareJSONField(max_length=64, accepted_empty_values=[[], {}], blank=False, default=default_material_types) + default_used_roles = EmptyAwareJSONField(max_length=256, accepted_empty_values=[[], {}], blank=False, default=list) + admin_roles = EmptyAwareJSONField(max_length=64, accepted_empty_values=[[], {}], blank=False, default=default_admin_roles) # Trac Admin + docman_roles = EmptyAwareJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=default_docman_roles) + groupman_roles = EmptyAwareJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=default_groupman_roles) + groupman_authroles = EmptyAwareJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=default_groupman_authroles) + matman_roles = EmptyAwareJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=default_matman_roles) + role_order = EmptyAwareJSONField(max_length=128, accepted_empty_values=[[], {}], blank=False, default=default_role_order, + help_text="The order in which roles are shown, for instance on photo pages. Enter valid JSON.") + session_purposes = EmptyAwareJSONField(max_length=256, accepted_empty_values=[[], {}], blank=False, default=list, help_text="Allowed session purposes for this group type", validators=[JSONForeignKeyListValidator(SessionPurposeName)]) @@ -462,6 +494,8 @@ def notify_rfceditor_of_group_name_change(sender, instance=None, **kwargs): current = Group.objects.get(pk=instance.pk) except Group.DoesNotExist: return + if current.type_id == "sdo": + return addr = settings.RFC_EDITOR_GROUP_NOTIFICATION_EMAIL if addr and instance.name != current.name: msg = """ diff --git a/ietf/group/serializers.py b/ietf/group/serializers.py new file mode 100644 index 0000000000..e789ba46bf --- /dev/null +++ b/ietf/group/serializers.py @@ -0,0 +1,50 @@ +# Copyright The IETF Trust 2024-2026, All Rights Reserved +"""django-rest-framework serializers""" + +from drf_spectacular.utils import extend_schema_field +from rest_framework import serializers + +from ietf.person.models import Email +from .models import Group, Role + + +class GroupSerializer(serializers.ModelSerializer): + class Meta: + model = Group + fields = ["acronym", "name", "type", "list_email"] + + +class AreaDirectorSerializer(serializers.Serializer): + """Serialize an area director + + Works with Email or Role + """ + + name = serializers.SerializerMethodField() + email = serializers.SerializerMethodField() + + @extend_schema_field(serializers.CharField) + def get_name(self, instance: Email | Role): + person = getattr(instance, 'person', None) + return person.plain_name() if person else None + + @extend_schema_field(serializers.EmailField) + def get_email(self, instance: Email | Role): + if isinstance(instance, Role): + return instance.email.email_address() + return instance.email_address() + + +class AreaSerializer(serializers.ModelSerializer): + ads = serializers.SerializerMethodField() + + class Meta: + model = Group + fields = ["acronym", "name", "ads"] + + @extend_schema_field(AreaDirectorSerializer(many=True)) + def get_ads(self, area: Group): + return AreaDirectorSerializer( + area.ads if area.is_active else Role.objects.none(), + many=True, + ).data diff --git a/ietf/group/tasks.py b/ietf/group/tasks.py new file mode 100644 index 0000000000..ada83e80e2 --- /dev/null +++ b/ietf/group/tasks.py @@ -0,0 +1,232 
@@ +# Copyright The IETF Trust 2024, All Rights Reserved +# +# Celery task definitions +# +import shutil + +from celery import shared_task +from pathlib import Path + +from django.conf import settings +from django.template.loader import render_to_string +from django.utils import timezone + +from ietf.doc.storage_utils import store_file +from ietf.liaisons.models import LiaisonStatement +from ietf.utils import log +from ietf.utils.test_runner import disable_coverage + +from .models import Group, GroupHistory +from .utils import fill_in_charter_info, fill_in_wg_drafts, fill_in_wg_roles, save_group_in_history +from .views import extract_last_name, roles + + +@shared_task +def generate_wg_charters_files_task(): + areas = Group.objects.filter(type="area", state="active").order_by("name") + groups = ( + Group.objects.filter(type="wg", state="active") + .exclude(parent=None) + .order_by("acronym") + ) + for group in groups: + fill_in_charter_info(group) + fill_in_wg_roles(group) + fill_in_wg_drafts(group) + for area in areas: + area.groups = [g for g in groups if g.parent_id == area.pk] + charter_path = Path(settings.CHARTER_PATH) + charters_file = charter_path / "1wg-charters.txt" + charters_file.write_text( + render_to_string("group/1wg-charters.txt", {"areas": areas}), + encoding="utf8", + ) + charters_by_acronym_file = charter_path / "1wg-charters-by-acronym.txt" + charters_by_acronym_file.write_text( + render_to_string("group/1wg-charters-by-acronym.txt", {"groups": groups}), + encoding="utf8", + ) + + with charters_file.open("rb") as f: + store_file("indexes", "1wg-charters.txt", f, allow_overwrite=True) + with charters_by_acronym_file.open("rb") as f: + store_file("indexes", "1wg-charters-by-acronym.txt", f, allow_overwrite=True) + + charter_copy_dests = [ + getattr(settings, "CHARTER_COPY_PATH", None), + getattr(settings, "CHARTER_COPY_OTHER_PATH", None), + getattr(settings, "CHARTER_COPY_THIRD_PATH", None), + ] + for charter_copy_dest in charter_copy_dests: + if charter_copy_dest is not None: + if not Path(charter_copy_dest).is_dir(): + log.log( + f"Error copying 1wg-charter files to {charter_copy_dest}: it does not exist or is not a directory" + ) + else: + try: + shutil.copy2(charters_file, charter_copy_dest) + except IOError as err: + log.log(f"Error copying {charters_file} to {charter_copy_dest}: {err}") + try: + shutil.copy2(charters_by_acronym_file, charter_copy_dest) + except IOError as err: + log.log( + f"Error copying {charters_by_acronym_file} to {charter_copy_dest}: {err}" + ) + + +@shared_task +def generate_wg_summary_files_task(): + # Active WGs (all should have a parent, but filter to be sure) + groups = ( + Group.objects.filter(type="wg", state="active") + .exclude(parent=None) + .order_by("acronym") + ) + # Augment groups with chairs list + for group in groups: + group.chairs = sorted(roles(group, "chair"), key=extract_last_name) + + # Active areas with one or more active groups in them + areas = Group.objects.filter( + type="area", + state="active", + group__in=groups, + ).distinct().order_by("name") + # Augment areas with their groups + for area in areas: + area.groups = [g for g in groups if g.parent_id == area.pk] + summary_path = Path(settings.GROUP_SUMMARY_PATH) + summary_file = summary_path / "1wg-summary.txt" + summary_file.write_text( + render_to_string("group/1wg-summary.txt", {"areas": areas}), + encoding="utf8", + ) + summary_by_acronym_file = summary_path / "1wg-summary-by-acronym.txt" + summary_by_acronym_file.write_text( + render_to_string( + 
"group/1wg-summary-by-acronym.txt", + {"areas": areas, "groups": groups}, + ), + encoding="utf8", + ) + + with summary_file.open("rb") as f: + store_file("indexes", "1wg-summary.txt", f, allow_overwrite=True) + with summary_by_acronym_file.open("rb") as f: + store_file("indexes", "1wg-summary-by-acronym.txt", f, allow_overwrite=True) + +@shared_task +@disable_coverage() +def run_once_adjust_liaison_groups(): # pragma: no cover + log.log("Starting run_once_adjust_liaison_groups") + if all( + [ + Group.objects.filter( + acronym__in=[ + "3gpp-tsg-ct", + "3gpp-tsg-ran-wg1", + "3gpp-tsg-ran-wg4", + "3gpp-tsg-sa", + "3gpp-tsg-sa-wg5", + "3gpp-tsgct", # duplicates 3gpp-tsg-ct above already + "3gpp-tsgct-ct1", # will normalize all acronyms to hyphenated form + "3gpp-tsgct-ct3", # and consistently match the name + "3gpp-tsgct-ct4", # (particularly use of WG) + "3gpp-tsgran", + "3gpp-tsgran-ran2", + "3gpp-tsgsa", # duplicates 3gpp-tsg-sa above + "3gpp-tsgsa-sa2", # will normalize + "3gpp-tsgsa-sa3", + "3gpp-tsgsa-sa4", + "3gpp-tsgt-wg2", + ] + ).count() + == 16, + not Group.objects.filter( + acronym__in=[ + "3gpp-tsg-ran-wg3", + "3gpp-tsg-ct-wg1", + "3gpp-tsg-ct-wg3", + "3gpp-tsg-ct-wg4", + "3gpp-tsg-ran", + "3gpp-tsg-ran-wg2", + "3gpp-tsg-sa-wg2", + "3gpp-tsg-sa-wg3", + "3gpp-tsg-sa-wg4", + "3gpp-tsg-t-wg2", + ] + ).exists(), + Group.objects.filter(acronym="o3gpptsgran3").exists(), + not LiaisonStatement.objects.filter( + to_groups__acronym__in=["3gpp-tsgct", "3gpp-tsgsa"] + ).exists(), + not LiaisonStatement.objects.filter( + from_groups__acronym="3gpp-tsgct" + ).exists(), + LiaisonStatement.objects.filter(from_groups__acronym="3gpp-tsgsa").count() + == 1, + LiaisonStatement.objects.get(from_groups__acronym="3gpp-tsgsa").pk == 1448, + ] + ): + for old_acronym, new_acronym, new_name in ( + ("o3gpptsgran3", "3gpp-tsg-ran-wg3", "3GPP TSG RAN WG3"), + ("3gpp-tsgct-ct1", "3gpp-tsg-ct-wg1", "3GPP TSG CT WG1"), + ("3gpp-tsgct-ct3", "3gpp-tsg-ct-wg3", "3GPP TSG CT WG3"), + ("3gpp-tsgct-ct4", "3gpp-tsg-ct-wg4", "3GPP TSG CT WG4"), + ("3gpp-tsgran", "3gpp-tsg-ran", "3GPP TSG RAN"), + ("3gpp-tsgran-ran2", "3gpp-tsg-ran-wg2", "3GPP TSG RAN WG2"), + ("3gpp-tsgsa-sa2", "3gpp-tsg-sa-wg2", "3GPP TSG SA WG2"), + ("3gpp-tsgsa-sa3", "3gpp-tsg-sa-wg3", "3GPP TSG SA WG3"), + ("3gpp-tsgsa-sa4", "3gpp-tsg-sa-wg4", "3GPP TSG SA WG4"), + ("3gpp-tsgt-wg2", "3gpp-tsg-t-wg2", "3GPP TSG T WG2"), + ): + group = Group.objects.get(acronym=old_acronym) + save_group_in_history(group) + group.time = timezone.now() + group.acronym = new_acronym + group.name = new_name + if old_acronym.startswith("3gpp-tsgct-"): + group.parent = Group.objects.get(acronym="3gpp-tsg-ct") + elif old_acronym.startswith("3gpp-tsgsa-"): + group.parent = Group.objects.get(acronym="3gpp-tsg-sa") + group.save() + group.groupevent_set.create( + time=group.time, + by_id=1, # (System) + type="info_changed", + desc=f"acronym changed from {old_acronym} to {new_acronym}, name set to {new_name}", + ) + + for acronym, new_name in (("3gpp-tsg-ct", "3GPP TSG CT"),): + group = Group.objects.get(acronym=acronym) + save_group_in_history(group) + group.time = timezone.now() + group.name = new_name + group.save() + group.groupevent_set.create( + time=group.time, + by_id=1, # (System) + type="info_changed", + desc=f"name set to {new_name}", + ) + + ls = LiaisonStatement.objects.get(pk=1448) + ls.from_groups.remove(Group.objects.get(acronym="3gpp-tsgsa")) + ls.from_groups.add(Group.objects.get(acronym="3gpp-tsg-sa")) + + # Rewriting history to effectively merge the histories 
of the duplicate groups + GroupHistory.objects.filter(parent__acronym="3gpp-tsgsa").update( + parent=Group.objects.get(acronym="3gpp-tsg-sa") + ) + GroupHistory.objects.filter(parent__acronym="3gpp-tsgct").update( + parent=Group.objects.get(acronym="3gpp-tsg-ct") + ) + + deleted = Group.objects.filter( + acronym__in=["3gpp-tsgsa", "3gpp-tsgct"] + ).delete() + log.log(f"Deleted Groups: {deleted}") + else: + log.log("* Refusing to continue as preconditions have changed") diff --git a/ietf/group/templatetags/group_filters.py b/ietf/group/templatetags/group_filters.py index c9481b767b..bf2ad71949 100644 --- a/ietf/group/templatetags/group_filters.py +++ b/ietf/group/templatetags/group_filters.py @@ -37,3 +37,10 @@ def role_person_link(role, **kwargs): plain_name = role.person.plain_name() email = role.email.address return {'name': name, 'plain_name': plain_name, 'email': email, 'title': title, 'class': cls} + +@register.filter +def name_with_conditional_acronym(group): + if group.type_id in ("sdo", "isoc", "individ", "nomcom", "ietf", "irtf", ): + return group.name + else: + return f"{group.name} ({group.acronym})" diff --git a/ietf/group/tests.py b/ietf/group/tests.py index 233cde55e6..229744388c 100644 --- a/ietf/group/tests.py +++ b/ietf/group/tests.py @@ -1,15 +1,10 @@ # Copyright The IETF Trust 2013-2020, All Rights Reserved # -*- coding: utf-8 -*- -import io -import os import datetime import json +from unittest import mock -from tempfile import NamedTemporaryFile - -from django.core.management import call_command -from django.conf import settings from django.urls import reverse as urlreverse from django.db.models import Q from django.test import Client @@ -20,10 +15,17 @@ from ietf.doc.factories import DocumentFactory, WgDraftFactory, EditorialDraftFactory from ietf.doc.models import DocEvent, RelatedDocument, Document from ietf.group.models import Role, Group -from ietf.group.utils import get_group_role_emails, get_child_group_role_emails, get_group_ad_emails +from ietf.group.utils import ( + get_group_role_emails, + get_child_group_role_emails, + get_group_ad_emails, + get_group_email_aliases, + GroupAliasGenerator, + role_holder_emails, +) from ietf.group.factories import GroupFactory, RoleFactory from ietf.person.factories import PersonFactory, EmailFactory -from ietf.person.models import Person +from ietf.person.models import Email, Person from ietf.utils.test_utils import login_testing_unauthorized, TestCase class StreamTests(TestCase): @@ -63,13 +65,60 @@ def test_stream_edit(self): self.assertTrue(Role.objects.filter(name="delegate", group__acronym=stream_acronym, email__address="ad2@ietf.org")) +class GroupLeadershipTests(TestCase): + def test_leadership_wg(self): + # setup various group states + bof_role = RoleFactory( + group__type_id="wg", group__state_id="bof", name_id="chair" + ) + proposed_role = RoleFactory( + group__type_id="wg", group__state_id="proposed", name_id="chair" + ) + active_role = RoleFactory( + group__type_id="wg", group__state_id="active", name_id="chair" + ) + conclude_role = RoleFactory( + group__type_id="wg", group__state_id="conclude", name_id="chair" + ) + url = urlreverse( + "ietf.group.views.group_leadership", kwargs={"group_type": "wg"} + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, "Group Leadership") + self.assertContains(r, bof_role.person.last_name()) + self.assertContains(r, proposed_role.person.last_name()) + self.assertContains(r, active_role.person.last_name()) + self.assertNotContains(r, 
conclude_role.person.last_name()) + + def test_leadership_wg_csv(self): + url = urlreverse( + "ietf.group.views.group_leadership_csv", kwargs={"group_type": "wg"} + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertEqual(r["Content-Type"], "text/csv") + self.assertContains(r, "Chairman, Sops") + + def test_leadership_rg(self): + role = RoleFactory(group__type_id="rg", name_id="chair") + url = urlreverse( + "ietf.group.views.group_leadership", kwargs={"group_type": "rg"} + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, "Group Leadership") + self.assertContains(r, role.person.last_name()) + self.assertNotContains(r, "Chairman, Sops") + + class GroupStatsTests(TestCase): def setUp(self): super().setUp() a = WgDraftFactory() b = WgDraftFactory() RelatedDocument.objects.create( - source=a, target=b.docalias.first(), relationship_id="refnorm" + source=a, target=b, relationship_id="refnorm" ) def test_group_stats(self): @@ -95,7 +144,7 @@ def setUp(self): a = WgDraftFactory() b = WgDraftFactory() RelatedDocument.objects.create( - source=a, target=b.docalias.first(), relationship_id="refnorm" + source=a, target=b, relationship_id="refnorm" ) def test_group_document_dependencies(self): @@ -128,25 +177,13 @@ def test_group_document_dependencies(self): class GenerateGroupAliasesTests(TestCase): - def setUp(self): - super().setUp() - self.doc_aliases_file = NamedTemporaryFile(delete=False, mode='w+') - self.doc_aliases_file.close() - self.doc_virtual_file = NamedTemporaryFile(delete=False, mode='w+') - self.doc_virtual_file.close() - self.saved_draft_aliases_path = settings.GROUP_ALIASES_PATH - self.saved_draft_virtual_path = settings.GROUP_VIRTUAL_PATH - settings.GROUP_ALIASES_PATH = self.doc_aliases_file.name - settings.GROUP_VIRTUAL_PATH = self.doc_virtual_file.name - - def tearDown(self): - settings.GROUP_ALIASES_PATH = self.saved_draft_aliases_path - settings.GROUP_VIRTUAL_PATH = self.saved_draft_virtual_path - os.unlink(self.doc_aliases_file.name) - os.unlink(self.doc_virtual_file.name) - super().tearDown() - - def testManagementCommand(self): + def test_generator_class(self): + """The GroupAliasGenerator should generate the same lists as the old mgmt cmd""" + # clean out test fixture group roles we don't need for this test + Role.objects.filter( + group__acronym__in=["farfut", "iab", "ietf", "irtf", "ise", "ops", "rsab", "rsoc", "sops"] + ).delete() + a_month_ago = timezone.now() - datetime.timedelta(30) a_decade_ago = timezone.now() - datetime.timedelta(3650) role1 = RoleFactory(name_id='ad', group__type_id='area', group__acronym='myth', group__state_id='active') @@ -163,12 +200,11 @@ def testManagementCommand(self): recent = GroupFactory(type_id='wg', acronym='recent', parent=area, state_id='conclude', time=a_month_ago) recentchair = PersonFactory(user__username='recentchair') recent.role_set.create(name_id='chair', person=recentchair, email=recentchair.email()) - wayold = GroupFactory(type_id='wg', acronym='recent', parent=area, state_id='conclude', time=a_decade_ago) + wayold = GroupFactory(type_id='wg', acronym='wayold', parent=area, state_id='conclude', time=a_decade_ago) wayoldchair = PersonFactory(user__username='wayoldchair') wayold.role_set.create(name_id='chair', person=wayoldchair, email=wayoldchair.email()) - role2 = RoleFactory(name_id='ad', group__type_id='area', group__acronym='done', group__state_id='conclude') - done = role2.group - done_ad = role2.person + # create a "done" group that should not be 
included anywhere + RoleFactory(name_id='ad', group__type_id='area', group__acronym='done', group__state_id='conclude') irtf = Group.objects.get(acronym='irtf') testrg = GroupFactory(type_id='rg', acronym='testrg', parent=irtf) testrgchair = PersonFactory(user__username='testrgchair') @@ -176,77 +212,88 @@ def testManagementCommand(self): testrag = GroupFactory(type_id='rg', acronym='testrag', parent=irtf) testragchair = PersonFactory(user__username='testragchair') testrag.role_set.create(name_id='chair', person=testragchair, email=testragchair.email()) - individual = PersonFactory() - - args = [ ] - kwargs = { } - out = io.StringIO() - call_command("generate_group_aliases", *args, **kwargs, stdout=out, stderr=out) - self.assertFalse(out.getvalue()) - - with open(settings.GROUP_ALIASES_PATH) as afile: - acontent = afile.read() - self.assertTrue('xfilter-' + area.acronym + '-ads' in acontent) - self.assertTrue('xfilter-' + area.acronym + '-chairs' in acontent) - self.assertTrue('xfilter-' + mars.acronym + '-ads' in acontent) - self.assertTrue('xfilter-' + mars.acronym + '-chairs' in acontent) - self.assertTrue('xfilter-' + ames.acronym + '-ads' in acontent) - self.assertTrue('xfilter-' + ames.acronym + '-chairs' in acontent) - self.assertTrue(all([x in acontent for x in [ - 'xfilter-' + area.acronym + '-ads', - 'xfilter-' + area.acronym + '-chairs', - 'xfilter-' + mars.acronym + '-ads', - 'xfilter-' + mars.acronym + '-chairs', - 'xfilter-' + ames.acronym + '-ads', - 'xfilter-' + ames.acronym + '-chairs', - 'xfilter-' + recent.acronym + '-ads', - 'xfilter-' + recent.acronym + '-chairs', - ]])) - self.assertFalse(all([x in acontent for x in [ - 'xfilter-' + done.acronym + '-ads', - 'xfilter-' + done.acronym + '-chairs', - 'xfilter-' + wayold.acronym + '-ads', - 'xfilter-' + wayold.acronym + '-chairs', - ]])) - - with open(settings.GROUP_VIRTUAL_PATH) as vfile: - vcontent = vfile.read() - self.assertTrue(all([x in vcontent for x in [ - ad.email_address(), - marschair.email_address(), - marssecr.email_address(), - ameschair.email_address(), - recentchair.email_address(), - testrgchair.email_address(), - testragchair.email_address(), - ]])) - self.assertFalse(all([x in vcontent for x in [ - done_ad.email_address(), - wayoldchair.email_address(), - individual.email_address(), - ]])) - self.assertTrue(all([x in vcontent for x in [ - 'xfilter-' + area.acronym + '-ads', - 'xfilter-' + area.acronym + '-chairs', - 'xfilter-' + mars.acronym + '-ads', - 'xfilter-' + mars.acronym + '-chairs', - 'xfilter-' + ames.acronym + '-ads', - 'xfilter-' + ames.acronym + '-chairs', - 'xfilter-' + recent.acronym + '-ads', - 'xfilter-' + recent.acronym + '-chairs', - 'xfilter-' + testrg.acronym + '-chairs', - 'xfilter-' + testrag.acronym + '-chairs', - testrg.acronym + '-chairs@ietf.org', - testrg.acronym + '-chairs@irtf.org', - testrag.acronym + '-chairs@ietf.org', - testrag.acronym + '-chairs@irtf.org', - ]])) - self.assertFalse(all([x in vcontent for x in [ - 'xfilter-' + done.acronym + '-ads', - 'xfilter-' + done.acronym + '-chairs', - 'xfilter-' + wayold.acronym + '-ads', - 'xfilter-' + wayold.acronym + '-chairs', - ]])) + + output = [(alias, (domains, alist)) for alias, domains, alist in GroupAliasGenerator()] + alias_dict = dict(output) + self.maxDiff = None + self.assertEqual(len(alias_dict), len(output)) # no duplicate aliases + expected_dict = { + area.acronym + "-ads": (["ietf"], [ad.email_address()]), + area.acronym + "-chairs": (["ietf"], [ad.email_address(), marschair.email_address(), 
marssecr.email_address(), ameschair.email_address()]), + mars.acronym + "-ads": (["ietf"], [ad.email_address()]), + mars.acronym + "-chairs": (["ietf"], [marschair.email_address(), marssecr.email_address()]), + ames.acronym + "-ads": (["ietf"], [ad.email_address()]), + ames.acronym + "-chairs": (["ietf"], [ameschair.email_address()]), + recent.acronym + "-ads": (["ietf"], [ad.email_address()]), + recent.acronym + "-chairs": (["ietf"], [recentchair.email_address()]), + testrg.acronym + "-chairs": (["ietf", "irtf"], [testrgchair.email_address()]), + testrag.acronym + "-chairs": (["ietf", "irtf"], [testragchair.email_address()]), + } + # Sort lists for comparison + self.assertEqual( + {k: (sorted(doms), sorted(addrs)) for k, (doms, addrs) in alias_dict.items()}, + {k: (sorted(doms), sorted(addrs)) for k, (doms, addrs) in expected_dict.items()}, + ) + + @mock.patch("ietf.group.utils.GroupAliasGenerator") + def test_get_group_email_aliases(self, mock_alias_gen_cls): + GroupFactory(name="agroup", type_id="rg") + GroupFactory(name="bgroup") + GroupFactory(name="cgroup", type_id="rg") + GroupFactory(name="dgroup") + + mock_alias_gen_cls.return_value = [ + ("bgroup-chairs", ["ietf"], ["c1@example.com", "c2@example.com"]), + ("agroup-ads", ["ietf", "irtf"], ["ad@example.com"]), + ("bgroup-ads", ["ietf"], ["ad@example.com"]), + ] + # order is important - should be by acronym, otherwise left in order returned by generator + self.assertEqual( + get_group_email_aliases(None, None), + [ + { + "acronym": "agroup", + "alias_type": "-ads", + "expansion": "ad@example.com", + }, + { + "acronym": "bgroup", + "alias_type": "-chairs", + "expansion": "c1@example.com, c2@example.com", + }, + { + "acronym": "bgroup", + "alias_type": "-ads", + "expansion": "ad@example.com", + }, + ], + ) + self.assertQuerySetEqual( + mock_alias_gen_cls.call_args[0][0], + Group.objects.all(), + ordered=False, + ) + + # test other parameter combinations but we already checked that the alias generator's + # output will be passed through, so don't re-test the processing + get_group_email_aliases("agroup", None) + self.assertQuerySetEqual( + mock_alias_gen_cls.call_args[0][0], + Group.objects.filter(acronym="agroup"), + ordered=False, + ) + get_group_email_aliases(None, "wg") + self.assertQuerySetEqual( + mock_alias_gen_cls.call_args[0][0], + Group.objects.filter(type_id="wg"), + ordered=False, + ) + get_group_email_aliases("agroup", "wg") + self.assertQuerySetEqual( + mock_alias_gen_cls.call_args[0][0], + Group.objects.none(), + ordered=False, + ) class GroupRoleEmailTests(TestCase): @@ -286,3 +333,41 @@ def test_group_ad_emails(self): self.assertGreater(len(emails), 0) for item in emails: self.assertIn('@', item) + + def test_role_holder_emails(self): + # The test fixtures create a bunch of addresses that pollute this test's results - disable them + Email.objects.update(active=False) + + role_holders = [ + RoleFactory(name_id="member", group__type_id=gt).person + for gt in [ + "ag", + "area", + "dir", + "iab", + "ietf", + "irtf", + "nomcom", + "rg", + "team", + "wg", + "rag", + ] + ] + # Expect an additional active email to be included + EmailFactory( + person=role_holders[0], + active=True, + ) + # Do not expect an inactive email to be included + EmailFactory( + person=role_holders[1], + active=False, + ) + # Do not expect address on a role-holder for a different group type + RoleFactory(name_id="member", group__type_id="adhoc") # arbitrary type not in the of-interest list + + self.assertCountEqual( + role_holder_emails(), + 
Email.objects.filter(active=True, person__in=role_holders), + ) diff --git a/ietf/group/tests_info.py b/ietf/group/tests_info.py index 136e195494..3f24e2e3d6 100644 --- a/ietf/group/tests_info.py +++ b/ietf/group/tests_info.py @@ -1,22 +1,23 @@ -# Copyright The IETF Trust 2009-2023, All Rights Reserved +# Copyright The IETF Trust 2009-2024, All Rights Reserved # -*- coding: utf-8 -*- -import os import calendar import datetime import io import bleach +from unittest import mock -from unittest.mock import patch +from unittest.mock import call, patch from pathlib import Path from pyquery import PyQuery -from tempfile import NamedTemporaryFile import debug # pyflakes:ignore from django.conf import settings +from django.http import Http404, HttpResponse from django.test import RequestFactory +from django.test.utils import override_settings from django.urls import reverse as urlreverse from django.urls import NoReverseMatch from django.utils import timezone @@ -26,14 +27,17 @@ from ietf.community.models import CommunityList from ietf.community.utils import reset_name_contains_index_for_rule -from ietf.doc.factories import WgDraftFactory, IndividualDraftFactory, CharterFactory, BallotDocEventFactory -from ietf.doc.models import Document, DocAlias, DocEvent, State +from ietf.doc.factories import WgDraftFactory, RgDraftFactory, IndividualDraftFactory, CharterFactory, BallotDocEventFactory +from ietf.doc.models import Document, DocEvent, State +from ietf.doc.storage_utils import retrieve_str from ietf.doc.utils_charter import charter_name_for_group from ietf.group.admin import GroupForm as AdminGroupForm from ietf.group.factories import (GroupFactory, RoleFactory, GroupEventFactory, DatedGroupMilestoneFactory, DatelessGroupMilestoneFactory) from ietf.group.forms import GroupForm from ietf.group.models import Group, GroupEvent, GroupMilestone, GroupStateTransitions, Role +from ietf.group.tasks import generate_wg_charters_files_task, generate_wg_summary_files_task +from ietf.group.views import response_from_file from ietf.group.utils import save_group_in_history, setup_default_community_list_for_group from ietf.meeting.factories import SessionFactory from ietf.name.models import DocTagName, GroupStateName, GroupTypeName, ExtResourceName, RoleName @@ -56,7 +60,13 @@ def pklist(docs): return [ str(doc.pk) for doc in docs.all() ] class GroupPagesTests(TestCase): - settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['CHARTER_PATH'] + settings_temp_path_overrides = TestCase.settings_temp_path_overrides + [ + "CHARTER_PATH", + "CHARTER_COPY_PATH", + "CHARTER_COPY_OTHER_PATH", # Note: not explicitly testing use of + "CHARTER_COPY_THIRD_PATH", # either of these settings + "GROUP_SUMMARY_PATH", + ] def test_active_groups(self): area = GroupFactory.create(type_id='area') @@ -71,7 +81,7 @@ def test_active_groups(self): self.assertContains(r, group.name) self.assertContains(r, escape(group.ad_role().person.name)) - for t in ('rg','area','ag', 'rag', 'dir','review','team','program','iabasg','adm','rfcedtyp'): # See issue 5120 + for t in ('rg','area','ag', 'rag', 'dir','review','team','program','iabasg','iabworkshop','adm','rfcedtyp'): # See issue 5120 g = GroupFactory.create(type_id=t,state_id='active') if t in ['dir','review']: g.parent = GroupFactory.create(type_id='area',state_id='active') @@ -80,6 +90,12 @@ def test_active_groups(self): r = self.client.get(url) self.assertEqual(r.status_code, 200) self.assertContains(r, g.acronym) + if t == "area": + q = PyQuery(r.content) + wg_url = 
urlreverse("ietf.group.views.active_groups", kwargs=dict(group_type="wg")) + href = f"{wg_url}#{g.acronym.upper()}" + self.assertEqual(q(f"h2#id-{g.acronym} a").attr("href"), href) + self.assertEqual(q(f'h2#id-{g.acronym} a[href="{href}"]').text(), f"({g.acronym.upper()})") url = urlreverse('ietf.group.views.active_groups', kwargs=dict()) r = self.client.get(url) @@ -87,7 +103,7 @@ def test_active_groups(self): self.assertContains(r, "Directorate") self.assertContains(r, "AG") - for slug in GroupTypeName.objects.exclude(slug__in=['wg','rg','ag','rag','area','dir','review','team','program','adhoc','ise','adm','iabasg','rfcedtyp', 'edwg', 'edappr']).values_list('slug',flat=True): + for slug in GroupTypeName.objects.exclude(slug__in=['wg','rg','ag','rag','area','dir','review','team','program','adhoc','ise','adm','iabasg','iabworkshop','rfcedtyp', 'edwg', 'edappr']).values_list('slug',flat=True): with self.assertRaises(NoReverseMatch): url=urlreverse('ietf.group.views.active_groups', kwargs=dict(group_type=slug)) @@ -110,48 +126,204 @@ def test_group_home(self): self.assertContains(r, draft.name) self.assertContains(r, draft.title) - def test_wg_summaries(self): - group = CharterFactory(group__type_id='wg',group__parent=GroupFactory(type_id='area')).group - RoleFactory(group=group,name_id='chair',person=PersonFactory()) - RoleFactory(group=group,name_id='ad',person=PersonFactory()) + def test_response_from_file(self): + # n.b., GROUP_SUMMARY_PATH is a temp dir that will be cleaned up automatically + fp = Path(settings.GROUP_SUMMARY_PATH) / "some-file.txt" + fp.write_text("This is a charters file with an é") + r = response_from_file(fp) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.headers["Content-Type"], "text/plain; charset=utf-8") + self.assertEqual(r.content.decode("utf8"), "This is a charters file with an é") + # now try with a nonexistent file + fp.unlink() + with self.assertRaises(Http404): + response_from_file(fp) + + @patch("ietf.group.views.response_from_file") + def test_wg_summary_area(self, mock): + r = self.client.get( + urlreverse("ietf.group.views.wg_summary_area", kwargs={"group_type": "rg"}) + ) # not wg + self.assertEqual(r.status_code, 404) + self.assertFalse(mock.called) + mock.return_value = HttpResponse("yay") + r = self.client.get( + urlreverse("ietf.group.views.wg_summary_area", kwargs={"group_type": "wg"}) + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.content.decode(), "yay") + self.assertEqual(mock.call_args, call(Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary.txt")) + + @patch("ietf.group.views.response_from_file") + def test_wg_summary_acronym(self, mock): + r = self.client.get( + urlreverse( + "ietf.group.views.wg_summary_acronym", kwargs={"group_type": "rg"} + ) + ) # not wg + self.assertEqual(r.status_code, 404) + self.assertFalse(mock.called) + mock.return_value = HttpResponse("yay") + r = self.client.get( + urlreverse( + "ietf.group.views.wg_summary_acronym", kwargs={"group_type": "wg"} + ) + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.content.decode(), "yay") + self.assertEqual( + mock.call_args, call(Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary-by-acronym.txt") + ) + + @patch("ietf.group.views.response_from_file") + def test_wg_charters(self, mock): + r = self.client.get( + urlreverse("ietf.group.views.wg_charters", kwargs={"group_type": "rg"}) + ) # not wg + self.assertEqual(r.status_code, 404) + self.assertFalse(mock.called) + mock.return_value = HttpResponse("yay") + r = self.client.get( + 
urlreverse("ietf.group.views.wg_charters", kwargs={"group_type": "wg"}) + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.content.decode(), "yay") + self.assertEqual(mock.call_args, call(Path(settings.CHARTER_PATH) / "1wg-charters.txt")) + + @patch("ietf.group.views.response_from_file") + def test_wg_charters_by_acronym(self, mock): + r = self.client.get( + urlreverse( + "ietf.group.views.wg_charters_by_acronym", kwargs={"group_type": "rg"} + ) + ) # not wg + self.assertEqual(r.status_code, 404) + self.assertFalse(mock.called) + mock.return_value = HttpResponse("yay") + r = self.client.get( + urlreverse( + "ietf.group.views.wg_charters_by_acronym", kwargs={"group_type": "wg"} + ) + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.content.decode(), "yay") + self.assertEqual( + mock.call_args, call(Path(settings.CHARTER_PATH) / "1wg-charters-by-acronym.txt") + ) + def test_generate_wg_charters_files_task(self): + group = CharterFactory( + group__type_id="wg", group__parent=GroupFactory(type_id="area") + ).group + RoleFactory(group=group, name_id="chair", person=PersonFactory()) + RoleFactory(group=group, name_id="ad", person=PersonFactory()) chair = Email.objects.filter(role__group=group, role__name="chair")[0] + ( + Path(settings.CHARTER_PATH) / f"{group.charter.name}-{group.charter.rev}.txt" + ).write_text("This is a charter.") - with (Path(settings.CHARTER_PATH) / ("%s-%s.txt" % (group.charter.canonical_name(), group.charter.rev))).open("w") as f: - f.write("This is a charter.") + generate_wg_charters_files_task() + wg_charters_contents = (Path(settings.CHARTER_PATH) / "1wg-charters.txt").read_text( + encoding="utf8" + ) + self.assertIn(group.acronym, wg_charters_contents) + self.assertIn(group.name, wg_charters_contents) + self.assertIn(group.ad_role().person.plain_name(), wg_charters_contents) + self.assertIn(chair.address, wg_charters_contents) + self.assertIn("This is a charter.", wg_charters_contents) + wg_charters_copy = ( + Path(settings.CHARTER_COPY_PATH) / "1wg-charters.txt" + ).read_text(encoding="utf8") + self.assertEqual(wg_charters_copy, wg_charters_contents) + + wg_charters_by_acronym_contents = ( + Path(settings.CHARTER_PATH) / "1wg-charters-by-acronym.txt" + ).read_text(encoding="utf8") + self.assertIn(group.acronym, wg_charters_by_acronym_contents) + self.assertIn(group.name, wg_charters_by_acronym_contents) + self.assertIn(group.ad_role().person.plain_name(), wg_charters_by_acronym_contents) + self.assertIn(chair.address, wg_charters_by_acronym_contents) + self.assertIn("This is a charter.", wg_charters_by_acronym_contents) + wg_charters_by_acronymcopy = ( + Path(settings.CHARTER_COPY_PATH) / "1wg-charters-by-acronym.txt" + ).read_text(encoding="utf8") + self.assertEqual(wg_charters_by_acronymcopy, wg_charters_by_acronym_contents) + + def test_generate_wg_charters_files_task_without_copy(self): + """Test disabling charter file copying + + Note that these tests mostly check that errors are not encountered. Because they unset + the CHARTER_COPY_PATH or set it to a non-directory destination, it's not clear where to + look to see whether the files were (incorrectly) copied somewhere. 
+ """ + group = CharterFactory( + group__type_id="wg", group__parent=GroupFactory(type_id="area") + ).group + ( + Path(settings.CHARTER_PATH) / f"{group.charter.name}-{group.charter.rev}.txt" + ).write_text("This is a charter.") + + # No directory set + with override_settings(): + del settings.CHARTER_COPY_PATH + generate_wg_charters_files_task() + # n.b., CHARTER_COPY_PATH is set again outside the with block + self.assertTrue((Path(settings.CHARTER_PATH) / "1wg-charters.txt").exists()) + self.assertFalse((Path(settings.CHARTER_COPY_PATH) / "1wg-charters.txt").exists()) + self.assertTrue( + (Path(settings.CHARTER_PATH) / "1wg-charters-by-acronym.txt").exists() + ) + self.assertFalse( + (Path(settings.CHARTER_COPY_PATH) / "1wg-charters-by-acronym.txt").exists() + ) + (Path(settings.CHARTER_PATH) / "1wg-charters.txt").unlink() + (Path(settings.CHARTER_PATH) / "1wg-charters-by-acronym.txt").unlink() + + # Set to a file, not a directory + not_a_dir = Path(settings.CHARTER_COPY_PATH) / "not-a-dir.txt" + not_a_dir.write_text("Not a dir") + with override_settings(CHARTER_COPY_PATH=str(not_a_dir)): + generate_wg_charters_files_task() + # n.b., CHARTER_COPY_PATH is set again outside the with block + self.assertTrue((Path(settings.CHARTER_PATH) / "1wg-charters.txt").exists()) + self.assertFalse((Path(settings.CHARTER_COPY_PATH) / "1wg-charters.txt").exists()) + self.assertTrue( + (Path(settings.CHARTER_PATH) / "1wg-charters-by-acronym.txt").exists() + ) + self.assertFalse( + (Path(settings.CHARTER_COPY_PATH) / "1wg-charters-by-acronym.txt").exists() + ) + self.assertEqual(not_a_dir.read_text(), "Not a dir") - url = urlreverse('ietf.group.views.wg_summary_area', kwargs=dict(group_type="wg")) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - self.assertContains(r, group.parent.name) - self.assertContains(r, group.acronym) - self.assertContains(r, group.name) - self.assertContains(r, chair.address) + def test_generate_wg_summary_files_task(self): + group = CharterFactory(group__type_id='wg',group__parent=GroupFactory(type_id='area')).group + RoleFactory(group=group,name_id='chair',person=PersonFactory()) + RoleFactory(group=group,name_id='ad',person=PersonFactory()) - url = urlreverse('ietf.group.views.wg_summary_acronym', kwargs=dict(group_type="wg")) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - self.assertContains(r, group.acronym) - self.assertContains(r, group.name) - self.assertContains(r, chair.address) - - url = urlreverse('ietf.group.views.wg_charters', kwargs=dict(group_type="wg")) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - self.assertContains(r, group.acronym) - self.assertContains(r, group.name) - self.assertContains(r, group.ad_role().person.plain_name()) - self.assertContains(r, chair.address) - self.assertContains(r, "This is a charter.") + chair = Email.objects.filter(role__group=group, role__name="chair")[0] - url = urlreverse('ietf.group.views.wg_charters_by_acronym', kwargs=dict(group_type="wg")) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - self.assertContains(r, group.acronym) - self.assertContains(r, group.name) - self.assertContains(r, group.ad_role().person.plain_name()) - self.assertContains(r, chair.address) - self.assertContains(r, "This is a charter.") + generate_wg_summary_files_task() + + for summary_by_area_contents in [ + ( + Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary.txt" + ).read_text(encoding="utf8"), + retrieve_str("indexes", "1wg-summary.txt") + ]: + 
self.assertIn(group.parent.name, summary_by_area_contents) + self.assertIn(group.acronym, summary_by_area_contents) + self.assertIn(group.name, summary_by_area_contents) + self.assertIn(chair.address, summary_by_area_contents) + + for summary_by_acronym_contents in [ + ( + Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary-by-acronym.txt" + ).read_text(encoding="utf8"), + retrieve_str("indexes", "1wg-summary-by-acronym.txt") + ]: + self.assertIn(group.acronym, summary_by_acronym_contents) + self.assertIn(group.name, summary_by_acronym_contents) + self.assertIn(chair.address, summary_by_acronym_contents) def test_chartering_groups(self): group = CharterFactory(group__type_id='wg',group__parent=GroupFactory(type_id='area'),states=[('charter','intrev')]).group @@ -241,6 +413,7 @@ def test_group_documents(self): self.assertContains(r, draft3.name) for ah in draft3.action_holders.all(): self.assertContains(r, escape(ah.name)) + self.assertContains(r, "Active with the IESG Internet-Draft") # draft3 is pub-req hence should have such a divider self.assertContains(r, 'for 173 days', count=1) # the old_dah should be tagged self.assertContains(r, draft4.name) self.assertNotContains(r, draft5.name) @@ -253,6 +426,25 @@ def test_group_documents(self): q = PyQuery(r.content) self.assertTrue(any([draft2.name in x.attrib['href'] for x in q('table td a.track-untrack-doc')])) + # Let's also check the IRTF stream + rg = GroupFactory(type_id='rg') + setup_default_community_list_for_group(rg) + rgDraft = RgDraftFactory(group=rg) + rgDraft4 = RgDraftFactory(group=rg) + rgDraft4.set_state(State.objects.get(slug='irsg-w')) + rgDraft7 = RgDraftFactory(group=rg) + rgDraft7.set_state(State.objects.get(type='draft-stream-%s' % rgDraft7.stream_id, slug='dead')) + for url in group_urlreverse_list(rg, 'ietf.group.views.group_documents'): + with self.settings(DOC_ACTION_HOLDER_MAX_AGE_DAYS=20): + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, rgDraft.name) + self.assertContains(r, rg.name) + self.assertContains(r, rg.acronym) + self.assertNotContains(r, draft3.name) # As draft3 is a WG draft, it should not be listed here + self.assertContains(r, rgDraft4.name) + self.assertNotContains(r, rgDraft7.name) + # test the txt version too while we're at it for url in group_urlreverse_list(group, 'ietf.group.views.group_documents_txt'): r = self.client.get(url) @@ -264,8 +456,9 @@ def test_group_charter(self): group = CharterFactory().group draft = WgDraftFactory(group=group) - with (Path(settings.CHARTER_PATH) / ("%s-%s.txt" % (group.charter.canonical_name(), group.charter.rev))).open("w") as f: - f.write("This is a charter.") + ( + Path(settings.CHARTER_PATH) / f"{group.charter.name}-{group.charter.rev}.txt" + ).write_text("This is a charter.") milestone = GroupMilestone.objects.create( group=group, @@ -350,6 +543,25 @@ def verify_can_edit_group(url, group, username): for username in list(set(interesting_users)-set(can_edit[group.type_id])): verify_cannot_edit_group(url, group, username) + def test_group_about_team_parent(self): + """Team about page should show parent when parent is not an area""" + GroupFactory(type_id='team', parent=GroupFactory(type_id='area', acronym='gen')) + GroupFactory(type_id='team', parent=GroupFactory(type_id='ietf', acronym='iab')) + GroupFactory(type_id='team', parent=None) + + for team in Group.objects.filter(type='team').select_related('parent'): + url = urlreverse('ietf.group.views.group_about', kwargs=dict(acronym=team.acronym)) + r = self.client.get(url) 
+ self.assertEqual(r.status_code, 200) + if team.parent and team.parent.type_id != 'area': + self.assertContains(r, 'Parent') + self.assertContains(r, team.parent.acronym) + elif team.parent and team.parent.type_id == 'area': + self.assertContains(r, team.parent.name) + self.assertNotContains(r, '>Parent<') + else: + self.assertNotContains(r, '>Parent<') + def test_group_about_personnel(self): """Correct personnel should appear on the group About page""" group = GroupFactory() @@ -385,7 +597,6 @@ def test_materials(self): type_id="slides", ) doc.set_state(State.objects.get(type="slides", slug="active")) - DocAlias.objects.create(name=doc.name).docs.add(doc) for url in group_urlreverse_list(group, 'ietf.group.views.materials'): r = self.client.get(url) @@ -668,8 +879,9 @@ def test_edit_info(self): self.assertTrue(len(q('form .is-invalid')) > 0) # edit info - with (Path(settings.CHARTER_PATH) / ("%s-%s.txt" % (group.charter.canonical_name(), group.charter.rev))).open("w") as f: - f.write("This is a charter.") + ( + Path(settings.CHARTER_PATH) / f"{group.charter.name}-{group.charter.rev}.txt" + ).write_text("This is a charter.") area = group.parent ad = Person.objects.get(name="Areað Irector") state = GroupStateName.objects.get(slug="bof") @@ -711,7 +923,9 @@ def test_edit_info(self): self.assertEqual(group.list_archive, "archive.mars") self.assertEqual(group.description, '') - self.assertTrue((Path(settings.CHARTER_PATH) / ("%s-%s.txt" % (group.charter.canonical_name(), group.charter.rev))).exists()) + self.assertTrue( + (Path(settings.CHARTER_PATH) / f"{group.charter.name}-{group.charter.rev}.txt").exists() + ) self.assertEqual(len(outbox), 2) self.assertTrue('Personnel change' in outbox[0]['Subject']) for prefix in ['ad1','ad2','aread','marschairman','marsdelegate']: @@ -1265,7 +1479,7 @@ def create_test_milestones(self): RoleFactory(group=group,name_id='chair',person=PersonFactory(user__username='marschairman')) draft = WgDraftFactory(group=group) - m1 = GroupMilestone.objects.create(id=1, + m1 = GroupMilestone.objects.create( group=group, desc="Test 1", due=date_today(DEADLINE_TZINFO), @@ -1273,7 +1487,7 @@ def create_test_milestones(self): state_id="active") m1.docs.set([draft]) - m2 = GroupMilestone.objects.create(id=2, + m2 = GroupMilestone.objects.create( group=group, desc="Test 2", due=date_today(DEADLINE_TZINFO), @@ -1414,13 +1628,14 @@ def test_accept_milestone(self): events_before = group.groupevent_set.count() # add - r = self.client.post(url, { 'prefix': "m1", - 'm1-id': m1.id, - 'm1-desc': m1.desc, - 'm1-due': m1.due.strftime("%B %Y"), - 'm1-resolved': m1.resolved, - 'm1-docs': pklist(m1.docs), - 'm1-review': "accept", + mstr = f"m{m1.id}" + r = self.client.post(url, { 'prefix': mstr, + f'{mstr}-id': m1.id, + f'{mstr}-desc': m1.desc, + f'{mstr}-due': m1.due.strftime("%B %Y"), + f'{mstr}-resolved': m1.resolved, + f'{mstr}-docs': pklist(m1.docs), + f'{mstr}-review': "accept", 'action': "save", }) self.assertEqual(r.status_code, 302) @@ -1440,13 +1655,14 @@ def test_delete_milestone(self): events_before = group.groupevent_set.count() # delete - r = self.client.post(url, { 'prefix': "m1", - 'm1-id': m1.id, - 'm1-desc': m1.desc, - 'm1-due': m1.due.strftime("%B %Y"), - 'm1-resolved': "", - 'm1-docs': pklist(m1.docs), - 'm1-delete': "checked", + mstr = f"m{m1.id}" + r = self.client.post(url, { 'prefix': mstr, + f'{mstr}-id': m1.id, + f'{mstr}-desc': m1.desc, + f'{mstr}-due': m1.due.strftime("%B %Y"), + f'{mstr}-resolved': "", + f'{mstr}-docs': pklist(m1.docs), + f'{mstr}-delete': 
"checked", 'action': "save", }) self.assertEqual(r.status_code, 302) @@ -1469,13 +1685,14 @@ def test_edit_milestone(self): due = self.last_day_of_month(date_today(DEADLINE_TZINFO) + datetime.timedelta(days=365)) + mstr = f"m{m1.id}" # faulty post - r = self.client.post(url, { 'prefix': "m1", - 'm1-id': m1.id, - 'm1-desc': "", # no description - 'm1-due': due.strftime("%B %Y"), - 'm1-resolved': "", - 'm1-docs': doc_pks, + r = self.client.post(url, { 'prefix': mstr, + f'{mstr}-id': m1.id, + f'{mstr}-desc': "", # no description + f'{mstr}-due': due.strftime("%B %Y"), + f'{mstr}-resolved': "", + f'{mstr}-docs': doc_pks, 'action': "save", }) self.assertEqual(r.status_code, 200) @@ -1487,13 +1704,13 @@ def test_edit_milestone(self): # edit mailbox_before = len(outbox) - r = self.client.post(url, { 'prefix': "m1", - 'm1-id': m1.id, - 'm1-desc': "Test 2 - changed", - 'm1-due': due.strftime("%B %Y"), - 'm1-resolved': "Done", - 'm1-resolved_checkbox': "checked", - 'm1-docs': doc_pks, + r = self.client.post(url, { 'prefix': mstr, + f'{mstr}-id': m1.id, + f'{mstr}-desc': "Test 2 - changed", + f'{mstr}-due': due.strftime("%B %Y"), + f'{mstr}-resolved': "Done", + f'{mstr}-resolved_checkbox': "checked", + f'{mstr}-docs': doc_pks, 'action': "save", }) self.assertEqual(r.status_code, 302) @@ -1764,58 +1981,72 @@ def setUp(self): PersonFactory(user__username='plain') GroupFactory(acronym='mars',parent=GroupFactory(type_id='area')) GroupFactory(acronym='ames',parent=GroupFactory(type_id='area')) - self.group_alias_file = NamedTemporaryFile(delete=False) - self.group_alias_file.write(b"""# Generated by hand at 2015-02-12_16:30:52 -virtual.ietf.org anything -mars-ads@ietf.org xfilter-mars-ads -expand-mars-ads@virtual.ietf.org aread@example.org -mars-chairs@ietf.org xfilter-mars-chairs -expand-mars-chairs@virtual.ietf.org mars_chair@ietf.org -ames-ads@ietf.org xfilter-mars-ads -expand-ames-ads@virtual.ietf.org aread@example.org -ames-chairs@ietf.org xfilter-mars-chairs -expand-ames-chairs@virtual.ietf.org mars_chair@ietf.org -""") - self.group_alias_file.close() - self.saved_group_virtual_path = settings.GROUP_VIRTUAL_PATH - settings.GROUP_VIRTUAL_PATH = self.group_alias_file.name - - def tearDown(self): - settings.GROUP_VIRTUAL_PATH = self.saved_group_virtual_path - os.unlink(self.group_alias_file.name) - super().tearDown() - - def testAliases(self): + + @mock.patch("ietf.group.views.get_group_email_aliases") + def testAliases(self, mock_get_aliases): url = urlreverse('ietf.group.urls_info_details.redirect.email', kwargs=dict(acronym="mars")) r = self.client.get(url) self.assertEqual(r.status_code, 302) + mock_get_aliases.return_value = [ + {"acronym": "mars", "alias_type": "-ads", "expansion": "aread@example.org"}, + {"acronym": "mars", "alias_type": "-chairs", "expansion": "mars_chair@ietf.org"}, + ] for testdict in [dict(acronym="mars"),dict(acronym="mars",group_type="wg")]: url = urlreverse('ietf.group.urls_info_details.redirect.email', kwargs=testdict) r = self.client.get(url,follow=True) + self.assertEqual( + mock_get_aliases.call_args, + mock.call(testdict.get("acronym", None), testdict.get("group_type", None)), + ) self.assertTrue(all([x in unicontent(r) for x in ['mars-ads@','mars-chairs@']])) self.assertFalse(any([x in unicontent(r) for x in ['ames-ads@','ames-chairs@']])) url = urlreverse('ietf.group.views.email_aliases', kwargs=dict()) login_testing_unauthorized(self, "plain", url) + + mock_get_aliases.return_value = [ + {"acronym": "mars", "alias_type": "-ads", "expansion": "aread@example.org"}, 
+ {"acronym": "mars", "alias_type": "-chairs", "expansion": "mars_chair@ietf.org"}, + {"acronym": "ames", "alias_type": "-ads", "expansion": "aread@example.org"}, + {"acronym": "ames", "alias_type": "-chairs", "expansion": "mars_chair@ietf.org"}, + ] r = self.client.get(url) self.assertTrue(r.status_code,200) + self.assertEqual(mock_get_aliases.call_args, mock.call(None, None)) self.assertTrue(all([x in unicontent(r) for x in ['mars-ads@','mars-chairs@','ames-ads@','ames-chairs@']])) url = urlreverse('ietf.group.views.email_aliases', kwargs=dict(group_type="wg")) + mock_get_aliases.return_value = [ + {"acronym": "mars", "alias_type": "-ads", "expansion": "aread@example.org"}, + {"acronym": "mars", "alias_type": "-chairs", "expansion": "mars_chair@ietf.org"}, + {"acronym": "ames", "alias_type": "-ads", "expansion": "aread@example.org"}, + {"acronym": "ames", "alias_type": "-chairs", "expansion": "mars_chair@ietf.org"}, + ] r = self.client.get(url) self.assertEqual(r.status_code,200) + self.assertEqual(mock_get_aliases.call_args, mock.call(None, "wg")) self.assertContains(r, 'mars-ads@') url = urlreverse('ietf.group.views.email_aliases', kwargs=dict(group_type="rg")) + mock_get_aliases.return_value = [] r = self.client.get(url) self.assertEqual(r.status_code,200) + self.assertEqual(mock_get_aliases.call_args, mock.call(None, "rg")) self.assertNotContains(r, 'mars-ads@') - def testExpansions(self): + @mock.patch("ietf.group.views.get_group_email_aliases") + def testExpansions(self, mock_get_aliases): + mock_get_aliases.return_value = [ + {"acronym": "mars", "alias_type": "-ads", "expansion": "aread@example.org"}, + {"acronym": "mars", "alias_type": "-chairs", "expansion": "mars_chair@ietf.org"}, + {"acronym": "ames", "alias_type": "-ads", "expansion": "aread@example.org"}, + {"acronym": "ames", "alias_type": "-chairs", "expansion": "mars_chair@ietf.org"}, + ] url = urlreverse('ietf.group.views.email', kwargs=dict(acronym="mars")) r = self.client.get(url) self.assertEqual(r.status_code,200) + self.assertEqual(mock_get_aliases.call_args, mock.call("mars", None)) self.assertContains(r, 'Email aliases') self.assertContains(r, 'mars-ads@ietf.org') self.assertContains(r, 'group_personnel_change') @@ -2040,8 +2271,17 @@ def test_admin_acronym_validation(self): self.assertTrue(form.is_valid()) form = AdminGroupForm({'acronym':'shouldfail-','name':'should fail','type':'wg','state':'active','used_roles':'[]','time':now}) self.assertIn('acronym',form.errors) + form = AdminGroupForm({'acronym':'shouldfail-','name':'should fail','type':'sdo','state':'active','used_roles':'[]','time':now}) + self.assertIn('acronym',form.errors) form = AdminGroupForm({'acronym':'-shouldfail','name':'should fail','type':'wg','state':'active','used_roles':'[]','time':now}) self.assertIn('acronym',form.errors) + form = AdminGroupForm({'acronym':'-shouldfail','name':'should fail','type':'sdo','state':'active','used_roles':'[]','time':now}) + self.assertIn('acronym',form.errors) + # SDO groups (and only SDO groups) can have a leading number + form = AdminGroupForm({'acronym':'3gpp-should-pass','name':'should pass','type':'sdo','state':'active','used_roles':'[]','time':now}) + self.assertTrue(form.is_valid()) + form = AdminGroupForm({'acronym':'123shouldfail','name':'should fail','type':'wg','state':'active','used_roles':'[]','time':now}) + self.assertIn('acronym',form.errors) wg = GroupFactory(acronym='bad-idea', type_id='wg') # There are some existing wg and programs with hyphens in their acronyms. 
form = AdminGroupForm({'acronym':wg.acronym,'name':wg.name,'type':wg.type_id,'state':wg.state_id,'used_roles':str(wg.used_roles),'time':now},instance=wg) diff --git a/ietf/group/tests_review.py b/ietf/group/tests_review.py index a03b806f8f..bb9b79a416 100644 --- a/ietf/group/tests_review.py +++ b/ietf/group/tests_review.py @@ -815,3 +815,170 @@ def test_reset_next_reviewer(self): self.assertEqual(NextReviewerInTeam.objects.get(team=group).next_reviewer, reviewers[target_index].person) self.client.logout() target_index += 2 + +class RequestsHistoryTests(TestCase): + def test_requests_history_overview_page(self): + # Make assigned assignment + review_req = ReviewRequestFactory(state_id='assigned') + assignment = ReviewAssignmentFactory(review_request=review_req, + state_id='assigned', + reviewer=EmailFactory(), + assigned_on = review_req.time) + group = review_req.team + + for url in [urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }), + urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym , + 'group_type': group.type_id}), + urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }) + + '?since=3m', + urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym , + 'group_type': group.type_id }) + + '?since=3m']: + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, review_req.doc.name) + self.assertContains(r, 'Assigned') + self.assertContains(r, escape(assignment.reviewer.person.name)) + + url = urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }) + + assignment.state = ReviewAssignmentStateName.objects.get(slug="completed") + assignment.result = ReviewResultName.objects.get(slug="ready") + assignment.save() + + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, review_req.doc.name) + self.assertContains(r, 'Assigned') + self.assertContains(r, 'Completed') + + def test_requests_history_filter_page(self): + # First assignment as assigned + review_req = ReviewRequestFactory(state_id = 'assigned', + doc = DocumentFactory()) + assignment = ReviewAssignmentFactory(review_request = review_req, + state_id = 'assigned', + reviewer = EmailFactory(), + assigned_on = review_req.time) + group = review_req.team + + # Second assignment in same group as accepted + review_req2 = ReviewRequestFactory(state_id = 'assigned', + team = review_req.team, + doc = DocumentFactory()) + assignment2 = ReviewAssignmentFactory(review_request = review_req2, + state_id='accepted', + reviewer = EmailFactory(), + assigned_on = review_req2.time) + + # Modify the assignment to be completed, and mark it ready + assignment2.state = ReviewAssignmentStateName.objects.get(slug="completed") + assignment2.result = ReviewResultName.objects.get(slug="ready") + assignment2.save() + + # Check that we have all information when we do not filter + url = urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, review_req.doc.name) + self.assertContains(r, review_req2.doc.name) + self.assertContains(r, 'data-text="Assigned"') + self.assertContains(r, 'data-text="Accepted"') + self.assertContains(r, 'data-text="Completed"') + self.assertContains(r, 'data-text="Ready"') + self.assertContains(r, escape(assignment.reviewer.person.name)) + self.assertContains(r, 
escape(assignment2.reviewer.person.name)) + + # Check first reviewer history + for url in [urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }) + + '?reviewer_email=' + str(assignment.reviewer), + urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym , + 'group_type': group.type_id}) + + '?reviewer_email=' + str(assignment.reviewer)]: + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, review_req.doc.name) + self.assertNotContains(r, review_req2.doc.name) + self.assertContains(r, 'data-text="Assigned"') + self.assertNotContains(r, 'data-text="Accepted"') + self.assertNotContains(r, 'data-text="Completed"') + self.assertNotContains(r, 'data-text="Ready"') + self.assertContains(r, escape(assignment.reviewer.person.name)) + self.assertNotContains(r, escape(assignment2.reviewer.person.name)) + + # Check second reviewer history + for url in [urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }) + + '?reviewer_email=' + str(assignment2.reviewer), + urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym , + 'group_type': group.type_id}) + + '?reviewer_email=' + str(assignment2.reviewer)]: + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertNotContains(r, review_req.doc.name) + self.assertContains(r, review_req2.doc.name) + self.assertNotContains(r, 'data-text="Assigned"') + self.assertContains(r, 'data-text="Accepted"') + self.assertContains(r, 'data-text="Completed"') + self.assertContains(r, 'data-text="Ready"') + self.assertNotContains(r, escape(assignment.reviewer.person.name)) + self.assertContains(r, escape(assignment2.reviewer.person.name)) + + # Check for reviewer that does not have anything + url = urlreverse(ietf.group.views.review_requests_history, + kwargs={ 'acronym': group.acronym }) + '?reviewer_email=nobody@nowhere.example.org' + + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertNotContains(r, review_req.doc.name) + self.assertNotContains(r, 'data-text="Assigned"') + self.assertNotContains(r, 'data-text="Accepted"') + self.assertNotContains(r, 'data-text="Completed"') + + def test_requests_history_invalid_filter_parameters(self): + # First assignment as assigned + review_req = ReviewRequestFactory(state_id="assigned", doc=DocumentFactory()) + group = review_req.team + url = urlreverse( + "ietf.group.views.review_requests_history", + kwargs={"acronym": group.acronym}, + ) + invalid_reviewer_emails = [ + "%00null@example.com", # urlencoded null character + "null@exa%00mple.com", # urlencoded null character + "\x00null@example.com", # literal null character + "null@ex\x00ample.com", # literal null character + ] + for invalid_email in invalid_reviewer_emails: + r = self.client.get( + url + f"?reviewer_email={invalid_email}" + ) + self.assertEqual( + r.status_code, + 400, + f"should return a 400 response for reviewer_email={repr(invalid_email)}" + ) + + invalid_since_choices = [ + "forever", # not an option + "all\x00", # literal null character + "a%00ll", # urlencoded null character + ] + for invalid_since in invalid_since_choices: + r = self.client.get( + url + f"?since={invalid_since}" + ) + self.assertEqual( + r.status_code, + 400, + f"should return a 400 response for since={repr(invalid_since)}" + ) diff --git a/ietf/group/tests_serializers.py b/ietf/group/tests_serializers.py new file mode 100644 index 0000000000..b584a17ae2 --- /dev/null +++ 
b/ietf/group/tests_serializers.py @@ -0,0 +1,96 @@ +# Copyright The IETF Trust 2026, All Rights Reserved +from ietf.group.factories import RoleFactory, GroupFactory +from ietf.group.serializers import ( + AreaDirectorSerializer, + AreaSerializer, + GroupSerializer, +) +from ietf.person.factories import EmailFactory +from ietf.utils.test_utils import TestCase + + +class GroupSerializerTests(TestCase): + def test_serializes(self): + wg = GroupFactory() + serialized = GroupSerializer(wg).data + self.assertEqual( + serialized, + { + "acronym": wg.acronym, + "name": wg.name, + "type": "wg", + "list_email": wg.list_email, + }, + ) + + +class AreaDirectorSerializerTests(TestCase): + def test_serializes_role(self): + """Should serialize a Role correctly""" + role = RoleFactory(group__type_id="area", name_id="ad") + serialized = AreaDirectorSerializer(role).data + self.assertEqual( + serialized, + {"email": role.email.email_address(), "name": role.person.plain_name()}, + ) + + def test_serializes_email(self): + """Should serialize an Email correctly""" + email = EmailFactory() + serialized = AreaDirectorSerializer(email).data + self.assertEqual( + serialized, + { + "email": email.email_address(), + "name": email.person.plain_name() if email.person else None, + }, + ) + + +class AreaSerializerTests(TestCase): + def test_serializes_active_area(self): + """Should serialize an active area correctly""" + area = GroupFactory(type_id="area", state_id="active") + serialized = AreaSerializer(area).data + self.assertEqual( + serialized, + { + "acronym": area.acronym, + "name": area.name, + "ads": [], + }, + ) + ad_roles = RoleFactory.create_batch(2, group=area, name_id="ad") + serialized = AreaSerializer(area).data + self.assertEqual(serialized["acronym"], area.acronym) + self.assertEqual(serialized["name"], area.name) + self.assertCountEqual( + serialized["ads"], + [ + {"email": ad.email.email_address(), "name": ad.person.plain_name()} + for ad in ad_roles + ], + ) + + def test_serializes_inactive_area(self): + """Should serialize an inactive area correctly""" + area = GroupFactory(type_id="area", state_id="conclude") + serialized = AreaSerializer(area).data + self.assertEqual( + serialized, + { + "acronym": area.acronym, + "name": area.name, + "ads": [], + }, + ) + RoleFactory.create_batch(2, group=area, name_id="ad") + serialized = AreaSerializer(area).data + self.assertEqual( + serialized, + { + "acronym": area.acronym, + "name": area.name, + "ads": [], + }, + ) diff --git a/ietf/group/urls.py b/ietf/group/urls.py index b2af8d9e2b..8354aba063 100644 --- a/ietf/group/urls.py +++ b/ietf/group/urls.py @@ -24,6 +24,7 @@ url(r'^about/status/edit/$', views.group_about_status_edit), url(r'^about/status/meeting/(?P\d+)/$', views.group_about_status_meeting), url(r'^history/$',views.history), + url(r'^requestshistory/$',views.review_requests_history), url(r'^history/addcomment/$',views.add_comment), url(r'^email/$', views.email), url(r'^deps\.json$', views.dependencies), @@ -57,7 +58,9 @@ group_urls = [ - url(r'^$', views.active_groups), + url(r'^$', views.active_groups), + url(r'^leadership/(?P(wg|rg))/$', views.group_leadership), + url(r'^leadership/(?P(wg|rg))/csv/$', views.group_leadership_csv), url(r'^groupstats.json', views.group_stats_data, None, 'ietf.group.views.group_stats_data'), url(r'^groupmenu.json', views.group_menu_data, None, 'ietf.group.views.group_menu_data'), url(r'^chartering/$', views.chartering_groups), diff --git a/ietf/group/utils.py b/ietf/group/utils.py index f7e4b2f175..6777ed1933 
100644 --- a/ietf/group/utils.py +++ b/ietf/group/utils.py @@ -1,12 +1,13 @@ # Copyright The IETF Trust 2012-2023, All Rights Reserved # -*- coding: utf-8 -*- +import datetime - -import io -import os +from itertools import chain +from pathlib import Path from django.db.models import Q from django.shortcuts import get_object_or_404 +from django.utils import timezone from django.utils.html import format_html from django.utils.safestring import mark_safe from django.urls import reverse as urlreverse @@ -15,13 +16,13 @@ from ietf.community.models import CommunityList, SearchRule from ietf.community.utils import reset_name_contains_index_for_rule, can_manage_community_list -from ietf.doc.models import Document, State +from ietf.doc.models import Document, State, RelatedDocument from ietf.group.models import Group, RoleHistory, Role, GroupFeatures, GroupEvent from ietf.ietfauth.utils import has_role from ietf.name.models import GroupTypeName, RoleName from ietf.person.models import Email from ietf.review.utils import can_manage_review_requests_for_team -from ietf.utils import log +from ietf.utils import log, markdown from ietf.utils.history import get_history_object_for, copy_many_to_many_for_history from ietf.doc.templatetags.ietf_filters import is_valid_url from functools import reduce @@ -55,15 +56,14 @@ def get_charter_text(group): if (h.rev > c.rev and not (c_appr and not h_appr)) or (h_appr and not c_appr): c = h - filename = os.path.join(c.get_file_path(), "%s-%s.txt" % (c.canonical_name(), c.rev)) + filename = Path(c.get_file_path()) / f"{c.name}-{c.rev}.txt" try: - with io.open(filename, 'rb') as f: - text = f.read() - try: - text = text.decode('utf8') - except UnicodeDecodeError: - text = text.decode('latin1') - return text + text = filename.read_bytes() + try: + text = text.decode('utf8') + except UnicodeDecodeError: + text = text.decode('latin1') + return text except IOError: return 'Error Loading Group Charter' @@ -154,17 +154,23 @@ def can_manage_materials(user, group): def can_manage_session_materials(user, group, session): return has_role(user, 'Secretariat') or (group.has_role(user, group.features.matman_roles) and not session.is_material_submission_cutoff()) -# Maybe this should be cached... 
def can_manage_some_groups(user): if not user.is_authenticated: return False + authroles = set( + chain.from_iterable( + GroupFeatures.objects.values_list("groupman_authroles", flat=True) + ) + ) + extra_role_qs = dict() for gf in GroupFeatures.objects.all(): - for authrole in gf.groupman_authroles: - if has_role(user, authrole): - return True - if Role.objects.filter(name__in=gf.groupman_roles, group__type_id=gf.type_id, person__user=user).exists(): - return True - return False + extra_role_qs[f"{gf.type_id} groupman roles"] = Q( + name__in=gf.groupman_roles, + group__type_id=gf.type_id, + group__state__in=["active", "bof", "proposed"], + ) + return has_role(user, authroles, extra_role_qs=extra_role_qs) + def can_provide_status_update(user, group): if not group.features.acts_like_wg: @@ -191,7 +197,7 @@ def setup_default_community_list_for_group(group): community_list=clist, rule_type="group_rfc", group=group, - state=State.objects.get(slug="rfc", type="draft"), + state=State.objects.get(slug="published", type="rfc"), ) SearchRule.objects.create( community_list=clist, @@ -230,9 +236,11 @@ def construct_group_menu_context(request, group, selected, group_type, others): import ietf.group.views entries.append(("Review requests", urlreverse(ietf.group.views.review_requests, kwargs=kwargs))) entries.append(("Reviewers", urlreverse(ietf.group.views.reviewer_overview, kwargs=kwargs))) - + entries.append(("Reviews History", urlreverse(ietf.group.views.review_requests_history, kwargs=kwargs))) if group.features.has_meetings: entries.append(("Meetings", urlreverse("ietf.group.views.meetings", kwargs=kwargs))) + if group.acronym in ["iesg"]: + entries.append(("Working Groups", urlreverse("ietf.iesg.views.working_groups"))) if group.acronym in ["iab", "iesg"]: entries.append(("Statements", urlreverse("ietf.group.views.statements", kwargs=kwargs))) entries.append(("Appeals", urlreverse("ietf.group.views.appeals", kwargs=kwargs))) @@ -243,7 +251,6 @@ def construct_group_menu_context(request, group, selected, group_type, others): if is_valid_url(group.list_archive): entries.append((mark_safe("List archive »"), group.list_archive)) - # actions actions = [] @@ -355,3 +362,188 @@ def update_role_set(group, role_name, new_value, by): e.save() return added, removed + + +class GroupAliasGenerator: + days = 5 * 365 + active_states = ["active", "bof", "proposed"] + group_types = [ + "wg", + "rg", + "rag", + "dir", + "team", + "review", + "program", + "rfcedtyp", + "edappr", + "edwg", + ] # This should become groupfeature driven... 
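The can_manage_some_groups() rewrite replaces the old per-GroupFeatures loop with a single has_role() call: every row's groupman_authroles list is flattened into one set, and the per-type groupman role checks become labelled Q filters passed via the new extra_role_qs argument, rather than one Role.objects.filter(...).exists() query per group type as before. The flattening step on its own looks like this (a small illustration with hypothetical authrole values standing in for real GroupFeatures rows):

    from itertools import chain

    # Hypothetical stand-ins for GroupFeatures.objects.values_list("groupman_authroles", flat=True)
    groupman_authroles_per_type = [["Secretariat"], ["Secretariat", "Area Director"], []]

    authroles = set(chain.from_iterable(groupman_authroles_per_type))
    assert authroles == {"Secretariat", "Area Director"}  # duplicates and empty lists collapse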
+ no_ad_group_types = ["rg", "rag", "team", "program", "rfcedtyp", "edappr", "edwg"] + + def __init__(self, group_queryset=None): + if group_queryset is None: + self.group_queryset = Group.objects.all() + else: + self.group_queryset = group_queryset + + def __iter__(self): + show_since = timezone.now() - datetime.timedelta(days=self.days) + + # Loop through each group type and build -ads and -chairs entries + for g in self.group_types: + domains = ["ietf"] + if g in ("rg", "rag"): + domains.append("irtf") + if g == "program": + domains.append("iab") + + entries = self.group_queryset.filter(type=g).all() + active_entries = entries.filter(state__in=self.active_states) + inactive_recent_entries = entries.exclude( + state__in=self.active_states + ).filter(time__gte=show_since) + interesting_entries = active_entries | inactive_recent_entries + + for e in interesting_entries.distinct().iterator(): + name = e.acronym + + # Research groups, teams, and programs do not have -ads lists + if not g in self.no_ad_group_types: + ad_emails = get_group_ad_emails(e) + if ad_emails: + yield name + "-ads", domains, list(ad_emails) + # All group types have -chairs lists + chair_emails = get_group_role_emails(e, ["chair", "secr"]) + if chair_emails: + yield name + "-chairs", domains, list(chair_emails) + + # The area lists include every chair in active working groups in the area + areas = self.group_queryset.filter(type="area").all() + active_areas = areas.filter(state__in=self.active_states) + for area in active_areas: + name = area.acronym + area_ad_emails = get_group_role_emails(area, ["pre-ad", "ad", "chair"]) + if area_ad_emails: + yield name + "-ads", ["ietf"], list(area_ad_emails) + chair_emails = get_child_group_role_emails(area, ["chair", "secr"]) | area_ad_emails + if chair_emails: + yield name + "-chairs", ["ietf"], list(chair_emails) + + # Other groups with chairs that require Internet-Draft submission approval + gtypes = GroupTypeName.objects.values_list("slug", flat=True) + special_groups = self.group_queryset.filter( + type__features__req_subm_approval=True, acronym__in=gtypes, state="active" + ) + for group in special_groups: + chair_emails = get_group_role_emails(group, ["chair", "delegate"]) + if chair_emails: + yield group.acronym + "-chairs", ["ietf"], list(chair_emails) + + +def get_group_email_aliases(acronym, group_type): + aliases = [] + group_queryset = Group.objects.all() + if acronym: + group_queryset = group_queryset.filter(acronym=acronym) + if group_type: + group_queryset = group_queryset.filter(type__slug=group_type) + for (alias, _, alist) in GroupAliasGenerator(group_queryset): + acro, _hyphen, alias_type = alias.partition("-") + expansion = ", ".join(sorted(alist)) + aliases.append({ + "acronym": acro, + "alias_type": f"-{alias_type}" if alias_type else "", + "expansion": expansion, + }) + return sorted(aliases, key=lambda a: a["acronym"]) + + +def role_holder_emails(): + """Get queryset of active Emails for group role holders""" + group_types_of_interest = [ + "ag", + "area", + "dir", + "iab", + "ietf", + "irtf", + "nomcom", + "rg", + "team", + "wg", + "rag", + ] + roles = Role.objects.filter( + group__state__slug="active", + group__type__in=group_types_of_interest, + ) + emails = Email.objects.filter(active=True).exclude( + address__startswith="unknown-email-" + ) + return emails.filter(person__role__in=roles).distinct() + + +def fill_in_charter_info(group, include_drafts=False): + group.areadirector = getattr(group.ad_role(),'email',None) + + personnel = {} + for r in 
Role.objects.filter(group=group).order_by('person__name').select_related("email", "person", "name"): + if r.name_id not in personnel: + personnel[r.name_id] = [] + personnel[r.name_id].append(r) + + if group.parent and group.parent.type_id == "area" and group.ad_role() and "ad" not in personnel: + ad_roles = list(Role.objects.filter(group=group.parent, name="ad", person=group.ad_role().person)) + if ad_roles: + personnel["ad"] = ad_roles + + group.personnel = [] + for role_name_slug, roles in personnel.items(): + label = roles[0].name.name + if len(roles) > 1: + if label.endswith("y"): + label = label[:-1] + "ies" + else: + label += "s" + + group.personnel.append((role_name_slug, label, roles)) + + group.personnel.sort(key=lambda t: t[2][0].name.order) + + milestone_state = "charter" if group.state_id == "proposed" else "active" + group.milestones = group.groupmilestone_set.filter(state=milestone_state) + if group.uses_milestone_dates: + group.milestones = group.milestones.order_by('resolved', 'due') + else: + group.milestones = group.milestones.order_by('resolved', 'order') + + if group.charter: + group.charter_text = get_charter_text(group) + else: + group.charter_text = "Not chartered yet." + group.charter_html = markdown.markdown(group.charter_text) + + +def fill_in_wg_roles(group): + def get_roles(slug, default): + for role_slug, label, roles in group.personnel: + if slug == role_slug: + return roles + return default + + group.chairs = get_roles("chair", []) + ads = get_roles("ad", []) + group.areadirector = ads[0] if ads else None + group.techadvisors = get_roles("techadv", []) + group.editors = get_roles("editor", []) + group.secretaries = get_roles("secr", []) + + +def fill_in_wg_drafts(group): + group.drafts = Document.objects.filter(type_id="draft", group=group).order_by("name") + group.rfcs = Document.objects.filter(type_id="rfc", group=group).order_by("rfc_number") + for rfc in group.rfcs: + # TODO: remote_field? 
+ rfc.remote_field = RelatedDocument.objects.filter(source=rfc,relationship_id__in=['obs','updates']).distinct() + rfc.invrel = RelatedDocument.objects.filter(target=rfc,relationship_id__in=['obs','updates']).distinct() diff --git a/ietf/group/views.py b/ietf/group/views.py index 593c649bb1..8561a5059f 100644 --- a/ietf/group/views.py +++ b/ietf/group/views.py @@ -35,33 +35,42 @@ import copy +import csv import datetime import itertools -import io import math -import re import json +import types from collections import OrderedDict, defaultdict +from pathlib import Path from simple_history.utils import update_change_reason from django import forms from django.conf import settings from django.contrib.auth.decorators import login_required -from django.db.models import Q, Count, OuterRef, Subquery -from django.http import HttpResponse, HttpResponseRedirect, Http404, JsonResponse +from django.db.models import Count, F, OuterRef, Prefetch, Q, Subquery, TextField, Value +from django.db.models.functions import Coalesce +from django.http import ( + HttpResponse, + HttpResponseRedirect, + Http404, + JsonResponse, + HttpResponseBadRequest, +) from django.shortcuts import render, redirect, get_object_or_404 from django.template.loader import render_to_string from django.urls import reverse as urlreverse from django.utils import timezone from django.utils.html import escape from django.views.decorators.cache import cache_page, cache_control +from django.urls import reverse import debug # pyflakes:ignore from ietf.community.models import CommunityList, EmailSubscription from ietf.community.utils import docs_tracked_by_community_list -from ietf.doc.models import DocTagName, State, DocAlias, RelatedDocument, Document, DocEvent +from ietf.doc.models import DocTagName, State, RelatedDocument, Document, DocEvent from ietf.doc.templatetags.ietf_filters import clean_whitespace from ietf.doc.utils import get_chartering_type, get_tags_for_stream_id from ietf.doc.utils_charter import charter_name_for_group, replace_charter_of_replaced_group @@ -73,16 +82,18 @@ from ietf.group.mails import email_admin_re_charter, email_personnel_change, email_comment from ietf.group.models import ( Group, Role, GroupEvent, GroupStateTransitions, ChangeStateGroupEvent, GroupFeatures, AppealArtifact ) -from ietf.group.utils import (get_charter_text, can_manage_all_groups_of_type, +from ietf.group.utils import (can_manage_all_groups_of_type, milestone_reviewer_for_group_type, can_provide_status_update, can_manage_materials, group_attribute_change_desc, construct_group_menu_context, get_group_materials, save_group_in_history, can_manage_group, update_role_set, - get_group_or_404, setup_default_community_list_for_group, ) + get_group_or_404, setup_default_community_list_for_group, fill_in_charter_info, + get_group_email_aliases) # from ietf.ietfauth.utils import has_role, is_authorized_in_group from ietf.mailtrigger.utils import gather_relevant_expansions from ietf.meeting.helpers import get_meeting +from ietf.meeting.models import ImportantDate, SchedTimeSessAssignment, SchedulingEvent from ietf.meeting.utils import group_sessions from ietf.name.models import GroupTypeName, StreamName from ietf.person.models import Email, Person @@ -91,11 +102,9 @@ from ietf.review.policies import get_reviewer_queue_policy from ietf.review.utils import (can_manage_review_requests_for_team, can_access_review_stats_for_team, - extract_revision_ordered_review_requests_for_documents_and_replaced, assign_review_request_to_reviewer, close_review_request, - 
suggested_review_requests_for_team, unavailable_periods_to_list, current_unavailable_periods_for_reviewers, @@ -116,6 +125,7 @@ from ietf.mailtrigger.utils import gather_address_lists from ietf.mailtrigger.models import Recipient from ietf.settings import MAILING_LIST_INFO_URL +from ietf.utils.decorators import ignore_view_kwargs from ietf.utils.response import permission_denied from ietf.utils.text import strip_suffix from ietf.utils import markdown @@ -128,89 +138,17 @@ def roles(group, role_name): return Role.objects.filter(group=group, name=role_name).select_related("email", "person") -def fill_in_charter_info(group, include_drafts=False): - group.areadirector = getattr(group.ad_role(),'email',None) - - personnel = {} - for r in Role.objects.filter(group=group).order_by('person__name').select_related("email", "person", "name"): - if r.name_id not in personnel: - personnel[r.name_id] = [] - personnel[r.name_id].append(r) - - if group.parent and group.parent.type_id == "area" and group.ad_role() and "ad" not in personnel: - ad_roles = list(Role.objects.filter(group=group.parent, name="ad", person=group.ad_role().person)) - if ad_roles: - personnel["ad"] = ad_roles - - group.personnel = [] - for role_name_slug, roles in personnel.items(): - label = roles[0].name.name - if len(roles) > 1: - if label.endswith("y"): - label = label[:-1] + "ies" - else: - label += "s" - - group.personnel.append((role_name_slug, label, roles)) - - group.personnel.sort(key=lambda t: t[2][0].name.order) - - milestone_state = "charter" if group.state_id == "proposed" else "active" - group.milestones = group.groupmilestone_set.filter(state=milestone_state) - if group.uses_milestone_dates: - group.milestones = group.milestones.order_by('resolved', 'due') - else: - group.milestones = group.milestones.order_by('resolved', 'order') - - if group.charter: - group.charter_text = get_charter_text(group) - else: - group.charter_text = "Not chartered yet." - group.charter_html = markdown.markdown(group.charter_text) - def extract_last_name(role): return role.person.name_parts()[3] -def fill_in_wg_roles(group): - def get_roles(slug, default): - for role_slug, label, roles in group.personnel: - if slug == role_slug: - return roles - return default - - group.chairs = get_roles("chair", []) - ads = get_roles("ad", []) - group.areadirector = ads[0] if ads else None - group.techadvisors = get_roles("techadv", []) - group.editors = get_roles("editor", []) - group.secretaries = get_roles("secr", []) - -def fill_in_wg_drafts(group): - aliases = DocAlias.objects.filter(docs__type="draft", docs__group=group).prefetch_related('docs').order_by("name") - group.drafts = [] - group.rfcs = [] - for a in aliases: - if a.name.startswith("draft"): - group.drafts.append(a) - else: - group.rfcs.append(a) - a.remote_field = RelatedDocument.objects.filter(source=a.document,relationship_id__in=['obs','updates']).distinct() - a.invrel = RelatedDocument.objects.filter(target=a,relationship_id__in=['obs','updates']).distinct() - - -def check_group_email_aliases(): - pattern = re.compile(r'expand-(.*?)(-\w+)@.*? 
+(.*)$') - tot_count = 0 - good_count = 0 - with io.open(settings.GROUP_VIRTUAL_PATH,"r") as virtual_file: - for line in virtual_file.readlines(): - m = pattern.match(line) - tot_count += 1 - if m: - good_count += 1 - if good_count > 50 and tot_count < 3*good_count: - return True - return False + +def response_from_file(fpath: Path) -> HttpResponse: + """Helper to shovel a file back in an HttpResponse""" + try: + content = fpath.read_bytes() + except IOError: + raise Http404 + return HttpResponse(content, content_type="text/plain; charset=utf-8") # --- View functions --------------------------------------------------- @@ -218,58 +156,26 @@ def check_group_email_aliases(): def wg_summary_area(request, group_type): if group_type != "wg": raise Http404 - areas = Group.objects.filter(type="area", state="active").order_by("name") - for area in areas: - area.groups = Group.objects.filter(parent=area, type="wg", state="active").order_by("acronym") - for group in area.groups: - group.chairs = sorted(roles(group, "chair"), key=extract_last_name) - - areas = [a for a in areas if a.groups] + return response_from_file(Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary.txt") - return render(request, 'group/1wg-summary.txt', - { 'areas': areas }, - content_type='text/plain; charset=UTF-8') def wg_summary_acronym(request, group_type): if group_type != "wg": raise Http404 - areas = Group.objects.filter(type="area", state="active").order_by("name") - groups = Group.objects.filter(type="wg", state="active").order_by("acronym").select_related("parent") - for group in groups: - group.chairs = sorted(roles(group, "chair"), key=extract_last_name) - return render(request, 'group/1wg-summary-by-acronym.txt', - { 'areas': areas, - 'groups': groups }, - content_type='text/plain; charset=UTF-8') + return response_from_file(Path(settings.GROUP_SUMMARY_PATH) / "1wg-summary-by-acronym.txt") + -@cache_page ( 60 * 60, cache="slowpages" ) def wg_charters(request, group_type): if group_type != "wg": raise Http404 - areas = Group.objects.filter(type="area", state="active").order_by("name") - for area in areas: - area.groups = Group.objects.filter(parent=area, type="wg", state="active").order_by("name") - for group in area.groups: - fill_in_charter_info(group) - fill_in_wg_roles(group) - fill_in_wg_drafts(group) - return render(request, 'group/1wg-charters.txt', - { 'areas': areas }, - content_type='text/plain; charset=UTF-8') - -@cache_page ( 60 * 60, cache="slowpages" ) + return response_from_file(Path(settings.CHARTER_PATH) / "1wg-charters.txt") + + def wg_charters_by_acronym(request, group_type): if group_type != "wg": raise Http404 + return response_from_file(Path(settings.CHARTER_PATH) / "1wg-charters-by-acronym.txt") - groups = Group.objects.filter(type="wg", state="active").exclude(parent=None).order_by("acronym") - for group in groups: - fill_in_charter_info(group) - fill_in_wg_roles(group) - fill_in_wg_drafts(group) - return render(request, 'group/1wg-charters-by-acronym.txt', - { 'groups': groups }, - content_type='text/plain; charset=UTF-8') def active_groups(request, group_type=None): @@ -291,7 +197,7 @@ def active_groups(request, group_type=None): return active_dirs(request) elif group_type == "review": return active_review_dirs(request) - elif group_type in ("program", "iabasg"): + elif group_type in ("program", "iabasg","iabworkshop"): return active_iab(request) elif group_type == "adm": return active_adm(request) @@ -314,6 +220,7 @@ def active_group_types(request): "area", "program", "iabasg", + "iabworkshop" 
"adm", ] ) @@ -338,13 +245,22 @@ def active_review_dirs(request): return render(request, 'group/active_review_dirs.html', {'dirs' : dirs }) def active_teams(request): - teams = Group.objects.filter(type="team", state="active").order_by("name") + parent_type_order = {"area": 1, "adm": 3, None: 4} + + def team_sort_key(group): + type_id = group.parent.type_id if group.parent else None + return (parent_type_order.get(type_id, 2), group.parent.name if group.parent else "", group.name) + + teams = sorted( + Group.objects.filter(type="team", state="active").select_related("parent"), + key=team_sort_key, + ) for group in teams: group.chairs = sorted(roles(group, "chair"), key=extract_last_name) - return render(request, 'group/active_teams.html', {'teams' : teams }) + return render(request, 'group/active_teams.html', {'teams': teams}) def active_iab(request): - iabgroups = Group.objects.filter(type__in=("program","iabasg"), state="active").order_by("-type_id","name") + iabgroups = Group.objects.filter(type__in=("program","iabasg","iabworkshop"), state="active").order_by("-type_id","name") for group in iabgroups: group.leads = sorted(roles(group, "lead"), key=extract_last_name) return render(request, 'group/active_iabgroups.html', {'iabgroups' : iabgroups }) @@ -379,7 +295,7 @@ def active_wgs(request): if group.list_subscribe.startswith('http'): group.list_subscribe_url = group.list_subscribe elif group.list_email.endswith('@ietf.org'): - group.list_subscribe_url = MAILING_LIST_INFO_URL % {'list_addr':group.list_email.split('@')[0]} + group.list_subscribe_url = MAILING_LIST_INFO_URL % {'list_addr':group.list_email.split('@')[0].lower(),'domain':'ietf.org'} else: group.list_subscribe_url = "mailto:"+group.list_subscribe @@ -433,35 +349,86 @@ def chartering_groups(request): dict(charter_states=charter_states, group_types=group_types)) + def concluded_groups(request): sections = OrderedDict() - sections['WGs'] = Group.objects.filter(type='wg', state="conclude").select_related("state", "charter").order_by("parent__name","acronym") - sections['RGs'] = Group.objects.filter(type='rg', state="conclude").select_related("state", "charter").order_by("parent__name","acronym") - sections['BOFs'] = Group.objects.filter(type='wg', state="bof-conc").select_related("state", "charter").order_by("parent__name","acronym") - sections['AGs'] = Group.objects.filter(type='ag', state="conclude").select_related("state", "charter").order_by("parent__name","acronym") - sections['RAGs'] = Group.objects.filter(type='rag', state="conclude").select_related("state", "charter").order_by("parent__name","acronym") - sections['Directorates'] = Group.objects.filter(type='dir', state="conclude").select_related("state", "charter").order_by("parent__name","acronym") - sections['Review teams'] = Group.objects.filter(type='review', state="conclude").select_related("state", "charter").order_by("parent__name","acronym") - sections['Teams'] = Group.objects.filter(type='team', state="conclude").select_related("state", "charter").order_by("parent__name","acronym") - sections['Programs'] = Group.objects.filter(type='program', state="conclude").select_related("state", "charter").order_by("parent__name","acronym") + sections["WGs"] = ( + Group.objects.filter(type="wg", state="conclude") + .select_related("state", "charter") + .order_by("parent__name", "acronym") + ) + sections["RGs"] = ( + Group.objects.filter(type="rg", state="conclude") + .select_related("state", "charter") + .order_by("parent__name", "acronym") + ) + sections["BOFs"] = ( + 
Group.objects.filter(type="wg", state="bof-conc") + .select_related("state", "charter") + .order_by("parent__name", "acronym") + ) + sections["AGs"] = ( + Group.objects.filter(type="ag", state="conclude") + .select_related("state", "charter") + .order_by("parent__name", "acronym") + ) + sections["RAGs"] = ( + Group.objects.filter(type="rag", state="conclude") + .select_related("state", "charter") + .order_by("parent__name", "acronym") + ) + sections["Directorates"] = ( + Group.objects.filter(type="dir", state="conclude") + .select_related("state", "charter") + .order_by("parent__name", "acronym") + ) + sections["Review teams"] = ( + Group.objects.filter(type="review", state="conclude") + .select_related("state", "charter") + .order_by("parent__name", "acronym") + ) + sections["Teams"] = ( + Group.objects.filter(type="team", state="conclude") + .select_related("state", "charter") + .order_by("parent__name", "acronym") + ) + sections["Programs"] = ( + Group.objects.filter(type="program", state="conclude") + .select_related("state", "charter") + .order_by("parent__name", "acronym") + ) for name, groups in sections.items(): - # add start/conclusion date d = dict((g.pk, g) for g in groups) for g in groups: g.start_date = g.conclude_date = None - for e in ChangeStateGroupEvent.objects.filter(group__in=groups, state="active").order_by("-time"): + # Some older BOFs were created in the "active" state, so consider both "active" and "bof" + # ChangeStateGroupEvents when finding the start date. A group with _both_ "active" and "bof" + # events should not be in the "bof-conc" state so this shouldn't cause a problem (if it does, + # we'll need to clean up the data) + for e in ChangeStateGroupEvent.objects.filter( + group__in=groups, + state__in=["active", "bof"] if name == "BOFs" else ["active"], + ).order_by("-time"): d[e.group_id].start_date = e.time - for e in ChangeStateGroupEvent.objects.filter(group__in=groups, state="conclude").order_by("time"): + # Similarly, some older BOFs were concluded into the "conclude" state and the event was never + # fixed, so consider both "conclude" and "bof-conc" ChangeStateGroupEvents when finding the + # concluded date. 
A group with _both_ "conclude" and "bof-conc" events should not be in the + # "bof-conc" state so this shouldn't cause a problem (if it does, we'll need to clean up the + # data) + for e in ChangeStateGroupEvent.objects.filter( + group__in=groups, + state__in=["bof-conc", "conclude"] if name == "BOFs" else ["conclude"], + ).order_by("time"): d[e.group_id].conclude_date = e.time - return render(request, 'group/concluded_groups.html', - dict(sections=sections)) + return render(request, "group/concluded_groups.html", dict(sections=sections)) + def prepare_group_documents(request, group, clist): found_docs, meta = prepare_document_table(request, docs_tracked_by_community_list(clist), request.GET, max_results=500) @@ -474,8 +441,8 @@ def prepare_group_documents(request, group, clist): # non-WG drafts and call for WG adoption are considered related if (d.group != group or (d.stream_id and d.get_state_slug("draft-stream-%s" % d.stream_id) in ("c-adopt", "wg-cand"))): - if d.get_state_slug() != "expired": - d.search_heading = "Related Internet-Draft" + if (d.type_id == "draft" and d.get_state_slug() not in ["expired","rfc"]) or d.type_id == "rfc": + d.search_heading = "Related Internet-Drafts and RFCs" docs_related.append(d) else: if not (d.get_state_slug('draft-iesg') == "dead" or (d.stream_id and d.get_state_slug("draft-stream-%s" % d.stream_id) == "dead")): @@ -485,6 +452,47 @@ def prepare_group_documents(request, group, clist): return docs, meta, docs_related, meta_related +def get_leadership(group_type): + people = Person.objects.filter( + role__name__slug="chair", + role__group__type=group_type, + role__group__state__slug__in=("active", "bof", "proposed"), + ).distinct() + leaders = [] + for person in people: + parts = person.name_parts() + groups = [ + r.group.acronym + for r in person.role_set.filter( + name__slug="chair", + group__type=group_type, + group__state__slug__in=("active", "bof", "proposed"), + ) + ] + entry = {"name": "%s, %s" % (parts[3], parts[1]), "groups": ", ".join(groups)} + leaders.append(entry) + return sorted(leaders, key=lambda a: a["name"]) + + +def group_leadership(request, group_type=None): + context = {} + context["leaders"] = get_leadership(group_type) + context["group_type"] = group_type + return render(request, "group/group_leadership.html", context) + + +def group_leadership_csv(request, group_type=None): + leaders = get_leadership(group_type) + response = HttpResponse(content_type="text/csv") + response["Content-Disposition"] = ( + f'attachment; filename="group_leadership_{group_type}.csv"' + ) + writer = csv.writer(response, dialect=csv.excel, delimiter=str(",")) + writer.writerow(["Name", "Groups"]) + for leader in leaders: + writer.writerow([leader["name"], leader["groups"]]) + return response + def group_home(request, acronym, group_type=None): group = get_group_or_404(acronym, group_type) kwargs = dict(acronym=group.acronym) @@ -534,9 +542,8 @@ def group_documents_txt(request, acronym, group_type=None): rows = [] for d in itertools.chain(docs, docs_related): - rfc_number = d.rfc_number() - if rfc_number != None: - name = rfc_number + if d.type_id == "rfc": + name = str(d.rfc_number) else: name = "%s-%s" % (d.name, d.rev) @@ -664,21 +671,6 @@ def group_about_status_edit(request, acronym, group_type=None): } ) -def get_group_email_aliases(acronym, group_type): - if acronym: - pattern = re.compile(r'expand-(%s)(-\w+)@.*? +(.*)$'%acronym) - else: - pattern = re.compile(r'expand-(.*?)(-\w+)@.*? 
+(.*)$') - - aliases = [] - with io.open(settings.GROUP_VIRTUAL_PATH,"r") as virtual_file: - for line in virtual_file.readlines(): - m = pattern.match(line) - if m: - if acronym or not group_type or Group.objects.filter(acronym=m.group(1),type__slug=group_type): - aliases.append({'acronym':m.group(1),'alias_type':m.group(2),'expansion':m.group(3)}) - return aliases - def email(request, acronym, group_type=None): group = get_group_or_404(acronym, group_type) @@ -706,6 +698,61 @@ def history(request, acronym, group_type=None): "can_add_comment": can_add_comment, })) + +class RequestsHistoryParamsForm(forms.Form): + SINCE_CHOICES = ( + (None, "1 month"), + ("3m", "3 months"), + ("6m", "6 months"), + ("1y", "1 year"), + ("2y", "2 years"), + ("all", "All"), + ) + + reviewer_email = forms.EmailField(required=False) + since = forms.ChoiceField(choices=SINCE_CHOICES, required=False) + +def review_requests_history(request, acronym, group_type=None): + group = get_group_or_404(acronym, group_type) + if not group.features.has_reviews: + raise Http404 + + params = RequestsHistoryParamsForm(request.GET) + if not params.is_valid(): + return HttpResponseBadRequest("Invalid parameters") + + reviewer_email = params.cleaned_data["reviewer_email"] or None + if reviewer_email: + history = ReviewAssignment.history.model.objects.filter( + review_request__team__acronym=acronym, + reviewer=reviewer_email) + else: + history = ReviewAssignment.history.model.objects.filter( + review_request__team__acronym=acronym) + reviewer_email = '' + + since = params.cleaned_data["since"] or None + if since != "all": + date_limit = { + None: datetime.timedelta(days=31), + "3m": datetime.timedelta(days=31 * 3), + "6m": datetime.timedelta(days=180), + "1y": datetime.timedelta(days=365), + "2y": datetime.timedelta(days=2 * 365), + }[since] + + history = history.filter(review_request__time__gte=datetime_today(DEADLINE_TZINFO) - date_limit) + + return render(request, 'group/review_requests_history.html', + construct_group_menu_context(request, group, "reviews history", group_type, { + "group": group, + "acronym": acronym, + "history": history, + "since_choices": params.SINCE_CHOICES, + "since": since, + "reviewer_email": reviewer_email + })) + def materials(request, acronym, group_type=None): group = get_group_or_404(acronym, group_type) if not group.features.has_nonsession_materials: @@ -745,14 +792,31 @@ def dependencies(request, acronym, group_type=None): source__type="draft", relationship__slug__startswith="ref", ) - - both_rfcs = Q(source__states__slug="rfc", target__docs__states__slug="rfc") - inactive = Q(source__states__slug__in=["expired", "repl"]) + rfc_or_subseries = {"rfc", "bcp", "fyi", "std"} + both_rfcs = Q(source__type_id="rfc", target__type_id__in=rfc_or_subseries) + pre_rfc_draft_to_rfc = Q( + source__states__type="draft", + source__states__slug="rfc", + target__type_id__in=rfc_or_subseries, + ) + both_pre_rfcs = Q( + source__states__type="draft", + source__states__slug="rfc", + target__type_id="draft", + target__states__type="draft", + target__states__slug="rfc", + ) + inactive = Q( + source__states__type="draft", + source__states__slug__in=["expired", "repl"], + ) attractor = Q(target__name__in=["rfc5000", "rfc5741"]) - removed = Q(source__states__slug__in=["auth-rm", "ietf-rm"]) + removed = Q(source__states__type="draft", source__states__slug__in=["auth-rm", "ietf-rm"]) relations = ( RelatedDocument.objects.filter(references) .exclude(both_rfcs) + .exclude(pre_rfc_draft_to_rfc) + .exclude(both_pre_rfcs) 
.exclude(inactive) .exclude(attractor) .exclude(removed) @@ -760,29 +824,28 @@ def dependencies(request, acronym, group_type=None): links = set() for x in relations: - target_state = x.target.document.get_state_slug("draft") - if target_state != "rfc" or x.is_downref(): + always_include = x.target.type_id not in rfc_or_subseries and x.target.get_state_slug("draft") != "rfc" + if always_include or x.is_downref(): links.add(x) replacements = RelatedDocument.objects.filter( relationship__slug="replaces", - target__docs__in=[x.target.document for x in links], + target__in=[x.target for x in links], ) for x in replacements: links.add(x) - nodes = set([x.source for x in links]).union([x.target.document for x in links]) + nodes = set([x.source for x in links]).union([x.target for x in links]) graph = { "nodes": [ { - "id": x.canonical_name(), - "rfc": x.get_state("draft").slug == "rfc", - "post-wg": not x.get_state("draft-iesg").slug - in ["idexists", "watching", "dead"], - "expired": x.get_state("draft").slug == "expired", - "replaced": x.get_state("draft").slug == "repl", - "group": x.group.acronym if x.group.acronym != "none" else "", + "id": x.became_rfc().name if x.became_rfc() else x.name, + "rfc": x.type_id == "rfc" or x.became_rfc() is not None, + "post-wg": x.get_state_slug("draft-iesg") not in ["idexists", "dead"], + "expired": x.get_state_slug("draft") == "expired", + "replaced": x.get_state_slug("draft") == "repl", + "group": x.group.acronym if x.group and x.group.acronym != "none" else "", "url": x.get_absolute_url(), "level": x.intended_std_level.name if x.intended_std_level @@ -794,8 +857,8 @@ def dependencies(request, acronym, group_type=None): ], "links": [ { - "source": x.source.canonical_name(), - "target": x.target.document.canonical_name(), + "source": x.source.became_rfc().name if x.source.became_rfc() else x.source.name, + "target": x.target.became_rfc().name if x.target.became_rfc() else x.target.name, "rel": "downref" if x.is_downref() else x.relationship.slug, } for x in links @@ -823,21 +886,70 @@ def meetings(request, acronym, group_type=None): four_years_ago = timezone.now() - datetime.timedelta(days=4 * 365) - sessions = ( - group.session_set.with_current_status() - .filter( - meeting__date__gt=four_years_ago - if group.acronym != "iab" - else datetime.date(1970, 1, 1), - type__in=["regular", "plenary", "other"], - ) - .filter( - current_status__in=["sched", "schedw", "appr", "canceled"], + stsas = SchedTimeSessAssignment.objects.filter( + session__type__in=["regular", "plenary", "other"], + session__group=group) + if group.acronym not in ["iab", "iesg"]: + stsas = stsas.filter(session__meeting__date__gt=four_years_ago) + stsas = stsas.annotate(sessionstatus=Coalesce( + Subquery( + SchedulingEvent.objects.filter( + session=OuterRef("session__pk") + ).order_by( + '-time', '-id' + ).values('status')[:1]), + Value(''), + output_field=TextField()) + ).filter( + sessionstatus__in=["sched", "schedw", "appr", "canceled"], + session__meeting__schedule=F("schedule") + ).distinct().select_related( + "session", "session__group", "session__group__parent", "session__meeting__type", "timeslot" + ).prefetch_related( + "session__materials", + "session__materials__states", + Prefetch("session__materials", + queryset=Document.objects.exclude(states__type=F("type"), states__slug='deleted').order_by('presentations__order').prefetch_related('states'), + to_attr="prefetched_active_materials" + ), + ) + + stsas = list(stsas) + + for stsa in stsas: + stsa.session._otsa = stsa + 
stsa.session.official_timeslotassignment = types.MethodType(lambda self:self._otsa, stsa.session) + stsa.session.current_status = stsa.sessionstatus + + sessions = sorted( + set([stsa.session for stsa in stsas]), + key=lambda x: ( + x._otsa.timeslot.time, + x._otsa.timeslot.type_id, + x._otsa.session.group.parent.name if x._otsa.session.group.parent else None, + x._otsa.session.name ) ) - sessions = list(sessions) + + meeting_seen = None + for s in sessions: + if s.meeting != meeting_seen: + meeting_seen = s.meeting + order = 1 + s._oim = order + s.order_in_meeting = types.MethodType(lambda self:self._oim, s) + order += 1 + + + revsub_dates_by_meeting = dict(ImportantDate.objects.filter(name_id="revsub", meeting__session__in=sessions).distinct().values_list("meeting_id","date")) + for s in sessions: s.order_number = s.order_in_meeting() + if s.meeting.pk in revsub_dates_by_meeting: + cutoff_date = revsub_dates_by_meeting[s.meeting.pk] + else: + cutoff_date = s.meeting.date + datetime.timedelta(days=s.meeting.submission_correction_day_offset) + s.cached_is_cutoff = date_today(datetime.UTC) > cutoff_date future, in_progress, recent, past = group_sessions(sessions) @@ -845,7 +957,7 @@ def meetings(request, acronym, group_type=None): can_always_edit = has_role(request.user, ["Secretariat", "Area Director"]) far_past = [] - if group.acronym == "iab": + if group.acronym in ["iab", "iesg"]: recent_past = [] for s in past: if s.time >= four_years_ago: @@ -854,6 +966,18 @@ def meetings(request, acronym, group_type=None): far_past.append(s) past = recent_past + # Add calendar actions + cal_actions = [] + + cal_actions.append(dict( + label='Download as .ics', + url=reverse('ietf.meeting.views.upcoming_ical')+"?show="+group.acronym) + ) + cal_actions.append(dict( + label='Subscribe with webcal', + url='webcal://'+request.get_host()+reverse('ietf.meeting.views.upcoming_ical')+"?show="+group.acronym) + ) + return render( request, "group/meetings.html", @@ -871,6 +995,7 @@ def meetings(request, acronym, group_type=None): "far_past": far_past, "can_edit": can_edit, "can_always_edit": can_always_edit, + "cal_actions": cal_actions, }, ), ) @@ -1282,7 +1407,10 @@ def stream_documents(request, acronym): editable = has_role(request.user, "Secretariat") or group.has_role(request.user, "chair") stream = StreamName.objects.get(slug=acronym) - qs = Document.objects.filter(states__type="draft", states__slug__in=["active", "rfc"], stream=acronym) + qs = Document.objects.filter(stream=acronym).filter( + Q(type_id="draft", states__type="draft", states__slug="active") + | Q(type_id="rfc") + ).distinct() docs, meta = prepare_document_table(request, qs, max_results=1000) return render(request, 'group/stream_documents.html', {'stream':stream, 'docs':docs, 'meta':meta, 'editable':editable } ) @@ -1332,16 +1460,36 @@ def stream_edit(request, acronym): ) -@cache_control(public=True, max_age=30*60) +@cache_control(public=True, max_age=30 * 60) @cache_page(30 * 60) def group_menu_data(request): - groups = Group.objects.filter(state="active", parent__state="active").filter(Q(type__features__acts_like_wg=True)|Q(type_id__in=['program','iabasg'])|Q(parent__acronym='ietfadminllc')|Q(parent__acronym='rfceditor')).order_by("-type_id","acronym") + groups = ( + Group.objects.filter(state="active", parent__state="active") + .filter( + Q(type__features__acts_like_wg=True) + | Q(type_id__in=["program", "iabasg", "iabworkshop"]) + | Q(parent__acronym="ietfadminllc") + | Q(parent__acronym="rfceditor") + ) + .order_by("-type_id", "acronym") 
+ .select_related("type") + ) groups_by_parent = defaultdict(list) for g in groups: - url = urlreverse("ietf.group.views.group_home", kwargs={ 'group_type': g.type_id, 'acronym': g.acronym }) -# groups_by_parent[g.parent_id].append({ 'acronym': g.acronym, 'name': escape(g.name), 'url': url }) - groups_by_parent[g.parent_id].append({ 'acronym': g.acronym, 'name': escape(g.name), 'type': escape(g.type.verbose_name or g.type.name), 'url': url }) + url = urlreverse( + "ietf.group.views.group_home", + kwargs={"group_type": g.type_id, "acronym": g.acronym}, + ) + # groups_by_parent[g.parent_id].append({ 'acronym': g.acronym, 'name': escape(g.name), 'url': url }) + groups_by_parent[g.parent_id].append( + { + "acronym": g.acronym, + "name": escape(g.name), + "type": escape(g.type.verbose_name or g.type.name), + "url": url, + } + ) iab = Group.objects.get(acronym="iab") groups_by_parent[iab.pk].insert( @@ -1350,12 +1498,15 @@ def group_menu_data(request): "acronym": iab.acronym, "name": iab.name, "type": "Top Level Group", - "url": urlreverse("ietf.group.views.group_home", kwargs={"acronym": iab.acronym}) - } + "url": urlreverse( + "ietf.group.views.group_home", kwargs={"acronym": iab.acronym} + ), + }, ) return JsonResponse(groups_by_parent) + @cache_control(public=True, max_age=30 * 60) @cache_page(30 * 60) def group_stats_data(request, years="3", only_active=True): @@ -2101,14 +2252,24 @@ def statements(request, acronym, group_type=None): if not acronym in ["iab", "iesg"]: raise Http404 group = get_group_or_404(acronym, group_type) - statements = group.document_set.filter(type_id="statement").annotate( - published=Subquery( - DocEvent.objects.filter( - doc=OuterRef("pk"), - type="published_statement" - ).order_by("-time").values("time")[:1] + statements = ( + group.document_set.filter(type_id="statement") + .annotate( + published=Subquery( + DocEvent.objects.filter(doc=OuterRef("pk"), type="published_statement") + .order_by("-time") + .values("time")[:1] + ) ) - ).order_by("-published") + .annotate( + status=Subquery( + Document.states.through.objects.filter( + document_id=OuterRef("pk"), state__type="statement" + ).values_list("state__slug", flat=True)[:1] + ) + ) + .order_by("status", "-published") + ) return render( request, "group/statements.html", @@ -2144,7 +2305,8 @@ def appeals(request, acronym, group_type=None): ), ) -def appeal_artifact(request, acronym, artifact_id, group_type=None): +@ignore_view_kwargs("group_type") +def appeal_artifact(request, acronym, artifact_id): artifact = get_object_or_404(AppealArtifact, pk=artifact_id) if artifact.is_markdown(): artifact_html = markdown.markdown(artifact.bits.tobytes().decode("utf-8")) @@ -2163,7 +2325,8 @@ def appeal_artifact(request, acronym, artifact_id, group_type=None): ) @role_required("Secretariat") -def appeal_artifact_markdown(request, acronym, artifact_id, group_type=None): +@ignore_view_kwargs("group_type") +def appeal_artifact_markdown(request, acronym, artifact_id): artifact = get_object_or_404(AppealArtifact, pk=artifact_id) if artifact.is_markdown(): return HttpResponse(artifact.bits, content_type=artifact.content_type) diff --git a/ietf/help/tests_views.py b/ietf/help/tests_views.py deleted file mode 100644 index ee80dad865..0000000000 --- a/ietf/help/tests_views.py +++ /dev/null @@ -1,21 +0,0 @@ -from pyquery import PyQuery - -from django.urls import reverse - -import debug # pyflakes:ignore - -from ietf.utils.test_utils import TestCase -from ietf.doc.models import StateType - -class HelpPageTests(TestCase): - - def 
test_state_index(self): - url = reverse('ietf.help.views.state_index') - r = self.client.get(url) - q = PyQuery(r.content) - content = [ e.text for e in q('#content table td a ') ] - names = StateType.objects.values_list('slug', flat=True) - # The following doesn't cover all doc types, only a selection - for name in names: - if not '-' in name: - self.assertIn(name, content) diff --git a/ietf/help/urls.py b/ietf/help/urls.py index f1cc625fa7..90ce7e12e9 100644 --- a/ietf/help/urls.py +++ b/ietf/help/urls.py @@ -2,10 +2,10 @@ from ietf.help import views from ietf.utils.urls import url +from django.views.generic import RedirectView urlpatterns = [ url(r'^state/(?P<doc>[-\w]+)/(?P<type>[-\w]+)/?$', views.state), url(r'^state/(?P<doc>[-\w]+)/?$', views.state), - url(r'^state/?$', views.state_index), + url(r'^state/?$', RedirectView.as_view(url='/doc/help/state/', permanent=True)), ] - diff --git a/ietf/help/views.py b/ietf/help/views.py index 6b10f9f6c3..493bf0dcf1 100644 --- a/ietf/help/views.py +++ b/ietf/help/views.py @@ -1,23 +1,11 @@ # Copyright The IETF Trust 2007, All Rights Reserved -from django.shortcuts import get_object_or_404, render - import debug # pyflakes:ignore -from ietf.doc.models import State, StateType from ietf.name.models import StreamName +from django.shortcuts import redirect -def state_index(request): - types = StateType.objects.all() - names = [ type.slug for type in types ] - for type in types: - if "-" in type.slug and type.slug.split('-',1)[0] in names: - type.stategroups = None - else: - groups = StateType.objects.filter(slug__startswith=type.slug) - type.stategroups = [ g.slug[len(type.slug)+1:] for g in groups if not g == type ] or "" - - return render(request, 'help/state_index.html', {"types": types}) +# This is just a redirect to the new URL under /doc; can probably go away eventually.
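With state_index gone, both the old index URL and the per-type help URLs now answer with permanent redirects into /doc/help/state/. A sketch of how that could be exercised with Django's test client (illustrative only; it assumes the help app remains mounted at /help/ and uses the redirect targets shown in the diff):

    from django.test import Client

    def check_help_state_redirects():
        c = Client()
        # Old index URL -> RedirectView(permanent=True), i.e. HTTP 301
        r = c.get("/help/state/")
        assert r.status_code == 301
        assert r["Location"] == "/doc/help/state/"
        # Old per-document-type URL -> redirect(..., permanent=True) in views.state
        r = c.get("/help/state/draft/")
        assert r.status_code == 301
        assert r["Location"] == "/doc/help/state/draft"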
def state(request, doc, type=None): if type: @@ -25,6 +13,5 @@ def state(request, doc, type=None): if type in streams: type = "stream-%s" % type slug = "%s-%s" % (doc,type) if type else doc - statetype = get_object_or_404(StateType, slug=slug) - states = State.objects.filter(used=True, type=statetype).order_by('order') - return render(request, 'help/states.html', {"doc": doc, "type": statetype, "states":states} ) + return redirect('/doc/help/state/%s' % slug, permanent = True) + \ No newline at end of file diff --git a/ietf/idindex/index.py b/ietf/idindex/index.py index cda8310b45..19eb29d4da 100644 --- a/ietf/idindex/index.py +++ b/ietf/idindex/index.py @@ -14,7 +14,7 @@ import debug # pyflakes:ignore -from ietf.doc.models import Document, DocEvent, DocumentAuthor, RelatedDocument, DocAlias, State +from ietf.doc.models import Document, DocEvent, DocumentAuthor, RelatedDocument, State from ietf.doc.models import LastCallDocEvent, NewRevisionDocEvent from ietf.doc.models import IESG_SUBSTATE_TAGS from ietf.doc.templatetags.ietf_filters import clean_whitespace @@ -31,15 +31,18 @@ def formatted_rev_date(name): t = revision_time.get(name) return t.strftime("%Y-%m-%d") if t else "" - rfc_aliases = dict(DocAlias.objects.filter(name__startswith="rfc", - docs__states=State.objects.get(type="draft", slug="rfc")).values_list("docs__name", "name")) + rfcs = dict() + for rfc in Document.objects.filter(type_id="rfc"): + draft = rfc.came_from_draft() + if draft is not None: + rfcs[draft.name] = rfc.name - replacements = dict(RelatedDocument.objects.filter(target__docs__states=State.objects.get(type="draft", slug="repl"), + replacements = dict(RelatedDocument.objects.filter(target__states=State.objects.get(type="draft", slug="repl"), relationship="replaces").values_list("target__name", "source__name")) # we need a distinct to prevent the queries below from multiplying the result - all_ids = Document.objects.filter(type="draft").order_by('name').exclude(name__startswith="rfc").distinct() + all_ids = Document.objects.filter(type="draft").order_by('name').distinct() res = ["\nInternet-Drafts Status Summary\n"] @@ -48,7 +51,7 @@ def add_line(f1, f2, f3, f4): res.append(f1 + "\t" + f2 + "\t" + f3 + "\t" + f4) - inactive_states = ["idexists", "pub", "watching", "dead"] + inactive_states = ["idexists", "pub", "dead"] excludes = list(State.objects.filter(type="draft", slug__in=["rfc","repl"])) includes = list(State.objects.filter(type="draft-iesg").exclude(slug__in=inactive_states)) @@ -77,9 +80,9 @@ def add_line(f1, f2, f3, f4): last_field = "" if s.slug == "rfc": - a = rfc_aliases.get(name) - if a: - last_field = a[3:] + rfc = rfcs.get(name) + if rfc: + last_field = rfc[3:] # Rework this to take advantage of having the number at hand already. 
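The "Rework this..." note above points out that the loop building the rfcs dict already has each RFC Document in hand, so the draft-to-RFC mapping could carry the numeric rfc_number directly instead of slicing "rfcNNNN" strings afterwards. One possible shape for that rework (a sketch only, assuming the datatracker ORM helpers used elsewhere in this patch, came_from_draft() and rfc_number):

    from ietf.doc.models import Document

    def draft_to_rfc_number():
        # Map draft name -> RFC number (an int), so callers can render it
        # without the rfc[3:] string slicing used above.
        numbers = {}
        for rfc in Document.objects.filter(type_id="rfc"):
            draft = rfc.came_from_draft()
            if draft is not None:
                numbers[draft.name] = rfc.rfc_number
        return numbers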
elif s.slug == "repl": state += " replaced by " + replacements.get(name, "0") @@ -108,14 +111,17 @@ def file_types_for_drafts(): def all_id2_txt(): # this returns a lot of data so try to be efficient - drafts = Document.objects.filter(type="draft").exclude(name__startswith="rfc").order_by('name') + drafts = Document.objects.filter(type="draft").order_by('name') drafts = drafts.select_related('group', 'group__parent', 'ad', 'intended_std_level', 'shepherd', ) drafts = drafts.prefetch_related("states") - rfc_aliases = dict(DocAlias.objects.filter(name__startswith="rfc", - docs__states=State.objects.get(type="draft", slug="rfc")).values_list("docs__name", "name")) + rfcs = dict() + for rfc in Document.objects.filter(type_id="rfc"): + draft = rfc.came_from_draft() + if draft is not None: + rfcs[draft.name] = rfc.name - replacements = dict(RelatedDocument.objects.filter(target__docs__states=State.objects.get(type="draft", slug="repl"), + replacements = dict(RelatedDocument.objects.filter(target__states=State.objects.get(type="draft", slug="repl"), relationship="replaces").values_list("target__name", "source__name")) revision_time = dict(DocEvent.objects.filter(type="new_revision", doc__name__startswith="draft-").order_by('time').values_list("doc__name", "time")) @@ -164,9 +170,9 @@ def all_id2_txt(): # 4 rfc_number = "" if state == "rfc": - a = rfc_aliases.get(d.name) - if a: - rfc_number = a[3:] + rfc = rfcs.get(d.name) + if rfc: + rfc_number = rfc[3:] fields.append(rfc_number) # 5 repl = "" @@ -270,7 +276,7 @@ def active_drafts_index_by_group(extra_values=()): groups = [g for g in groups_dict.values() if hasattr(g, "active_drafts")] groups.sort(key=lambda g: g.acronym) - fallback_time = datetime.datetime(1950, 1, 1, tzinfo=datetime.timezone.utc) + fallback_time = datetime.datetime(1950, 1, 1, tzinfo=datetime.UTC) for g in groups: g.active_drafts.sort(key=lambda d: d.get("initial_rev_time", fallback_time)) @@ -296,6 +302,6 @@ def id_index_txt(with_abstracts=False): return render_to_string("idindex/id_index.txt", { 'groups': groups, - 'time': timezone.now().astimezone(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S %Z"), + 'time': timezone.now().astimezone(datetime.UTC).strftime("%Y-%m-%d %H:%M:%S %Z"), 'with_abstracts': with_abstracts, }) diff --git a/ietf/idindex/tasks.py b/ietf/idindex/tasks.py new file mode 100644 index 0000000000..2f5f1871d7 --- /dev/null +++ b/ietf/idindex/tasks.py @@ -0,0 +1,99 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +# +# Celery task definitions +# +import os +import shutil + +import debug # pyflakes:ignore + +from celery import shared_task +from contextlib import AbstractContextManager +from pathlib import Path +from tempfile import NamedTemporaryFile +from typing import List + +from django.conf import settings + +from ietf.doc.storage_utils import store_file + +from .index import all_id_txt, all_id2_txt, id_index_txt + + +class TempFileManager(AbstractContextManager): + def __init__(self, tmpdir=None) -> None: + self.cleanup_list: set[Path] = set() + self.dir = tmpdir + + def make_temp_file(self, content): + with NamedTemporaryFile(mode="wt", delete=False, dir=self.dir) as tf: + tf_path = Path(tf.name) + self.cleanup_list.add(tf_path) + tf.write(content) + return tf_path + + def move_into_place(self, src_path: Path, dest_path: Path, hardlink_dirs: List[Path] = []): + shutil.move(src_path, dest_path) + dest_path.chmod(0o644) + self.cleanup_list.remove(src_path) + for path in hardlink_dirs: + target = path / dest_path.name + 
target.unlink(missing_ok=True) + os.link(dest_path, target) # until python>=3.10 + with dest_path.open("rb") as f: + store_file("indexes", dest_path.name, f, allow_overwrite=True) + + def cleanup(self): + for tf_path in self.cleanup_list: + tf_path.unlink(missing_ok=True) + + def __exit__(self, exc_type, exc_val, exc_tb): + self.cleanup() + return False # False: do not suppress the exception + + +@shared_task +def idindex_update_task(): + """Update I-D indexes""" + id_path = Path(settings.INTERNET_DRAFT_PATH) + derived_path = Path(settings.DERIVED_DIR) + download_path = Path(settings.ALL_ID_DOWNLOAD_DIR) + ftp_path = Path(settings.FTP_DIR) / "internet-drafts" + all_archive_path = Path(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR) + + with TempFileManager() as tmp_mgr: + # Generate copies of new contents + all_id_content = all_id_txt() + all_id_tmpfile = tmp_mgr.make_temp_file(all_id_content) + derived_all_id_tmpfile = tmp_mgr.make_temp_file(all_id_content) + download_all_id_tmpfile = tmp_mgr.make_temp_file(all_id_content) + + id_index_content = id_index_txt() + id_index_tmpfile = tmp_mgr.make_temp_file(id_index_content) + derived_id_index_tmpfile = tmp_mgr.make_temp_file(id_index_content) + download_id_index_tmpfile = tmp_mgr.make_temp_file(id_index_content) + + id_abstracts_content = id_index_txt(with_abstracts=True) + id_abstracts_tmpfile = tmp_mgr.make_temp_file(id_abstracts_content) + derived_id_abstracts_tmpfile = tmp_mgr.make_temp_file(id_abstracts_content) + download_id_abstracts_tmpfile = tmp_mgr.make_temp_file(id_abstracts_content) + + all_id2_content = all_id2_txt() + all_id2_tmpfile = tmp_mgr.make_temp_file(all_id2_content) + derived_all_id2_tmpfile = tmp_mgr.make_temp_file(all_id2_content) + + # Move temp files as-atomically-as-possible into place + tmp_mgr.move_into_place(all_id_tmpfile, id_path / "all_id.txt", [ftp_path, all_archive_path]) + tmp_mgr.move_into_place(derived_all_id_tmpfile, derived_path / "all_id.txt") + tmp_mgr.move_into_place(download_all_id_tmpfile, download_path / "id-all.txt") + + tmp_mgr.move_into_place(id_index_tmpfile, id_path / "1id-index.txt", [ftp_path, all_archive_path]) + tmp_mgr.move_into_place(derived_id_index_tmpfile, derived_path / "1id-index.txt") + tmp_mgr.move_into_place(download_id_index_tmpfile, download_path / "id-index.txt") + + tmp_mgr.move_into_place(id_abstracts_tmpfile, id_path / "1id-abstracts.txt", [ftp_path, all_archive_path]) + tmp_mgr.move_into_place(derived_id_abstracts_tmpfile, derived_path / "1id-abstracts.txt") + tmp_mgr.move_into_place(download_id_abstracts_tmpfile, download_path / "id-abstract.txt") + + tmp_mgr.move_into_place(all_id2_tmpfile, id_path / "all_id2.txt", [ftp_path, all_archive_path]) + tmp_mgr.move_into_place(derived_all_id2_tmpfile, derived_path / "all_id2.txt") diff --git a/ietf/idindex/tests.py b/ietf/idindex/tests.py index f207fa5621..ba6100550d 100644 --- a/ietf/idindex/tests.py +++ b/ietf/idindex/tests.py @@ -3,19 +3,23 @@ import datetime +from unittest import mock from pathlib import Path +from tempfile import TemporaryDirectory from django.conf import settings from django.utils import timezone import debug # pyflakes:ignore -from ietf.doc.factories import WgDraftFactory -from ietf.doc.models import Document, DocAlias, RelatedDocument, State, LastCallDocEvent, NewRevisionDocEvent +from ietf.doc.factories import WgDraftFactory, RfcFactory +from ietf.doc.models import Document, RelatedDocument, State, LastCallDocEvent, NewRevisionDocEvent +from ietf.doc.storage_utils import retrieve_str from 
ietf.group.factories import GroupFactory from ietf.name.models import DocRelationshipName from ietf.idindex.index import all_id_txt, all_id2_txt, id_index_txt +from ietf.idindex.tasks import idindex_update_task, TempFileManager from ietf.person.factories import PersonFactory, EmailFactory from ietf.utils.test_utils import TestCase @@ -41,7 +45,8 @@ def test_all_id_txt(self): # published draft.set_state(State.objects.get(type="draft", slug="rfc")) - DocAlias.objects.create(name="rfc1234").docs.add(draft) + rfc = RfcFactory(rfc_number=1234) + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) txt = all_id_txt() self.assertTrue(draft.name + "-" + draft.rev in txt) @@ -52,8 +57,13 @@ def test_all_id_txt(self): RelatedDocument.objects.create( relationship=DocRelationshipName.objects.get(slug="replaces"), - source=Document.objects.create(type_id="draft", rev="00", name="draft-test-replacement"), - target=draft.docalias.get(name__startswith="draft")) + source=Document.objects.create( + type_id="draft", + rev="00", + name="draft-test-replacement" + ), + target=draft + ) txt = all_id_txt() self.assertTrue(draft.name + "-" + draft.rev in txt) @@ -103,7 +113,8 @@ def get_fields(content): # test RFC draft.set_state(State.objects.get(type="draft", slug="rfc")) - DocAlias.objects.create(name="rfc1234").docs.add(draft) + rfc = RfcFactory(rfc_number=1234) + draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) t = get_fields(all_id2_txt()) self.assertEqual(t[4], "1234") @@ -111,8 +122,12 @@ def get_fields(content): draft.set_state(State.objects.get(type="draft", slug="repl")) RelatedDocument.objects.create( relationship=DocRelationshipName.objects.get(slug="replaces"), - source=Document.objects.create(type_id="draft", rev="00", name="draft-test-replacement"), - target=draft.docalias.get(name__startswith="draft")) + source=Document.objects.create( + type_id="draft", + rev="00", + name="draft-test-replacement" + ), + target=draft) t = get_fields(all_id2_txt()) self.assertEqual(t[5], "draft-test-replacement") @@ -140,3 +155,58 @@ def test_id_index_txt(self): txt = id_index_txt(with_abstracts=True) self.assertTrue(draft.abstract[:20] in txt) + + +class TaskTests(TestCase): + @mock.patch("ietf.idindex.tasks.all_id_txt") + @mock.patch("ietf.idindex.tasks.all_id2_txt") + @mock.patch("ietf.idindex.tasks.id_index_txt") + @mock.patch.object(TempFileManager, "__enter__") + def test_idindex_update_task( + self, + temp_file_mgr_enter_mock, + id_index_mock, + all_id2_mock, + all_id_mock, + ): + # Replace TempFileManager's __enter__() method with one that returns a mock. + # Pass a spec to the mock so we validate that only actual methods are called. 
+ mgr_mock = mock.Mock(spec=TempFileManager) + temp_file_mgr_enter_mock.return_value = mgr_mock + + idindex_update_task() + + self.assertEqual(all_id_mock.call_count, 1) + self.assertEqual(all_id2_mock.call_count, 1) + self.assertEqual(id_index_mock.call_count, 2) + self.assertEqual(id_index_mock.call_args_list[0], (tuple(), dict())) + self.assertEqual( + id_index_mock.call_args_list[1], + (tuple(), {"with_abstracts": True}), + ) + self.assertEqual(mgr_mock.make_temp_file.call_count, 11) + self.assertEqual(mgr_mock.move_into_place.call_count, 11) + + def test_temp_file_manager(self): + with TemporaryDirectory() as temp_dir: + with TemporaryDirectory() as other_dir: + temp_path = Path(temp_dir) + other_path = Path(other_dir) + with TempFileManager(temp_path) as tfm: + path1 = tfm.make_temp_file("yay") + path2 = tfm.make_temp_file("boo") # do not keep this one + self.assertTrue(path1.exists()) + self.assertTrue(path2.exists()) + dest = temp_path / "yay.txt" + tfm.move_into_place(path1, dest, [other_path]) + # make sure things were cleaned up... + self.assertFalse(path1.exists()) # moved to dest + self.assertFalse(path2.exists()) # left behind + # check destination contents and permissions + self.assertEqual(dest.read_text(), "yay") + self.assertEqual( + retrieve_str("indexes", "yay.txt"), + "yay" + ) + self.assertEqual(dest.stat().st_mode & 0o777, 0o644) + self.assertTrue(dest.samefile(other_path / "yay.txt")) diff --git a/ietf/iesg/agenda.py b/ietf/iesg/agenda.py index 0abc5e6cda..ace4c9ec40 100644 --- a/ietf/iesg/agenda.py +++ b/ietf/iesg/agenda.py @@ -66,7 +66,7 @@ def get_doc_section(doc): elif doc.type_id == 'statchg': protocol_action = False for relation in doc.relateddocument_set.filter(relationship__slug__in=('tops','tois','tohist','toinf','tobcp','toexp')): - if relation.relationship_id in ('tops','tois') or relation.target.document.std_level_id in ('std','ds','ps'): + if relation.relationship_id in ('tops','tois') or relation.target.std_level_id in ('std','ds','ps'): protocol_action = True if protocol_action: s = "2.3" @@ -133,7 +133,7 @@ def agenda_sections(): ('4.2', {'title':"WG rechartering"}), ('4.2.1', {'title':"Under evaluation for IETF review", 'docs':[]}), ('4.2.2', {'title':"Proposed for approval", 'docs':[]}), - ('5', {'title':"IAB news we can use"}), + ('5', {'title':"IESG Liaison News"}), ('6', {'title':"Management issues"}), ('7', {'title':"Any Other Business (WG News, New Proposals, etc.)"}), ]) @@ -186,7 +186,7 @@ def fill_in_agenda_docs(date, sections, docs=None): doc.review_assignments = review_assignments_for_docs.get(doc.name, []) elif doc.type_id == "conflrev": - doc.conflictdoc = doc.relateddocument_set.get(relationship__slug='conflrev').target.document + doc.conflictdoc = doc.relateddocument_set.get(relationship__slug='conflrev').target elif doc.type_id == "charter": pass @@ -219,4 +219,4 @@ def agenda_data(date=None): fill_in_agenda_docs(date, sections) fill_in_agenda_management_issues(date, sections) - return { 'date': date.isoformat(), 'sections': sections } \ No newline at end of file + return { 'date': date.isoformat(), 'sections': sections } diff --git a/ietf/iesg/migrations/0003_delete_telechat.py b/ietf/iesg/migrations/0003_delete_telechat.py new file mode 100644 index 0000000000..6a09b88555 --- /dev/null +++ b/ietf/iesg/migrations/0003_delete_telechat.py @@ -0,0 +1,16 @@ +# Generated by Django 4.2.13 on 2024-06-21 20:40 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("iesg", 
"0002_telechatagendacontent"), + ] + + operations = [ + migrations.DeleteModel( + name="Telechat", + ), + ] diff --git a/ietf/iesg/models.py b/ietf/iesg/models.py index 96b78a195a..dcc8a9880b 100644 --- a/ietf/iesg/models.py +++ b/ietf/iesg/models.py @@ -59,20 +59,6 @@ def __str__(self): type_name = self.TYPE_CHOICES_DICT.get(self.type, str(self.type)) return "%s: %s" % (type_name, self.title or "") -class Telechat(models.Model): - telechat_id = models.IntegerField(primary_key=True) - telechat_date = models.DateField(null=True, blank=True) - minute_approved = models.IntegerField(null=True, blank=True) - wg_news_txt = models.TextField(blank=True) - iab_news_txt = models.TextField(blank=True) - management_issue = models.TextField(blank=True) - frozen = models.IntegerField(null=True, blank=True) - mi_frozen = models.IntegerField(null=True, blank=True) - - class Meta: - db_table = 'telechat' - - def next_telechat_date(): dates = TelechatDate.objects.order_by("-date") if dates: diff --git a/ietf/iesg/resources.py b/ietf/iesg/resources.py index c5deed27ff..c28dcf51d3 100644 --- a/ietf/iesg/resources.py +++ b/ietf/iesg/resources.py @@ -9,7 +9,7 @@ from ietf import api -from ietf.iesg.models import TelechatDate, Telechat, TelechatAgendaItem, TelechatAgendaContent +from ietf.iesg.models import TelechatDate, TelechatAgendaItem, TelechatAgendaContent class TelechatDateResource(ModelResource): @@ -17,62 +17,57 @@ class Meta: cache = SimpleCache() queryset = TelechatDate.objects.all() serializer = api.Serializer() - #resource_name = 'telechatdate' - ordering = ['id', ] - filtering = { + # resource_name = 'telechatdate' + ordering = [ + "id", + ] + filtering = { "id": ALL, "date": ALL, } + + api.iesg.register(TelechatDateResource()) -class TelechatResource(ModelResource): - class Meta: - cache = SimpleCache() - queryset = Telechat.objects.all() - serializer = api.Serializer() - #resource_name = 'telechat' - ordering = ['tlechat_id', ] - filtering = { - "telechat_id": ALL, - "telechat_date": ALL, - "minute_approved": ALL, - "wg_news_txt": ALL, - "iab_news_txt": ALL, - "management_issue": ALL, - "frozen": ALL, - "mi_frozen": ALL, - } -api.iesg.register(TelechatResource()) class TelechatAgendaItemResource(ModelResource): class Meta: cache = SimpleCache() queryset = TelechatAgendaItem.objects.all() serializer = api.Serializer() - #resource_name = 'telechatagendaitem' - ordering = ['id', ] - filtering = { + # resource_name = 'telechatagendaitem' + ordering = [ + "id", + ] + filtering = { "id": ALL, "text": ALL, "type": ALL, "title": ALL, } -api.iesg.register(TelechatAgendaItemResource()) +api.iesg.register(TelechatAgendaItemResource()) from ietf.name.resources import TelechatAgendaSectionNameResource + + class TelechatAgendaContentResource(ModelResource): - section = ToOneField(TelechatAgendaSectionNameResource, 'section') + section = ToOneField(TelechatAgendaSectionNameResource, "section") + class Meta: queryset = TelechatAgendaContent.objects.none() serializer = api.Serializer() cache = SimpleCache() - #resource_name = 'telechatagendacontent' - ordering = ['id', ] - filtering = { + # resource_name = 'telechatagendacontent' + ordering = [ + "id", + ] + filtering = { "id": ALL, "text": ALL, "section": ALL_WITH_RELATIONS, } + + api.iesg.register(TelechatAgendaContentResource()) diff --git a/ietf/iesg/tests.py b/ietf/iesg/tests.py index 53172e645f..e5fbe5da7b 100644 --- a/ietf/iesg/tests.py +++ b/ietf/iesg/tests.py @@ -2,6 +2,7 @@ # -*- coding: utf-8 -*- +from collections import Counter import datetime import 
io import tarfile @@ -17,20 +18,21 @@ import debug # pyflakes:ignore from ietf.doc.models import DocEvent, BallotPositionDocEvent, TelechatDocEvent -from ietf.doc.models import Document, DocAlias, State, RelatedDocument -from ietf.doc.factories import WgDraftFactory, IndividualDraftFactory, ConflictReviewFactory, BaseDocumentFactory, CharterFactory, WgRfcFactory, IndividualRfcFactory +from ietf.doc.models import Document, State, RelatedDocument +from ietf.doc.factories import BallotDocEventFactory, BallotPositionDocEventFactory, TelechatDocEventFactory, WgDraftFactory, IndividualDraftFactory, ConflictReviewFactory, BaseDocumentFactory, CharterFactory, WgRfcFactory, IndividualRfcFactory from ietf.doc.utils import create_ballot_if_not_open -from ietf.group.factories import RoleFactory, GroupFactory +from ietf.group.factories import RoleFactory, GroupFactory, DatedGroupMilestoneFactory, DatelessGroupMilestoneFactory from ietf.group.models import Group, GroupMilestone, Role from ietf.iesg.agenda import get_agenda_date, agenda_data, fill_in_agenda_administrivia, agenda_sections from ietf.iesg.models import TelechatDate, TelechatAgendaContent +from ietf.iesg.utils import get_wg_dashboard_info from ietf.name.models import StreamName, TelechatAgendaSectionName +from ietf.person.factories import PersonFactory from ietf.person.models import Person from ietf.utils.test_utils import TestCase, login_testing_unauthorized, unicontent from ietf.iesg.factories import IESGMgmtItemFactory, TelechatAgendaContentFactory from ietf.utils.timezone import date_today, DEADLINE_TZINFO - class IESGTests(TestCase): def test_feed(self): draft = WgDraftFactory(states=[('draft','active'),('draft-iesg','iesg-eva')],ad=Person.objects.get(user__username='ad')) @@ -52,6 +54,15 @@ def test_feed(self): self.assertContains(r, draft.name) self.assertContains(r, escape(pos.balloter.plain_name())) + # Mark draft as replaced + draft.set_state(State.objects.get(type="draft", slug="repl")) + + r = self.client.get(urlreverse("ietf.iesg.views.discusses")) + self.assertEqual(r.status_code, 200) + + self.assertNotContains(r, draft.name) + self.assertNotContains(r, escape(pos.balloter.plain_name())) + def test_milestones_needing_review(self): draft = WgDraftFactory() RoleFactory(name_id='ad',group=draft.group,person=Person.objects.get(user__username='ad')) @@ -71,7 +82,80 @@ def test_milestones_needing_review(self): r = self.client.get(url) self.assertEqual(r.status_code, 200) self.assertNotContains(r, m.desc) + + def test_milestones_needing_review_ordering(self): + dated_group = GroupFactory(uses_milestone_dates=True) + RoleFactory( + name_id='ad', + group=dated_group, + person=Person.objects.get(user__username='ad'), + ) + dated_milestones = [ + DatedGroupMilestoneFactory( + group=dated_group, + state_id="review", + desc="This is the description of one dated group milestone", + ), + DatedGroupMilestoneFactory( + group=dated_group, + state_id="review", + desc="This is the description of another dated group milestone", + ), + ] + dated_milestones[0].due -= datetime.timedelta(days=1) # make this one earlier + dated_milestones[0].save() + + dateless_group = GroupFactory(uses_milestone_dates=False) + RoleFactory( + name_id='ad', + group=dateless_group, + person=Person.objects.get(user__username='ad'), + ) + dateless_milestones = [ + DatelessGroupMilestoneFactory( + group=dateless_group, + state_id="review", + desc="This is the description of one dateless group milestone", + ), + DatelessGroupMilestoneFactory( + group=dateless_group, + 
state_id="review", + desc="This is the description of another dateless group milestone", + ), + ] + + url = urlreverse("ietf.iesg.views.milestones_needing_review") + self.client.login(username="ad", password="ad+password") + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + pq = PyQuery(r.content) + # check order-by-date + dated_tbody = pq(f'td:contains("{dated_milestones[0].desc}")').closest("tbody") + rows = list(dated_tbody.items("tr")) # keep as pyquery objects + self.assertTrue(rows[0].find('td:first:contains("Last")')) # Last milestone shown first + self.assertFalse(rows[0].find('td:first:contains("Next")')) + self.assertTrue(rows[0].find(f'td:contains("{dated_milestones[1].desc}")')) + self.assertFalse(rows[0].find(f'td:contains("{dated_milestones[0].desc}")')) + + self.assertFalse(rows[1].find('td:first:contains("Last")')) # Last milestone shown first + self.assertTrue(rows[1].find('td:first:contains("Next")')) + self.assertFalse(rows[1].find(f'td:contains("{dated_milestones[1].desc}")')) + self.assertTrue(rows[1].find(f'td:contains("{dated_milestones[0].desc}")')) + + # check order-by-order + dateless_tbody = pq(f'td:contains("{dateless_milestones[0].desc}")').closest("tbody") + rows = list(dateless_tbody.items("tr")) # keep as pyquery objects + self.assertTrue(rows[0].find('td:first:contains("Last")')) # Last milestone shown first + self.assertFalse(rows[0].find('td:first:contains("Next")')) + self.assertTrue(rows[0].find(f'td:contains("{dateless_milestones[1].desc}")')) + self.assertFalse(rows[0].find(f'td:contains("{dateless_milestones[0].desc}")')) + + self.assertFalse(rows[1].find('td:first:contains("Last")')) # Last milestone shown first + self.assertTrue(rows[1].find('td:first:contains("Next")')) + self.assertFalse(rows[1].find(f'td:contains("{dateless_milestones[1].desc}")')) + self.assertTrue(rows[1].find(f'td:contains("{dateless_milestones[0].desc}")')) + def test_review_decisions(self): draft = WgDraftFactory() @@ -101,14 +185,1595 @@ def test_ietf_activity(self): r = self.client.get(url) self.assertEqual(r.status_code, 200) + def test_working_groups(self): + # Clean away the wasted built-for-every-test noise + Group.objects.filter(type__in=["wg", "area"]).delete() + + ( + area_summary, + area_totals, + ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) = get_wg_dashboard_info() + self.assertEqual(area_summary, []) + self.assertEqual( + area_totals, {"group_count": 0, "doc_count": 0, "page_count": 0} + ) + self.assertEqual(ad_summary, []) + self.assertEqual(noad_summary, []) + self.assertEqual( + ad_totals, + { + "ad_group_count": 0, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + }, + ) + self.assertEqual( + noad_totals, + { + "ad_group_count": 0, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + }, + ) + self.assertEqual( + totals, + { + "group_count": 0, + "doc_count": 0, + "page_count": 0, + "groups_with_docs_count": 0, + }, + ) + self.assertEqual(wg_summary, []) + + # Construct Areas with WGs similar in shape to a real moment of the IETF + + # Note that this test construciton uses the first letter of the wg acronyms + # for convenience to switch on whether groups have documents with assigned ADs. + # (Search for ` if wg_acronym[0] > "g"`) + # There's no other significance to the names of the area directors or the + # acronyms of the areas and groups other than being distinct. Taking the + # values from sets of similar things hopefully helps with debugging the tests. 
+ + areas = {} + for area_acronym in ["red", "orange", "yellow", "green", "blue", "violet"]: + areas[area_acronym] = GroupFactory(type_id="area", acronym=area_acronym) + for ad, area, wgs in [ + ("Alpha", "red", ["bassoon"]), + ("Bravo", "orange", ["celesta"]), + ("Charlie", "orange", ["clarinet", "cymbals"]), + ("Delta", "yellow", ["flute"]), + ("Echo", "yellow", ["glockenspiel"]), + ("Foxtrot", "green", ["gong", "guitar"]), + ("Golf", "green", ["harp"]), + ("Hotel", "blue", ["harpsichord"]), + ("Indigo", "blue", ["oboe", "organ"]), + ("Juliet", "violet", ["piano"]), + ("Kilo", "violet", ["piccolo"]), + ("Lima", "violet", ["saxophone", "tambourine"]), + ]: + p = Person.objects.filter(name=ad).first() or PersonFactory(name=ad) + RoleFactory(group=areas[area], person=p, name_id="ad") + for wg in wgs: + g = GroupFactory(acronym=wg, type_id="wg", parent=areas[area]) + RoleFactory(group=g, person=p, name_id="ad") + + # Some ADs have out of area groups + g = GroupFactory(acronym="timpani", parent=areas["orange"]) + RoleFactory(group=g, person=Person.objects.get(name="Juliet"), name_id="ad") + + ( + area_summary, + area_totals, + ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) = get_wg_dashboard_info() + + # checks for the expected result with area sorted by name + self.assertEqual( + area_summary, + [ + { + "area": "blue", + "groups_in_area": 3, + "groups_with_docs": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "area": "green", + "groups_in_area": 3, + "groups_with_docs": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "area": "orange", + "groups_in_area": 4, + "groups_with_docs": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "area": "red", + "groups_in_area": 1, + "groups_with_docs": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "area": "violet", + "groups_in_area": 4, + "groups_with_docs": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "area": "yellow", + "groups_in_area": 2, + "groups_with_docs": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + ], + ) + self.assertEqual( + area_totals, {"group_count": 0, "doc_count": 0, "page_count": 0} + ) + self.assertEqual( + ad_summary, + [ + { + "ad": "Alpha", + "area": "red", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Bravo", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Charlie", + "area": "orange", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Delta", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Echo", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Foxtrot", + "area": "green", + 
"ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Golf", + "area": "green", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Hotel", + "area": "blue", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Indigo", + "area": "blue", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Juliet", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Juliet", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Kilo", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Lima", + "area": "violet", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + ], + ) + self.assertEqual( + noad_summary, + [ + { + "ad": "Alpha", + "area": "red", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Bravo", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Charlie", + "area": "orange", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Delta", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Echo", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Foxtrot", + "area": "green", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Golf", + "area": "green", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Hotel", + "area": "blue", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Indigo", + "area": "blue", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Juliet", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Juliet", + "area": "violet", + "ad_group_count": 1, + 
"doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Kilo", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + { + "ad": "Lima", + "area": "violet", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0, + "doc_percent": 0, + "page_percent": 0, + }, + ], + ) + self.assertEqual( + ad_totals, + { + "ad_group_count": 17, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + }, + ) + self.assertEqual( + noad_totals, + { + "ad_group_count": 17, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + }, + ) + self.assertEqual( + totals, + { + "group_count": 17, + "doc_count": 0, + "page_count": 0, + "groups_with_docs_count": 0, + }, + ) + self.assertEqual( + wg_summary, + [ + { + "wg": "bassoon", + "area": "red", + "ad": "Alpha", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "celesta", + "area": "orange", + "ad": "Bravo", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "clarinet", + "area": "orange", + "ad": "Charlie", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "cymbals", + "area": "orange", + "ad": "Charlie", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "flute", + "area": "yellow", + "ad": "Delta", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "glockenspiel", + "area": "yellow", + "ad": "Echo", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "gong", + "area": "green", + "ad": "Foxtrot", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "guitar", + "area": "green", + "ad": "Foxtrot", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "harp", + "area": "green", + "ad": "Golf", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "harpsichord", + "area": "blue", + "ad": "Hotel", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "oboe", + "area": "blue", + "ad": "Indigo", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "organ", + "area": "blue", + "ad": "Indigo", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "piano", + "area": "violet", + "ad": "Juliet", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "piccolo", + "area": "violet", + "ad": "Kilo", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "saxophone", + "area": "violet", + "ad": "Lima", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "tambourine", + "area": "violet", + "ad": "Lima", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "timpani", + "area": "orange", + "ad": "Juliet", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + ], + ) + + # As seen above, all doc and page counts are currently 0 + + # We'll give a group a document but not assign it to its AD + 
WgDraftFactory( + group=Group.objects.get(acronym="saxophone"), pages=len("saxophone") + ) + ( + area_summary, + area_totals, + ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) = get_wg_dashboard_info() + count_violet_dicts = 0 + for d in area_summary: + if d["area"] == "violet": + count_violet_dicts += 1 + self.assertEqual(d["groups_with_docs"], 1) + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9) + self.assertEqual(d["group_percent"], 100.0) + self.assertEqual(d["doc_percent"], 100.0) + self.assertEqual(d["page_percent"], 100.0) + else: + self.assertEqual(d["groups_with_docs"], 0) + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + self.assertEqual(count_violet_dicts, 1) + + self.assertEqual( + area_totals, {"group_count": 1, "doc_count": 1, "page_count": 9} + ) + + # No AD has this document, even though it's in Lima's group + count_lima_dicts = 0 + for d in ad_summary: + if d["ad"] == "Lima": + count_lima_dicts += 1 + self.assertEqual(d["doc_group_count"], 0) + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + self.assertEqual(count_lima_dicts, 1) + + # It's in Lima's group, so normally it will eventually land on Lima + count_lima_dicts = 0 + for d in noad_summary: + if d["ad"] == "Lima": + count_lima_dicts += 1 + self.assertEqual(d["doc_group_count"], 1) + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9) + self.assertEqual(d["group_percent"], 100.0) + self.assertEqual(d["doc_percent"], 100.0) + self.assertEqual(d["page_percent"], 100.0) + else: + self.assertEqual(d["doc_group_count"], 0) + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + self.assertEqual(count_lima_dicts, 1) + + self.assertEqual( + ad_totals, + { + "ad_group_count": 17, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + }, + ) + self.assertEqual( + noad_totals, + { + "ad_group_count": 17, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 9, + }, + ) + self.assertEqual( + totals, + { + "group_count": 17, + "doc_count": 1, + "page_count": 9, + "groups_with_docs_count": 1, + }, + ) + + count_sax_dicts = 0 + for d in wg_summary: + if d["wg"] == "saxophone": + count_sax_dicts += 1 + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9) + else: + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(count_sax_dicts, 1) + + # Assign that doc to Lima + self.assertEqual(Document.objects.count(), 1) + Document.objects.all().update(ad=Person.objects.get(name="Lima")) + ( + area_summary, + area_totals, + ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) = get_wg_dashboard_info() + count_violet_dicts = 0 + for d in area_summary: + if d["area"] == "violet": + count_violet_dicts += 1 + self.assertEqual(d["groups_with_docs"], 1) + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9) + self.assertEqual(d["group_percent"], 100.0) + self.assertEqual(d["doc_percent"], 100.0) + self.assertEqual(d["page_percent"], 100.0) + else: + self.assertEqual(d["groups_with_docs"], 0) + 
self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + self.assertEqual(count_violet_dicts, 1) + + self.assertEqual( + area_totals, {"group_count": 1, "doc_count": 1, "page_count": 9} + ) + + # This time it will show up as a doc assigned to Lima + count_lima_dicts = 0 + for d in ad_summary: + if d["ad"] == "Lima": + count_lima_dicts += 1 + self.assertEqual(d["doc_group_count"], 1) + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9) + self.assertEqual(d["group_percent"], 100.0) + self.assertEqual(d["doc_percent"], 100.0) + self.assertEqual(d["page_percent"], 100.0) + else: + self.assertEqual(d["doc_group_count"], 0) + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + self.assertEqual(count_lima_dicts, 1) + + # and there will be no noad documents + count_lima_dicts = 0 + for d in noad_summary: + if d["ad"] == "Lima": + count_lima_dicts += 1 + self.assertEqual(d["doc_group_count"], 0) + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + self.assertEqual(count_lima_dicts, 1) + + self.assertEqual( + ad_totals, + { + "ad_group_count": 17, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 9, + }, + ) + self.assertEqual( + noad_totals, + { + "ad_group_count": 17, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + }, + ) + self.assertEqual( + totals, + { + "group_count": 17, + "doc_count": 1, + "page_count": 9, + "groups_with_docs_count": 1, + }, + ) + + count_sax_dicts = 0 + for d in wg_summary: + if d["wg"] == "saxophone": + count_sax_dicts += 1 + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9) + else: + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(count_sax_dicts, 1) + + # Now give Lima a document in a group that's not in their area: + WgDraftFactory( + group=Group.objects.get(acronym="gong"), + pages=len("gong"), + ad=Person.objects.get(name="Lima"), + ) + ( + area_summary, + area_totals, + ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) = get_wg_dashboard_info() + seen_dicts = Counter([d["area"] for d in area_summary]) + for d in areas: + self.assertEqual(seen_dicts[area], 1 if area in ["violet", "green"] else 0) + for d in area_summary: + if d["area"] in ["violet", "green"]: + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9 if d["area"] == "violet" else 4) + self.assertEqual(d["group_percent"], 50) + self.assertEqual(d["doc_percent"], 50) + self.assertEqual( + d["page_percent"], + 100 * 9 / 13 if d["area"] == "violet" else 100 * 4 / 13, + ) + else: + self.assertEqual(d["doc_count"], 0) + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + + self.assertEqual( + area_totals, {"group_count": 2, "doc_count": 2, "page_count": 13} + ) + + for d in ad_summary: + if d["ad"] == "Lima": + self.assertEqual(d["doc_group_count"], 1) + self.assertEqual(d["doc_count"], 1) + self.assertEqual(d["page_count"], 9 if d["area"] == "violet" else 4) + self.assertEqual(d["group_percent"], 50) 
+ self.assertEqual(d["doc_percent"], 50) + self.assertEqual( + d["page_percent"], + 100 * 9 / 13 if d["area"] == "violet" else 100 * 4 / 13, + ) + else: + self.assertEqual(d["doc_group_count"], 0) + self.assertEqual( + d["doc_count"], 0 + ) # Note in particular this is 0 for Foxtrot + self.assertEqual(d["page_count"], 0) + self.assertEqual(d["group_percent"], 0) + self.assertEqual(d["doc_percent"], 0) + self.assertEqual(d["page_percent"], 0) + + for d in wg_summary: + if d["wg"] == "gong": + # Lima's doc in gong above counts at the dict for gong even though the ad reported there is Foxtrot. + self.assertEqual( + d, + { + "wg": "gong", + "area": "green", + "ad": "Foxtrot", + "doc_count": 1, + "page_count": 4, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + ) + elif d["ad"] == "Lima": + self.assertEqual( + d["area"], "violet" + ) # The out of area assignment is not reflected in the wg_summary at all. + + # Now pile on a lot of documents + for wg_acronym in [ + "bassoon", + "celesta", + "clarinet", + "cymbals", + "flute", + "glockenspiel", + "gong", + "guitar", + "harp", + "harpsichord", + "oboe", + "organ", + "piano", + "piccolo", + "saxophone", + "tambourine", + "timpani", + ]: + if wg_acronym in ["bassoon", "celesta"]: + continue # Those WGs have no docs + # The rest have a doc that's not assigned to any ad + WgDraftFactory( + group=Group.objects.get(acronym=wg_acronym), pages=len(wg_acronym) + ) + if wg_acronym[0] > "g": + # Some have a doc assigned to the responsible ad + WgDraftFactory( + group=Group.objects.get(acronym=wg_acronym), + pages=len(wg_acronym), + ad=Role.objects.get(name_id="ad", group__acronym=wg_acronym).person, + ) + # The other AD for an area might be covering a doc + WgDraftFactory( + group=Group.objects.get(acronym="saxophone"), + pages=len("saxophone"), + ad=Person.objects.get(name="Juliet"), + ) + # An Ad not associated with the group or the area is responsible for a doc + WgDraftFactory( + group=Group.objects.get(acronym="bassoon"), + pages=len("bassoon"), + ad=Person.objects.get(name="Juliet"), + ) + + ( + area_summary, + area_totals, + ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) = get_wg_dashboard_info() + + self.assertEqual( + area_summary, + [ + { + "area": "blue", + "groups_in_area": 3, + "groups_with_docs": 3, + "doc_count": 6, + "page_count": 40, + "group_percent": 18.75, + "doc_percent": 21.428571428571427, + "page_percent": 20.51282051282051, + }, + { + "area": "green", + "groups_in_area": 3, + "groups_with_docs": 3, + "doc_count": 5, + "page_count": 22, + "group_percent": 18.75, + "doc_percent": 17.857142857142858, + "page_percent": 11.282051282051283, + }, + { + "area": "orange", + "groups_in_area": 4, + "groups_with_docs": 3, + "doc_count": 4, + "page_count": 29, + "group_percent": 18.75, + "doc_percent": 14.285714285714285, + "page_percent": 14.871794871794872, + }, + { + "area": "red", + "groups_in_area": 1, + "groups_with_docs": 1, + "doc_count": 1, + "page_count": 7, + "group_percent": 6.25, + "doc_percent": 3.571428571428571, + "page_percent": 3.5897435897435894, + }, + { + "area": "violet", + "groups_in_area": 4, + "groups_with_docs": 4, + "doc_count": 10, + "page_count": 80, + "group_percent": 25.0, + "doc_percent": 35.714285714285715, + "page_percent": 41.02564102564102, + }, + { + "area": "yellow", + "groups_in_area": 2, + "groups_with_docs": 2, + "doc_count": 2, + "page_count": 17, + "group_percent": 12.5, + "doc_percent": 7.142857142857142, + "page_percent": 8.717948717948717, + }, + ], + ) + 
self.assertEqual( + area_totals, {"group_count": 16, "doc_count": 28, "page_count": 195} + ) + self.assertEqual( + ad_summary, + [ + { + "ad": "Alpha", + "area": "red", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Bravo", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Charlie", + "area": "orange", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Delta", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Echo", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Foxtrot", + "area": "green", + "ad_group_count": 2, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Golf", + "area": "green", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 4, + "group_percent": 8.333333333333332, + "doc_percent": 7.6923076923076925, + "page_percent": 4.395604395604396, + }, + { + "ad": "Hotel", + "area": "blue", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 11, + "group_percent": 8.333333333333332, + "doc_percent": 7.6923076923076925, + "page_percent": 12.087912087912088, + }, + { + "ad": "Indigo", + "area": "blue", + "ad_group_count": 2, + "doc_group_count": 2, + "doc_count": 2, + "page_count": 9, + "group_percent": 16.666666666666664, + "doc_percent": 15.384615384615385, + "page_percent": 9.89010989010989, + }, + { + "ad": "Juliet", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 7, + "group_percent": 8.333333333333332, + "doc_percent": 7.6923076923076925, + "page_percent": 7.6923076923076925, + }, + { + "ad": "Juliet", + "area": "red", + "ad_group_count": 0, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 7, + "group_percent": 8.333333333333332, + "doc_percent": 7.6923076923076925, + "page_percent": 7.6923076923076925, + }, + { + "ad": "Juliet", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 2, + "doc_count": 2, + "page_count": 14, + "group_percent": 16.666666666666664, + "doc_percent": 15.384615384615385, + "page_percent": 15.384615384615385, + }, + { + "ad": "Kilo", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 7, + "group_percent": 8.333333333333332, + "doc_percent": 7.6923076923076925, + "page_percent": 7.6923076923076925, + }, + { + "ad": "Lima", + "area": "green", + "ad_group_count": 0, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 4, + "group_percent": 8.333333333333332, + "doc_percent": 7.6923076923076925, + "page_percent": 4.395604395604396, + }, + { + "ad": "Lima", + "area": "violet", + "ad_group_count": 2, + "doc_group_count": 2, + "doc_count": 3, + "page_count": 28, + "group_percent": 16.666666666666664, + "doc_percent": 23.076923076923077, + "page_percent": 30.76923076923077, + }, + ], + ) + self.assertEqual( + noad_summary, + [ + 
{ + "ad": "Alpha", + "area": "red", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Bravo", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + "group_percent": 0.0, + "doc_percent": 0.0, + "page_percent": 0.0, + }, + { + "ad": "Charlie", + "area": "orange", + "ad_group_count": 2, + "doc_group_count": 2, + "doc_count": 2, + "page_count": 15, + "group_percent": 13.333333333333334, + "doc_percent": 13.333333333333334, + "page_percent": 14.423076923076922, + }, + { + "ad": "Delta", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 5, + "group_percent": 6.666666666666667, + "doc_percent": 6.666666666666667, + "page_percent": 4.807692307692308, + }, + { + "ad": "Echo", + "area": "yellow", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 12, + "group_percent": 6.666666666666667, + "doc_percent": 6.666666666666667, + "page_percent": 11.538461538461538, + }, + { + "ad": "Foxtrot", + "area": "green", + "ad_group_count": 2, + "doc_group_count": 2, + "doc_count": 2, + "page_count": 10, + "group_percent": 13.333333333333334, + "doc_percent": 13.333333333333334, + "page_percent": 9.615384615384617, + }, + { + "ad": "Golf", + "area": "green", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 4, + "group_percent": 6.666666666666667, + "doc_percent": 6.666666666666667, + "page_percent": 3.8461538461538463, + }, + { + "ad": "Hotel", + "area": "blue", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 11, + "group_percent": 6.666666666666667, + "doc_percent": 6.666666666666667, + "page_percent": 10.576923076923077, + }, + { + "ad": "Indigo", + "area": "blue", + "ad_group_count": 2, + "doc_group_count": 2, + "doc_count": 2, + "page_count": 9, + "group_percent": 13.333333333333334, + "doc_percent": 13.333333333333334, + "page_percent": 8.653846153846153, + }, + { + "ad": "Juliet", + "area": "orange", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 7, + "group_percent": 6.666666666666667, + "doc_percent": 6.666666666666667, + "page_percent": 6.730769230769231, + }, + { + "ad": "Juliet", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 5, + "group_percent": 6.666666666666667, + "doc_percent": 6.666666666666667, + "page_percent": 4.807692307692308, + }, + { + "ad": "Kilo", + "area": "violet", + "ad_group_count": 1, + "doc_group_count": 1, + "doc_count": 1, + "page_count": 7, + "group_percent": 6.666666666666667, + "doc_percent": 6.666666666666667, + "page_percent": 6.730769230769231, + }, + { + "ad": "Lima", + "area": "violet", + "ad_group_count": 2, + "doc_group_count": 2, + "doc_count": 2, + "page_count": 19, + "group_percent": 13.333333333333334, + "doc_percent": 13.333333333333334, + "page_percent": 18.269230769230766, + }, + ], + ) + self.assertEqual( + ad_totals, + { + "ad_group_count": 17, + "doc_group_count": 12, + "doc_count": 13, + "page_count": 91, + }, + ) + self.assertEqual( + noad_totals, + { + "ad_group_count": 17, + "doc_group_count": 15, + "doc_count": 15, + "page_count": 104, + }, + ) + self.assertEqual( + totals, + { + "group_count": 17, + "doc_count": 28, + "page_count": 195, + "groups_with_docs_count": 16, + }, + ) + self.assertEqual( + wg_summary, + [ + { + "wg": "bassoon", + "area": "red", + 
"ad": "Alpha", + "doc_count": 1, + "page_count": 7, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "celesta", + "area": "orange", + "ad": "Bravo", + "doc_count": 0, + "page_count": 0, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "clarinet", + "area": "orange", + "ad": "Charlie", + "doc_count": 1, + "page_count": 8, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "cymbals", + "area": "orange", + "ad": "Charlie", + "doc_count": 1, + "page_count": 7, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "flute", + "area": "yellow", + "ad": "Delta", + "doc_count": 1, + "page_count": 5, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "glockenspiel", + "area": "yellow", + "ad": "Echo", + "doc_count": 1, + "page_count": 12, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "gong", + "area": "green", + "ad": "Foxtrot", + "doc_count": 2, + "page_count": 8, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "guitar", + "area": "green", + "ad": "Foxtrot", + "doc_count": 1, + "page_count": 6, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "harp", + "area": "green", + "ad": "Golf", + "doc_count": 2, + "page_count": 8, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "harpsichord", + "area": "blue", + "ad": "Hotel", + "doc_count": 2, + "page_count": 22, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "oboe", + "area": "blue", + "ad": "Indigo", + "doc_count": 2, + "page_count": 8, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "organ", + "area": "blue", + "ad": "Indigo", + "doc_count": 2, + "page_count": 10, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "piano", + "area": "violet", + "ad": "Juliet", + "doc_count": 2, + "page_count": 10, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "piccolo", + "area": "violet", + "ad": "Kilo", + "doc_count": 2, + "page_count": 14, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "saxophone", + "area": "violet", + "ad": "Lima", + "doc_count": 4, + "page_count": 36, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "tambourine", + "area": "violet", + "ad": "Lima", + "doc_count": 2, + "page_count": 20, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + { + "wg": "timpani", + "area": "orange", + "ad": "Juliet", + "doc_count": 2, + "page_count": 14, + "rfc_count": 0, + "recent_rfc_count": 0, + }, + ], + ) + + # Make sure the view doesn't _crash_ - the template is a dead-simple rendering of the dicts, but this test doesn't prove that + url = urlreverse("ietf.iesg.views.working_groups") + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + class IESGAgendaTests(TestCase): def setUp(self): super().setUp() mars = GroupFactory(acronym='mars',parent=Group.objects.get(acronym='farfut')) wgdraft = WgDraftFactory(name='draft-ietf-mars-test', group=mars, intended_std_level_id='ps') - rfc = IndividualRfcFactory.create(stream_id='irtf', other_aliases=['rfc6666',], states=[('draft','rfc'),('draft-iesg','pub')], std_level_id='inf', ) - wgdraft.relateddocument_set.create(target=rfc.docalias.get(name='rfc6666'), relationship_id='refnorm') + rfc = IndividualRfcFactory.create(stream_id='irtf', rfc_number=6666, std_level_id='inf', ) + wgdraft.relateddocument_set.create(target=rfc, relationship_id='refnorm') ise_draft = IndividualDraftFactory(name='draft-imaginary-independent-submission') ise_draft.stream = StreamName.objects.get(slug="ise") ise_draft.save_with_history([DocEvent(doc=ise_draft, 
rev=ise_draft.rev, type="changed_stream", by=Person.objects.get(user__username="secretary"), desc="Test")]) @@ -238,7 +1903,7 @@ def test_fill_in_agenda_docs(self): relation = RelatedDocument.objects.create( source=statchg, - target=DocAlias.objects.filter(name__startswith='rfc', docs__std_level="ps")[0], + target=Document.objects.filter(type_id="rfc", std_level="ps").first(), relationship_id="tohist") statchg.group = Group.objects.get(acronym="mars") @@ -256,7 +1921,7 @@ def test_fill_in_agenda_docs(self): self.assertTrue(statchg in agenda_data(date_str)["sections"]["2.3.3"]["docs"]) # 3.3 document status changes - relation.target = DocAlias.objects.filter(name__startswith='rfc', docs__std_level="inf")[0] + relation.target = Document.objects.filter(type_id="rfc", std_level="inf").first() relation.save() statchg.group = Group.objects.get(acronym="mars") @@ -353,6 +2018,8 @@ def test_agenda(self): self.assertContains(r, action_items.text) + q = PyQuery(r.content) + for k, d in self.telechat_docs.items(): if d.type_id == "charter": self.assertContains(r, d.group.name, msg_prefix="%s '%s' not in response" % (k, d.group.name)) @@ -361,6 +2028,18 @@ def test_agenda(self): self.assertContains(r, d.name, msg_prefix="%s '%s' not in response" % (k, d.name)) self.assertContains(r, d.title, msg_prefix="%s '%s' title not in response" % (k, d.title)) + if d.type_id in ["charter", "draft"]: + if d.group.parent is None: + continue + wg_url = urlreverse("ietf.group.views.active_groups", kwargs=dict(group_type="wg")) + href = f"{wg_url}#{d.group.parent.acronym.upper()}" + texts = [elem.text.strip() for elem in q(f'a[href="{href}"]')] + self.assertGreater(len(texts), 0) + if d.type_id == "charter": + self.assertTrue(any(t == d.group.parent.acronym.upper() for t in texts)) + elif d.type_id == "draft": + self.assertTrue(any(t == f"({d.group.parent.acronym.upper()})" for t in texts)) + for i, mi in enumerate(self.mgmt_items, start=1): s = "6." 
+ str(i) self.assertContains(r, s, msg_prefix="Section '%s' not in response" % s) @@ -457,12 +2136,13 @@ def test_agenda_documents_txt(self): def test_agenda_documents(self): url = urlreverse("ietf.iesg.views.agenda_documents") r = self.client.get(url) + self.assertEqual(r.status_code, 200) for k, d in self.telechat_docs.items(): self.assertContains(r, d.name, msg_prefix="%s '%s' not in response" % (k, d.name, )) - self.assertContains(r, d.title, msg_prefix="%s '%s' title not in response" % (k, d.title, )) - + self.assertContains(r, d.title, msg_prefix="%s '%s' not in response" % (k, d.title, )) + def test_past_documents(self): url = urlreverse("ietf.iesg.views.past_documents") # We haven't put any documents on past telechats, so this should be empty @@ -537,6 +2217,66 @@ def test_admin_change(self): draft = Document.objects.get(name="draft-ietf-mars-test") self.assertEqual(draft.telechat_date(),today) +class IESGAgendaTelechatPagesTests(TestCase): + def setUp(self): + super().setUp() + # make_immutable_test_data made a set of future telechats - only need one + # We'll take the "next" one + self.telechat_date = get_agenda_date() + # make_immutable_test_data made and area with only one ad - give it another + ad = Person.objects.get(user__username="ad") + adrole = Role.objects.get(person=ad, name="ad") + ad2 = RoleFactory(group=adrole.group, name_id="ad").person + self.ads=[ad,ad2] + + # Make some drafts + docs = [ + WgDraftFactory(pages=2, states=[('draft-iesg','iesg-eva'),]), + IndividualDraftFactory(pages=20, states=[('draft-iesg','iesg-eva'),]), + WgDraftFactory(pages=200, states=[('draft-iesg','iesg-eva'),]), + ] + # Put them on the telechat + for doc in docs: + TelechatDocEventFactory(doc=doc, telechat_date=self.telechat_date) + # Give them ballots + ballots = [BallotDocEventFactory(doc=doc) for doc in docs] + + # Give the "ad" Area-Director a discuss on one + BallotPositionDocEventFactory(balloter=ad, doc=docs[0], pos_id="discuss", ballot=ballots[0]) + # and a "norecord" position on another + BallotPositionDocEventFactory(balloter=ad, doc=docs[1], pos_id="norecord", ballot=ballots[1]) + # Now "ad" should have 220 pages left to ballot on. + # Every other ad should have 222 pages left to ballot on. 
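(Aside, not part of the patch: the 220/222 figures in the comments above follow from the revised telechat_page_count, which counts a draft as "left to ballot on" when the AD's position is missing or "norecord"; a standalone arithmetic sketch.)

    pages = [2, 20, 200]         # the three drafts created in setUp
    ad_left = 20 + 200           # "ad": the discuss on the 2-page doc counts as balloted,
                                 # norecord and missing positions still count -> 220
    other_ads_left = sum(pages)  # other ADs hold no positions at all -> 222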
+ + def test_ad_pages_left_to_ballot_on(self): + url = urlreverse("ietf.iesg.views.agenda_documents") + + # A non-AD user won't get "pages left" + response = self.client.get(url) + telechat = response.context["telechats"][0] + self.assertEqual(telechat["date"], self.telechat_date) + self.assertEqual(telechat["ad_pages_left_to_ballot_on"],0) + self.assertNotContains(response,"pages left to ballot on") + + username=self.ads[0].user.username + self.assertTrue(self.client.login(username=username, password=f"{username}+password")) + + response = self.client.get(url) + telechat = response.context["telechats"][0] + self.assertEqual(telechat["ad_pages_left_to_ballot_on"],220) + self.assertContains(response,"220 pages left to ballot on") + + self.client.logout() + username=self.ads[1].user.username + self.assertTrue(self.client.login(username=username, password=f"{username}+password")) + + response = self.client.get(url) + telechat = response.context["telechats"][0] + self.assertEqual(telechat["ad_pages_left_to_ballot_on"],222) + + + + class RescheduleOnAgendaTests(TestCase): def test_reschedule(self): draft = WgDraftFactory() @@ -603,7 +2343,7 @@ def test_telechat_agenda_content_view(self): urlreverse("ietf.iesg.views.telechat_agenda_content_view", kwargs={"section": section}) ) self.assertContains(r, content, status_code=200) - self.assertEqual(r.get("Content-Type", None), "text/plain") + self.assertEqual(r.get("Content-Type", None), "text/plain; charset=utf-8") def test_telechat_agenda_content_view_permissions(self): for section in TelechatAgendaSectionName.objects.filter(used=True).values_list("slug", flat=True): diff --git a/ietf/iesg/urls.py b/ietf/iesg/urls.py index d8cfec9f90..5fd9dea0cc 100644 --- a/ietf/iesg/urls.py +++ b/ietf/iesg/urls.py @@ -59,6 +59,7 @@ url(r'^agenda/telechat-(?:%(date)s-)?docs.tgz' % settings.URL_REGEXPS, views.telechat_docs_tarfile), url(r'^discusses/$', views.discusses), url(r'^ietf-activity/$', views.ietf_activity), + url(r'^working-groups/$', views.working_groups), url(r'^milestones/$', views.milestones_needing_review), url(r'^photos/$', views.photos), ] diff --git a/ietf/iesg/utils.py b/ietf/iesg/utils.py index 4ddc9cb404..1d24ecac8e 100644 --- a/ietf/iesg/utils.py +++ b/ietf/iesg/utils.py @@ -1,56 +1,321 @@ -from collections import namedtuple +from collections import Counter, defaultdict, namedtuple -import debug # pyflakes:ignore +import datetime + +import debug # pyflakes:ignore + +from django.db import models +from django.utils import timezone from ietf.doc.models import Document, STATUSCHANGE_RELATIONS from ietf.doc.utils_search import fill_in_telechat_date +from ietf.group.models import Group from ietf.iesg.agenda import get_doc_section +from ietf.person.utils import get_active_ads +from ietf.utils.unicodenormalize import normalize_for_sorting +TelechatPageCount = namedtuple( + "TelechatPageCount", + ["for_approval", "for_action", "related", "ad_pages_left_to_ballot_on"], +) -TelechatPageCount = namedtuple('TelechatPageCount',['for_approval','for_action','related']) -def telechat_page_count(date=None, docs=None): +def telechat_page_count(date=None, docs=None, ad=None): if not date and not docs: - return TelechatPageCount(0, 0, 0) + return TelechatPageCount(0, 0, 0, 0) if not docs: - candidates = Document.objects.filter(docevent__telechatdocevent__telechat_date=date).distinct() + candidates = Document.objects.filter( + docevent__telechatdocevent__telechat_date=date + ).distinct() fill_in_telechat_date(candidates) - docs = [ doc for doc in candidates if 
doc.telechat_date()==date ] + docs = [doc for doc in candidates if doc.telechat_date() == date] + + for_action = [d for d in docs if get_doc_section(d).endswith(".3")] - for_action =[d for d in docs if get_doc_section(d).endswith('.3')] + for_approval = set(docs) - set(for_action) - for_approval = set(docs)-set(for_action) + drafts = [d for d in for_approval if d.type_id == "draft"] - drafts = [d for d in for_approval if d.type_id == 'draft'] + ad_pages_left_to_ballot_on = 0 + pages_for_approval = 0 - pages_for_approval = sum([d.pages or 0 for d in drafts]) + for draft in drafts: + pages_for_approval += draft.pages or 0 + if ad: + ballot = draft.active_ballot() + if ballot: + positions = ballot.active_balloter_positions() + ad_position = positions.get(ad, None) + if ad_position is None or ad_position.pos_id == "norecord": + ad_pages_left_to_ballot_on += draft.pages or 0 pages_for_action = 0 for d in for_action: - if d.type_id == 'draft': + if d.type_id == "draft": pages_for_action += d.pages or 0 - elif d.type_id == 'statchg': + elif d.type_id == "statchg": for rel in d.related_that_doc(STATUSCHANGE_RELATIONS): - pages_for_action += rel.document.pages or 0 - elif d.type_id == 'conflrev': - for rel in d.related_that_doc('conflrev'): - pages_for_action += rel.document.pages or 0 + pages_for_action += rel.pages or 0 + elif d.type_id == "conflrev": + for rel in d.related_that_doc("conflrev"): + pages_for_action += rel.pages or 0 else: pass related_pages = 0 - for d in for_approval-set(drafts): - if d.type_id == 'statchg': + for d in for_approval - set(drafts): + if d.type_id == "statchg": for rel in d.related_that_doc(STATUSCHANGE_RELATIONS): - related_pages += rel.document.pages or 0 - elif d.type_id == 'conflrev': - for rel in d.related_that_doc('conflrev'): - related_pages += rel.document.pages or 0 + related_pages += rel.pages or 0 + elif d.type_id == "conflrev": + for rel in d.related_that_doc("conflrev"): + related_pages += rel.pages or 0 else: # There's really nothing to rely on to give a reading load estimate for charters pass - - return TelechatPageCount(for_approval=pages_for_approval, - for_action=pages_for_action, - related=related_pages) + + return TelechatPageCount( + for_approval=pages_for_approval, + for_action=pages_for_action, + related=related_pages, + ad_pages_left_to_ballot_on=ad_pages_left_to_ballot_on, + ) + + +def get_wg_dashboard_info(): + docs = ( + Document.objects.filter( + group__type="wg", + group__state="active", + states__type="draft", + states__slug="active", + ) + .filter(models.Q(ad__isnull=True) | models.Q(ad__in=get_active_ads())) + .distinct() + .prefetch_related("group", "group__parent") + .exclude( + states__type="draft-stream-ietf", + states__slug__in=["c-adopt", "wg-cand", "dead", "parked", "info"], + ) + ) + groups = Group.objects.filter(state="active", type="wg") + areas = Group.objects.filter(state="active", type="area") + + total_group_count = groups.count() + total_doc_count = docs.count() + total_page_count = docs.aggregate(models.Sum("pages"))["pages__sum"] or 0 + totals = { + "group_count": total_group_count, + "doc_count": total_doc_count, + "page_count": total_page_count, + } + + # Since this view is primarily about counting subsets of the above docs query and the + # expected number of returned documents is just under 1000 typically - do the totaling + # work in python rather than asking the db to do it. 
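(Aside, not part of the patch: a toy, standalone version of the single-pass tally pattern used below, with illustrative names; the real code keys the same kinds of defaultdicts by area, AD and group while iterating the docs queryset once.)

    from collections import defaultdict

    # stand-ins for (group acronym, area acronym, pages) of each active draft
    docs = [("saxophone", "violet", 9), ("gong", "green", 4), ("gong", "green", 6)]

    docs_for_area = defaultdict(int)        # equivalent to defaultdict(lambda: 0)
    pages_for_area = defaultdict(int)
    groups_for_area = defaultdict(set)
    for group, area, pages in docs:
        docs_for_area[area] += 1
        pages_for_area[area] += pages
        groups_for_area[area].add(group)

    # e.g. pages_for_area["green"] == 10 and len(groups_for_area["green"]) == 1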
+ + groups_for_area = defaultdict(set) + pages_for_area = defaultdict(lambda: 0) + docs_for_area = defaultdict(lambda: 0) + groups_for_ad = defaultdict(lambda: defaultdict(set)) + pages_for_ad = defaultdict(lambda: defaultdict(lambda: 0)) + docs_for_ad = defaultdict(lambda: defaultdict(lambda: 0)) + groups_for_noad = defaultdict(lambda: defaultdict(set)) + pages_for_noad = defaultdict(lambda: defaultdict(lambda: 0)) + docs_for_noad = defaultdict(lambda: defaultdict(lambda: 0)) + docs_for_wg = defaultdict(lambda: 0) + pages_for_wg = defaultdict(lambda: 0) + groups_total = set() + pages_total = 0 + docs_total = 0 + + responsible_for_group = defaultdict(lambda: defaultdict(lambda: "None")) + responsible_count = defaultdict(lambda: defaultdict(lambda: 0)) + for group in groups: + responsible = f"{', '.join([r.person.plain_name() for r in group.role_set.filter(name_id='ad')])}" + docs_for_noad[responsible][group.parent.acronym] = ( + 0 # Ensure these keys are present later + ) + docs_for_ad[responsible][group.parent.acronym] = 0 + responsible_for_group[group.acronym][group.parent.acronym] = responsible + responsible_count[responsible][group.parent.acronym] += 1 + + for doc in docs: + docs_for_wg[doc.group] += 1 + pages_for_wg[doc.group] += doc.pages + groups_for_area[doc.group.area.acronym].add(doc.group.acronym) + pages_for_area[doc.group.area.acronym] += doc.pages + docs_for_area[doc.group.area.acronym] += 1 + + if doc.ad is None: + responsible = responsible_for_group[doc.group.acronym][ + doc.group.parent.acronym + ] + groups_for_noad[responsible][doc.group.parent.acronym].add( + doc.group.acronym + ) + pages_for_noad[responsible][doc.group.parent.acronym] += doc.pages + docs_for_noad[responsible][doc.group.parent.acronym] += 1 + else: + responsible = f"{doc.ad.plain_name()}" + groups_for_ad[responsible][doc.group.parent.acronym].add(doc.group.acronym) + pages_for_ad[responsible][doc.group.parent.acronym] += doc.pages + docs_for_ad[responsible][doc.group.parent.acronym] += 1 + + docs_total += 1 + groups_total.add(doc.group.acronym) + pages_total += doc.pages + + groups_total = len(groups_total) + totals["groups_with_docs_count"] = groups_total + + area_summary = [] + + for area in areas: + group_count = len(groups_for_area[area.acronym]) + doc_count = docs_for_area[area.acronym] + page_count = pages_for_area[area.acronym] + area_summary.append( + { + "area": area.acronym, + "groups_in_area": groups.filter(parent=area).count(), + "groups_with_docs": group_count, + "doc_count": doc_count, + "page_count": page_count, + "group_percent": group_count / groups_total * 100 + if groups_total != 0 + else 0, + "doc_percent": doc_count / docs_total * 100 if docs_total != 0 else 0, + "page_percent": page_count / pages_total * 100 + if pages_total != 0 + else 0, + } + ) + area_summary.sort(key=lambda r: r["area"]) + area_totals = { + "group_count": groups_total, + "doc_count": docs_total, + "page_count": pages_total, + } + + noad_summary = [] + noad_totals = { + "ad_group_count": 0, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + } + for ad in docs_for_noad: + for area in docs_for_noad[ad]: + noad_totals["ad_group_count"] += responsible_count[ad][area] + noad_totals["doc_group_count"] += len(groups_for_noad[ad][area]) + noad_totals["doc_count"] += docs_for_noad[ad][area] + noad_totals["page_count"] += pages_for_noad[ad][area] + for ad in docs_for_noad: + for area in docs_for_noad[ad]: + noad_summary.append( + { + "ad": ad, + "area": area, + "ad_group_count": responsible_count[ad][area], + 
"doc_group_count": len(groups_for_noad[ad][area]), + "doc_count": docs_for_noad[ad][area], + "page_count": pages_for_noad[ad][area], + "group_percent": len(groups_for_noad[ad][area]) + / noad_totals["doc_group_count"] + * 100 + if noad_totals["doc_group_count"] != 0 + else 0, + "doc_percent": docs_for_noad[ad][area] + / noad_totals["doc_count"] + * 100 + if noad_totals["doc_count"] != 0 + else 0, + "page_percent": pages_for_noad[ad][area] + / noad_totals["page_count"] + * 100 + if noad_totals["page_count"] != 0 + else 0, + } + ) + noad_summary.sort(key=lambda r: (normalize_for_sorting(r["ad"]), r["area"])) + + ad_summary = [] + ad_totals = { + "ad_group_count": 0, + "doc_group_count": 0, + "doc_count": 0, + "page_count": 0, + } + for ad in docs_for_ad: + for area in docs_for_ad[ad]: + ad_totals["ad_group_count"] += responsible_count[ad][area] + ad_totals["doc_group_count"] += len(groups_for_ad[ad][area]) + ad_totals["doc_count"] += docs_for_ad[ad][area] + ad_totals["page_count"] += pages_for_ad[ad][area] + for ad in docs_for_ad: + for area in docs_for_ad[ad]: + ad_summary.append( + { + "ad": ad, + "area": area, + "ad_group_count": responsible_count[ad][area], + "doc_group_count": len(groups_for_ad[ad][area]), + "doc_count": docs_for_ad[ad][area], + "page_count": pages_for_ad[ad][area], + "group_percent": len(groups_for_ad[ad][area]) + / ad_totals["doc_group_count"] + * 100 + if ad_totals["doc_group_count"] != 0 + else 0, + "doc_percent": docs_for_ad[ad][area] / ad_totals["doc_count"] * 100 + if ad_totals["doc_count"] != 0 + else 0, + "page_percent": pages_for_ad[ad][area] + / ad_totals["page_count"] + * 100 + if ad_totals["page_count"] != 0 + else 0, + } + ) + ad_summary.sort(key=lambda r: (normalize_for_sorting(r["ad"]), r["area"])) + + rfc_counter = Counter( + Document.objects.filter(type="rfc").values_list("group__acronym", flat=True) + ) + recent_rfc_counter = Counter( + Document.objects.filter( + type="rfc", + docevent__type="published_rfc", + docevent__time__gte=timezone.now() - datetime.timedelta(weeks=104), + ).values_list("group__acronym", flat=True) + ) + for wg in set(groups) - set(docs_for_wg.keys()): + docs_for_wg[wg] += 0 + pages_for_wg[wg] += 0 + wg_summary = [] + for wg in docs_for_wg: + wg_summary.append( + { + "wg": wg.acronym, + "area": wg.parent.acronym, + "ad": responsible_for_group[wg.acronym][wg.parent.acronym], + "doc_count": docs_for_wg[wg], + "page_count": pages_for_wg[wg], + "rfc_count": rfc_counter[wg.acronym], + "recent_rfc_count": recent_rfc_counter[wg.acronym], + } + ) + wg_summary.sort(key=lambda r: (r["wg"], r["area"])) + + return ( + area_summary, + area_totals, + ad_summary, + noad_summary, + ad_totals, + noad_totals, + totals, + wg_summary, + ) diff --git a/ietf/iesg/views.py b/ietf/iesg/views.py index 8c2bd7d0ec..f03afb9fc1 100644 --- a/ietf/iesg/views.py +++ b/ietf/iesg/views.py @@ -61,13 +61,14 @@ from ietf.group.models import GroupMilestone, Role from ietf.iesg.agenda import agenda_data, agenda_sections, fill_in_agenda_docs, get_agenda_date from ietf.iesg.models import TelechatDate, TelechatAgendaContent -from ietf.iesg.utils import telechat_page_count +from ietf.iesg.utils import get_wg_dashboard_info, telechat_page_count from ietf.ietfauth.utils import has_role, role_required, user_is_person from ietf.name.models import TelechatAgendaSectionName from ietf.person.models import Person from ietf.meeting.utils import get_activity_stats from ietf.doc.utils_search import fill_in_document_table_attributes, fill_in_telechat_date from ietf.utils.timezone 
import date_today, datetime_from_date +from ietf.utils.unicodenormalize import normalize_for_sorting def review_decisions(request, year=None): events = DocEvent.objects.filter(type__in=("iesg_disapproved", "iesg_approved")) @@ -101,7 +102,7 @@ def agenda_json(request, date=None): res = { "telechat-date": str(data["date"]), - "as-of": str(datetime.datetime.utcnow()), + "as-of": str(datetime.datetime.now(datetime.UTC)), "page-counts": telechat_page_count(date=get_agenda_date(date))._asdict(), "sections": {}, } @@ -122,7 +123,7 @@ def agenda_json(request, date=None): for doc in docs: wginfo = { - 'docname': doc.canonical_name(), + 'docname': doc.name, 'rev': doc.rev, 'wgname': doc.group.name, 'acronym': doc.group.acronym, @@ -137,7 +138,7 @@ def agenda_json(request, date=None): for doc in docs: docinfo = { - 'docname':doc.canonical_name(), + 'docname':doc.name, 'title':doc.title, 'ad':doc.ad.name if doc.ad else None, } @@ -149,8 +150,8 @@ def agenda_json(request, date=None): if doc.type_id == "draft": docinfo['rev'] = doc.rev docinfo['intended-std-level'] = str(doc.intended_std_level) - if doc.rfc_number(): - docinfo['rfc-number'] = doc.rfc_number() + if doc.type_id == "rfc": + docinfo['rfc-number'] = doc.rfc_number iana_state = doc.get_state("draft-iana-review") if iana_state and iana_state.slug in ("not-ok", "changed", "need-rev"): @@ -170,8 +171,8 @@ def agenda_json(request, date=None): elif doc.type_id == 'conflrev': docinfo['rev'] = doc.rev - td = doc.relateddocument_set.get(relationship__slug='conflrev').target.document - docinfo['target-docname'] = td.canonical_name() + td = doc.relateddocument_set.get(relationship__slug='conflrev').target + docinfo['target-docname'] = td.name docinfo['target-title'] = td.title docinfo['target-rev'] = td.rev docinfo['intended-std-level'] = str(td.intended_std_level) @@ -209,7 +210,6 @@ def agenda(request, date=None): urlreverse("ietf.iesg.views.telechat_agenda_content_view", kwargs={"section": "minutes"}) )) - request.session['ballot_edit_return_point'] = request.path_info return render(request, "iesg/agenda.html", { "date": data["date"], "sections": sorted(data["sections"].items(), key=lambda x:[int(p) for p in x[0].split('.')]), @@ -222,7 +222,7 @@ def agenda_txt(request, date=None): "date": data["date"], "sections": sorted(data["sections"].items(), key=lambda x:[int(p) for p in x[0].split('.')]), "domain": Site.objects.get_current().domain, - }, content_type="text/plain; charset=%s"%settings.DEFAULT_CHARSET) + }, content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}") @role_required('Area Director', 'Secretariat') def agenda_moderator_package(request, date=None): @@ -278,14 +278,23 @@ def leaf_section(num, section): @role_required('Area Director', 'Secretariat') def agenda_package(request, date=None): data = agenda_data(date) - return render(request, "iesg/agenda_package.txt", { + return render( + request, + "iesg/agenda_package.txt", + { "date": data["date"], "sections": sorted(data["sections"].items()), "roll_call": data["sections"]["1.1"]["text"], "minutes": data["sections"]["1.3"]["text"], - "management_items": [(num, section) for num, section in data["sections"].items() if "6" < num < "7"], + "management_items": [ + (num, section) + for num, section in data["sections"].items() + if "6" < num < "7" + ], "domain": Site.objects.get_current().domain, - }, content_type='text/plain') + }, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) def agenda_documents_txt(request): @@ -316,7 +325,10 @@ def 
agenda_documents_txt(request): d.rev, ) rows.append("\t".join(row)) - return HttpResponse("\n".join(rows), content_type='text/plain') + return HttpResponse( + "\n".join(rows), + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) class RescheduleForm(forms.Form): telechat_date = forms.TypedChoiceField(coerce=lambda x: datetime.datetime.strptime(x, '%Y-%m-%d').date(), empty_value=None, required=False) @@ -361,6 +373,8 @@ def handle_reschedule_form(request, doc, dates, status): return form def agenda_documents(request): + ad = request.user.person if has_role(request.user, "Area Director") else None + dates = list(TelechatDate.objects.active().order_by('date').values_list("date", flat=True)[:4]) docs_by_date = dict((d, []) for d in dates) @@ -390,15 +404,17 @@ def agenda_documents(request): # the search_result_row view to display them (which expects them) fill_in_document_table_attributes(docs_by_date[date], have_telechat_date=True) fill_in_agenda_docs(date, sections, docs_by_date[date]) - pages = telechat_page_count(docs=docs_by_date[date]).for_approval - + page_count = telechat_page_count(docs=docs_by_date[date], ad=ad) + pages = page_count.for_approval + telechats.append({ "date": date, "pages": pages, + "ad_pages_left_to_ballot_on": page_count.ad_pages_left_to_ballot_on, "sections": sorted((num, section) for num, section in sections.items() if "2" <= num < "5") }) - request.session['ballot_edit_return_point'] = request.path_info + return render(request, 'iesg/agenda_documents.html', { 'telechats': telechats }) def past_documents(request): @@ -483,6 +499,7 @@ def discusses(request): models.Q(states__type__in=("statchg", "conflrev"), states__slug__in=("iesgeval", "defer")), docevent__ballotpositiondocevent__pos__blocking=True) + possible_docs = possible_docs.exclude(states__in=State.objects.filter(type="draft", slug="repl")) possible_docs = possible_docs.select_related("stream", "group", "ad").distinct() docs = [] @@ -526,10 +543,12 @@ def milestones_needing_review(request): ad_list.append(ad) ad.groups_needing_review = sorted(groups, key=lambda g: g.acronym) for g, milestones in groups.items(): - g.milestones_needing_review = sorted(milestones, key=lambda m: m.due) + g.milestones_needing_review = sorted( + milestones, key=lambda m: m.due if m.group.uses_milestone_dates else m.order + ) return render(request, 'iesg/milestones_needing_review.html', - dict(ads=sorted(ad_list, key=lambda ad: ad.plain_name()),)) + dict(ads=sorted(ad_list, key=lambda ad: normalize_for_sorting(ad.plain_name())),)) def photos(request): roles = sorted(Role.objects.filter(group__type='area', group__state='active', name_id='ad'),key=lambda x: "" if x.group.acronym=="gen" else x.group.acronym) @@ -604,4 +623,17 @@ def telechat_agenda_content_manage(request): @role_required("Secretariat", "IAB Chair", "Area Director") def telechat_agenda_content_view(request, section): content = get_object_or_404(TelechatAgendaContent, section__slug=section, section__used=True) - return HttpResponse(content=content.text, content_type="text/plain") + return HttpResponse( + content=content.text, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) + +def working_groups(request): + + area_summary, area_totals, ad_summary, noad_summary, ad_totals, noad_totals, totals, wg_summary = get_wg_dashboard_info() + + return render( + request, + "iesg/working_groups.html", + dict(area_summary=area_summary, area_totals=area_totals, ad_summary=ad_summary, noad_summary=noad_summary, ad_totals=ad_totals, 
noad_totals=noad_totals, totals=totals, wg_summary=wg_summary), + ) diff --git a/ietf/ietfauth/admin.py b/ietf/ietfauth/admin.py new file mode 100644 index 0000000000..c2914f9efa --- /dev/null +++ b/ietf/ietfauth/admin.py @@ -0,0 +1,136 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +import datetime + +from django.conf import settings +from django.contrib import admin, messages +from django.contrib.admin import action +from django.contrib.admin.actions import delete_selected as default_delete_selected +from django.contrib.auth.admin import UserAdmin +from django.contrib.auth.models import User +from django.utils import timezone + + +# Replace default UserAdmin with our custom one +admin.site.unregister(User) + + +class AgeListFilter(admin.SimpleListFilter): + title = "account age" + parameter_name = "age" + + def lookups(self, request, model_admin): + return [ + ("1day", "> 1 day"), + ("3days", "> 3 days"), + ("1week", "> 1 week"), + ("1month", "> 1 month"), + ("1year", "> 1 year"), + ] + + def queryset(self, request, queryset): + deltas = { + "1day": datetime.timedelta(days=1), + "3days": datetime.timedelta(days=3), + "1week": datetime.timedelta(weeks=1), + "1month": datetime.timedelta(days=30), + "1year": datetime.timedelta(days=365), + } + if self.value(): + return queryset.filter(date_joined__lt=timezone.now()-deltas[self.value()]) + return queryset + + +@admin.register(User) +class CustomUserAdmin(UserAdmin): + list_display = ( + "username", + "person", + "date_joined", + "last_login", + "is_staff", + ) + list_filter = list(UserAdmin.list_filter) + [ + AgeListFilter, + ("person", admin.EmptyFieldListFilter), + ] + actions = ["delete_selected"] + + @action( + permissions=["delete"], description="Delete personless %(verbose_name_plural)s" + ) + def delete_selected(self, request, queryset): + """Delete selected action restricted to Users with a null Person field + + This displaces the default delete_selected action with a safer one that will + only delete personless Users. It is done this way instead of by introducing + a new action so that we can simply hand off to the default action (imported + as default_delete_selected()) without having to adjust its template (and maybe + other things) to make it work with a different action name. + """ + already_confirmed = bool(request.POST.get("post")) + personless_queryset = queryset.filter(person__isnull=True) + original_count = queryset.count() + personless_count = personless_queryset.count() + if personless_count > original_count: + # Refuse to act if the count increased! + self.message_user( + request, + ( + "Limiting the selection to Users without a Person INCREASED the " + "count from {} to {}. This should not happen and probably means a " + "concurrent change to the database affected this request. Please " + "try again.".format(original_count, personless_count) + ), + level=messages.ERROR, + ) + return None # return to changelist + + # Display warning/info if this is showing the confirmation page + if not already_confirmed: + if personless_count < original_count: + self.message_user( + request, + ( + "Limiting the selection to Users without a Person reduced the " + "count from {} to {}. Only {} will be deleted.".format( + original_count, personless_count, personless_count + ) + ), + level=messages.WARNING, + ) + else: + self.message_user( + request, + "Confirmed that all selected Users had no Persons.", + ) + + # Django limits the number of fields in a request. 
The delete form itself + # includes a few metadata fields, so give it a little padding. The default + # limit is 1000 and everything will break if it's a small number, so not + # bothering to check that it's > 10. + max_count = settings.DATA_UPLOAD_MAX_NUMBER_FIELDS - 10 + if personless_count > max_count: + self.message_user( + request, + ( + f"Only {max_count} Users can be deleted at once. Will only delete " + f"the first {max_count} selected Personless Users." + ), + level=messages.WARNING, + ) + # delete() doesn't like a queryset limited via [:max_count], so do an + # equivalent filter. + last_to_delete = personless_queryset.order_by("pk")[max_count] + personless_queryset = personless_queryset.filter(pk__lt=last_to_delete.pk) + + if already_confirmed and personless_count != original_count: + # After confirmation, none of the above filtering should change anything. + # Refuse to delete if the DB moved underneath us. + self.message_user( + request, + "Queryset count changed, nothing deleted. Please try again.", + level=messages.ERROR, + ) + return None + + return default_delete_selected(self, request, personless_queryset) diff --git a/ietf/ietfauth/forms.py b/ietf/ietfauth/forms.py index 9b8ee22e0b..41828f2bf6 100644 --- a/ietf/ietfauth/forms.py +++ b/ietf/ietfauth/forms.py @@ -3,20 +3,21 @@ import re + from unidecode import unidecode from django import forms -from django.conf import settings +from django.contrib.auth.models import User +from django.contrib.auth import password_validation from django.core.exceptions import ValidationError from django.db import models -from django.contrib.auth.models import User - -import debug # pyflakes:ignore from ietf.person.models import Person, Email from ietf.mailinglists.models import Allowlisted from ietf.utils.text import isascii +from .password_validation import StrongPasswordValidator +from .validators import prevent_at_symbol, prevent_system_name, prevent_anonymous_name, is_allowed_address from .widgets import PasswordStrengthInput, PasswordConfirmationInput @@ -32,40 +33,62 @@ def clean_email(self): return email +class PasswordStrengthField(forms.CharField): + widget = PasswordStrengthInput( + attrs={ + "class": "password_strength", + "data-disable-strength-enforcement": "", # usually removed in init + } + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + for pwval in password_validation.get_default_password_validators(): + if isinstance(pwval, password_validation.MinimumLengthValidator): + self.widget.attrs["minlength"] = pwval.min_length + elif isinstance(pwval, StrongPasswordValidator): + self.widget.attrs.pop( + "data-disable-strength-enforcement", None + ) + + + class PasswordForm(forms.Form): - password = forms.CharField(widget=PasswordStrengthInput(attrs={'class':'password_strength'})) + password = PasswordStrengthField() password_confirmation = forms.CharField(widget=PasswordConfirmationInput( confirm_with='password', attrs={'class':'password_confirmation'}), help_text="Enter the same password as above, for verification.",) - + + def __init__(self, *args, user=None, **kwargs): + # user is a kw-only argument to avoid interfering with the signature + # when this class is mixed with ModelForm in PersonPasswordForm + self.user = user + super().__init__(*args, **kwargs) def clean_password_confirmation(self): - password = self.cleaned_data.get("password", "") - password_confirmation = self.cleaned_data["password_confirmation"] + # clean fields here rather than a clean() method so validation is + # still 
enforced in PersonPasswordForm without having to override its + # clean() method + password = self.cleaned_data.get("password") + password_confirmation = self.cleaned_data.get("password_confirmation") if password != password_confirmation: - raise forms.ValidationError("The two password fields didn't match.") + raise ValidationError( + "The password confirmation is different than the new password" + ) + try: + password_validation.validate_password(password_confirmation, self.user) + except ValidationError as err: + self.add_error("password", err) return password_confirmation + def ascii_cleaner(supposedly_ascii): outside_printable_ascii_pattern = r'[^\x20-\x7F]' if re.search(outside_printable_ascii_pattern, supposedly_ascii): raise forms.ValidationError("Only unaccented Latin characters are allowed.") return supposedly_ascii -def prevent_at_symbol(name): - if "@" in name: - raise forms.ValidationError("Please fill in name - this looks like an email address (@ is not allowed in names).") - -def prevent_system_name(name): - name_without_spaces = name.replace(" ", "").replace("\t", "") - if "(system)" in name_without_spaces.lower(): - raise forms.ValidationError("Please pick another name - this name is reserved.") - -def prevent_anonymous_name(name): - name_without_spaces = name.replace(" ", "").replace("\t", "") - if "anonymous" in name_without_spaces.lower(): - raise forms.ValidationError("Please pick another name - this name is reserved.") class PersonPasswordForm(forms.ModelForm, PasswordForm): @@ -156,15 +179,7 @@ def clean(self): class NewEmailForm(forms.Form): - new_email = forms.EmailField(label="New email address", required=False) - - def clean_new_email(self): - email = self.cleaned_data.get("new_email", "") - for pat in settings.EXCLUDED_PERSONAL_EMAIL_REGEX_PATTERNS: - if re.search(pat, email): - raise ValidationError("This email address is not valid in a datatracker account") - - return email + new_email = forms.EmailField(label="New email address", required=False, validators=[is_allowed_address]) class RoleEmailForm(forms.Form): @@ -193,33 +208,21 @@ class Meta: model = Allowlisted exclude = ['by', 'time' ] - -from django import forms - -class ChangePasswordForm(forms.Form): +class ChangePasswordForm(PasswordForm): current_password = forms.CharField(widget=forms.PasswordInput) + field_order = ["current_password", "password", "password_confirmation"] - new_password = forms.CharField(widget=PasswordStrengthInput(attrs={'class':'password_strength'})) - new_password_confirmation = forms.CharField(widget=PasswordConfirmationInput( - confirm_with='new_password', - attrs={'class':'password_confirmation'})) - - def __init__(self, user, data=None): - self.user = user - super(ChangePasswordForm, self).__init__(data) + def __init__(self, user, *args, **kwargs): + # user arg is optional in superclass, but required for this form + super().__init__(*args, user=user, **kwargs) def clean_current_password(self): - password = self.cleaned_data.get('current_password', None) + # n.b., password = None is handled by check_password and results in a failed check + password = self.cleaned_data.get("current_password", None) if not self.user.check_password(password): - raise ValidationError('Invalid password') + raise ValidationError("Invalid password") return password - - def clean(self): - new_password = self.cleaned_data.get('new_password', None) - conf_password = self.cleaned_data.get('new_password_confirmation', None) - if not new_password == conf_password: - raise ValidationError("The password 
confirmation is different than the new password") class ChangeUsernameForm(forms.Form): diff --git a/ietf/ietfauth/htpasswd.py b/ietf/ietfauth/htpasswd.py deleted file mode 100644 index 3716d98600..0000000000 --- a/ietf/ietfauth/htpasswd.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright The IETF Trust 2016-2020, All Rights Reserved -# -*- coding: utf-8 -*- - - -import io -import subprocess, hashlib -from django.utils.encoding import force_bytes - -from django.conf import settings - -def update_htpasswd_file(username, password): - if getattr(settings, 'USE_PYTHON_HTDIGEST', None): - pass_file = settings.HTPASSWD_FILE - realm = settings.HTDIGEST_REALM - prefix = force_bytes('%s:%s:' % (username, realm)) - key = force_bytes(hashlib.md5(prefix + force_bytes(password)).hexdigest()) - f = io.open(pass_file, 'r+b') - pos = f.tell() - line = f.readline() - while line: - if line.startswith(prefix): - break - pos=f.tell() - line = f.readline() - f.seek(pos) - f.write(b'%s%s\n' % (prefix, key)) - f.close() - else: - p = subprocess.Popen([settings.HTPASSWD_COMMAND, "-b", settings.HTPASSWD_FILE, username, password], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout, stderr = p.communicate() diff --git a/ietf/ietfauth/management/commands/send_apikey_usage_emails.py b/ietf/ietfauth/management/commands/send_apikey_usage_emails.py deleted file mode 100644 index d3fce1bcc2..0000000000 --- a/ietf/ietfauth/management/commands/send_apikey_usage_emails.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright The IETF Trust 2017-2020, All Rights Reserved -# -*- coding: utf-8 -*- - - -import datetime - -from textwrap import dedent - -from django.conf import settings -from django.core.management.base import BaseCommand -from django.utils import timezone - -import debug # pyflakes:ignore - -from ietf.person.models import PersonalApiKey, PersonApiKeyEvent -from ietf.utils.mail import send_mail - - -class Command(BaseCommand): - """ - Send out emails to all persons who have personal API keys about usage. - - Usage is show over the given period, where the default period is 7 days. - """ - - help = dedent(__doc__).strip() - - def add_arguments(self, parser): - parser.add_argument('-d', '--days', dest='days', type=int, default=7, - help='The period over which to show usage.') - - def handle(self, *filenames, **options): - """ - """ - - self.verbosity = int(options.get('verbosity')) - days = options.get('days') - - keys = PersonalApiKey.objects.filter(valid=True) - for key in keys: - earliest = timezone.now() - datetime.timedelta(days=days) - events = PersonApiKeyEvent.objects.filter(key=key, time__gt=earliest) - count = events.count() - events = events[:32] - if count: - key_name = key.hash()[:8] - subject = "API key usage for key '%s' for the last %s days" %(key_name, days) - to = key.person.email_address() - frm = settings.DEFAULT_FROM_EMAIL - send_mail(None, to, frm, subject, 'utils/apikey_usage_report.txt', {'person':key.person, - 'days':days, 'key':key, 'key_name':key_name, 'count':count, 'events':events, } ) - diff --git a/ietf/ietfauth/password_validation.py b/ietf/ietfauth/password_validation.py new file mode 100644 index 0000000000..bfed4a784e --- /dev/null +++ b/ietf/ietfauth/password_validation.py @@ -0,0 +1,23 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +from django.core.exceptions import ValidationError +from zxcvbn import zxcvbn + + +class StrongPasswordValidator: + message = "This password does not meet complexity requirements and is easily guessable." 
+ code = "weak" + min_zxcvbn_score = 3 + + def __init__(self, message=None, code=None, min_zxcvbn_score=None): + if message is not None: + self.message = message + if code is not None: + self.code = code + if min_zxcvbn_score is not None: + self.min_zxcvbn_score = min_zxcvbn_score + + def validate(self, password, user=None): + """Validate that a password is strong enough""" + strength_report = zxcvbn(password[:72], max_length=72) + if strength_report["score"] < self.min_zxcvbn_score: + raise ValidationError(message=self.message, code=self.code) diff --git a/ietf/ietfauth/tests.py b/ietf/ietfauth/tests.py index ec085ed813..a77e5bd5d5 100644 --- a/ietf/ietfauth/tests.py +++ b/ietf/ietfauth/tests.py @@ -3,13 +3,10 @@ import datetime -import io import logging # pyflakes:ignore -import os import re import requests import requests_mock -import shutil import time import urllib @@ -21,7 +18,6 @@ from oic.utils.authn.client import CLIENT_AUTHN_METHOD from oidc_provider.models import RSAKey from pyquery import PyQuery -from unittest import skipIf from urllib.parse import urlsplit import django.core.signing @@ -35,79 +31,46 @@ from ietf.group.factories import GroupFactory, RoleFactory from ietf.group.models import Group, Role, RoleName -from ietf.ietfauth.htpasswd import update_htpasswd_file from ietf.ietfauth.utils import has_role -from ietf.mailinglists.models import Subscribed -from ietf.meeting.factories import MeetingFactory +from ietf.meeting.factories import MeetingFactory, RegistrationFactory, RegistrationTicketFactory from ietf.nomcom.factories import NomComFactory from ietf.person.factories import PersonFactory, EmailFactory, UserFactory, PersonalApiKeyFactory -from ietf.person.models import Person, Email, PersonalApiKey +from ietf.person.models import Person, Email +from ietf.person.tasks import send_apikey_usage_emails_task from ietf.review.factories import ReviewRequestFactory, ReviewAssignmentFactory from ietf.review.models import ReviewWish, UnavailablePeriod -from ietf.stats.models import MeetingRegistration -from ietf.utils.decorators import skip_coverage from ietf.utils.mail import outbox, empty_outbox, get_payload_text from ietf.utils.test_utils import TestCase, login_testing_unauthorized from ietf.utils.timezone import date_today -import ietf.ietfauth.views - -if os.path.exists(settings.HTPASSWD_COMMAND): - skip_htpasswd_command = False - skip_message = "" -else: - skip_htpasswd_command = True - skip_message = ("Skipping htpasswd test: The binary for htpasswd wasn't found in the\n " - "location indicated in settings.py.") - print(" "+skip_message) - class IetfAuthTests(TestCase): - def setUp(self): - super().setUp() - self.saved_use_python_htdigest = getattr(settings, "USE_PYTHON_HTDIGEST", None) - settings.USE_PYTHON_HTDIGEST = True - - self.saved_htpasswd_file = settings.HTPASSWD_FILE - self.htpasswd_dir = self.tempdir('htpasswd') - settings.HTPASSWD_FILE = os.path.join(self.htpasswd_dir, "htpasswd") - io.open(settings.HTPASSWD_FILE, 'a').close() # create empty file - - self.saved_htdigest_realm = getattr(settings, "HTDIGEST_REALM", None) - settings.HTDIGEST_REALM = "test-realm" - - def tearDown(self): - shutil.rmtree(self.htpasswd_dir) - settings.USE_PYTHON_HTDIGEST = self.saved_use_python_htdigest - settings.HTPASSWD_FILE = self.saved_htpasswd_file - settings.HTDIGEST_REALM = self.saved_htdigest_realm - super().tearDown() def test_index(self): - self.assertEqual(self.client.get(urlreverse(ietf.ietfauth.views.index)).status_code, 200) + 
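The StrongPasswordValidator introduced in ietf/ietfauth/password_validation.py above only takes effect once it is listed in Django's AUTH_PASSWORD_VALIDATORS; that settings change is not shown in this excerpt, so the wiring below is an assumed sketch. The validator's dotted path and its min_zxcvbn_score default come from the new module, and the PASSWORD_POLICY_MIN_LENGTH name comes from the expected error message in the tests further down; the concrete value here is hypothetical.

    # Assumed settings sketch; PasswordStrengthField inspects the configured
    # validators for MinimumLengthValidator and StrongPasswordValidator.
    PASSWORD_POLICY_MIN_LENGTH = 12  # hypothetical value

    AUTH_PASSWORD_VALIDATORS = [
        {
            "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
            "OPTIONS": {"min_length": PASSWORD_POLICY_MIN_LENGTH},
        },
        {
            "NAME": "ietf.ietfauth.password_validation.StrongPasswordValidator",
            # min_zxcvbn_score defaults to 3 in the validator; shown for clarity.
            "OPTIONS": {"min_zxcvbn_score": 3},
        },
    ]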
self.assertEqual(self.client.get(urlreverse("ietf.ietfauth.views.index")).status_code, 200) def test_login_and_logout(self): PersonFactory(user__username='plain') # try logging in without a next - r = self.client.get(urlreverse(ietf.ietfauth.views.login)) + r = self.client.get(urlreverse("ietf.ietfauth.views.login")) self.assertEqual(r.status_code, 200) - r = self.client.post(urlreverse(ietf.ietfauth.views.login), {"username":"plain", "password":"plain+password"}) + r = self.client.post(urlreverse("ietf.ietfauth.views.login"), {"username":"plain", "password":"plain+password"}) self.assertEqual(r.status_code, 302) - self.assertEqual(urlsplit(r["Location"])[2], urlreverse(ietf.ietfauth.views.profile)) + self.assertEqual(urlsplit(r["Location"])[2], urlreverse("ietf.ietfauth.views.profile")) # try logging out r = self.client.post(urlreverse('django.contrib.auth.views.logout'), {}) self.assertEqual(r.status_code, 200) self.assertNotContains(r, "accounts/logout") - r = self.client.get(urlreverse(ietf.ietfauth.views.profile)) + r = self.client.get(urlreverse("ietf.ietfauth.views.profile")) self.assertEqual(r.status_code, 302) - self.assertEqual(urlsplit(r["Location"])[2], urlreverse(ietf.ietfauth.views.login)) + self.assertEqual(urlsplit(r["Location"])[2], urlreverse("ietf.ietfauth.views.login")) # try logging in with a next - r = self.client.post(urlreverse(ietf.ietfauth.views.login) + "?next=/foobar", {"username":"plain", "password":"plain+password"}) + r = self.client.post(urlreverse("ietf.ietfauth.views.login") + "?next=/foobar", {"username":"plain", "password":"plain+password"}) self.assertEqual(r.status_code, 302) self.assertEqual(urlsplit(r["Location"])[2], "/foobar") @@ -138,19 +101,19 @@ def _test_login(url): # try with a trivial next _test_login("/") # try with a next that requires login - _test_login(urlreverse(ietf.ietfauth.views.profile)) + _test_login(urlreverse("ietf.ietfauth.views.profile")) def test_login_with_different_email(self): person = PersonFactory(user__username='plain') email = EmailFactory(person=person) # try logging in without a next - r = self.client.get(urlreverse(ietf.ietfauth.views.login)) + r = self.client.get(urlreverse("ietf.ietfauth.views.login")) self.assertEqual(r.status_code, 200) - r = self.client.post(urlreverse(ietf.ietfauth.views.login), {"username":email, "password":"plain+password"}) + r = self.client.post(urlreverse("ietf.ietfauth.views.login"), {"username":email, "password":"plain+password"}) self.assertEqual(r.status_code, 302) - self.assertEqual(urlsplit(r["Location"])[2], urlreverse(ietf.ietfauth.views.profile)) + self.assertEqual(urlsplit(r["Location"])[2], urlreverse("ietf.ietfauth.views.profile")) def extract_confirm_url(self, confirm_email): # dig out confirm_email link @@ -164,20 +127,11 @@ def extract_confirm_url(self, confirm_email): return confirm_url - def username_in_htpasswd_file(self, username): - with io.open(settings.HTPASSWD_FILE) as f: - for l in f: - if l.startswith(username + ":"): - return True - with io.open(settings.HTPASSWD_FILE) as f: - print(f.read()) - - return False # For the lowered barrier to account creation period, we are disabling this kind of failure # def test_create_account_failure(self): - # url = urlreverse(ietf.ietfauth.views.create_account) + # url = urlreverse("ietf.ietfauth.views.create_account") # # get # r = self.client.get(url) @@ -196,7 +150,7 @@ def test_create_account_failure_template(self): self.assertTrue("Additional Assistance Required" in r) def register(self, email): - url = 
urlreverse(ietf.ietfauth.views.create_account) + url = urlreverse("ietf.ietfauth.views.create_account") # register email empty_outbox() @@ -214,54 +168,41 @@ def register_and_verify(self, email): self.assertEqual(r.status_code, 200) # password mismatch - r = self.client.post(confirm_url, { 'password': 'secret', 'password_confirmation': 'nosecret' }) + r = self.client.post( + confirm_url, { + "password": "secret-and-secure", + "password_confirmation": "not-secret-or-secure", + } + ) + self.assertEqual(r.status_code, 200) + self.assertEqual(User.objects.filter(username=email).count(), 0) + + # weak password + r = self.client.post( + confirm_url, { + "password": "password1234", + "password_confirmation": "password1234", + } + ) self.assertEqual(r.status_code, 200) self.assertEqual(User.objects.filter(username=email).count(), 0) # confirm - r = self.client.post(confirm_url, { 'name': 'User Name', 'ascii': 'User Name', 'password': 'secret', 'password_confirmation': 'secret' }) + r = self.client.post( + confirm_url, + { + "name": "User Name", + "ascii": "User Name", + "password": "secret-and-secure", + "password_confirmation": "secret-and-secure", + }, + ) self.assertEqual(r.status_code, 200) self.assertEqual(User.objects.filter(username=email).count(), 1) self.assertEqual(Person.objects.filter(user__username=email).count(), 1) self.assertEqual(Email.objects.filter(person__user__username=email).count(), 1) - self.assertTrue(self.username_in_htpasswd_file(email)) - - def test_create_allowlisted_account(self): - email = "new-account@example.com" - - # add allowlist entry - r = self.client.post(urlreverse(ietf.ietfauth.views.login), {"username":"secretary", "password":"secretary+password"}) - self.assertEqual(r.status_code, 302) - self.assertEqual(urlsplit(r["Location"])[2], urlreverse(ietf.ietfauth.views.profile)) - - r = self.client.get(urlreverse(ietf.ietfauth.views.add_account_allowlist)) - self.assertEqual(r.status_code, 200) - self.assertContains(r, "Add an allowlist entry") - - r = self.client.post(urlreverse(ietf.ietfauth.views.add_account_allowlist), {"email": email}) - self.assertEqual(r.status_code, 200) - self.assertContains(r, "Allowlist entry creation successful") - - # log out - r = self.client.post(urlreverse('django.contrib.auth.views.logout'), {}) - self.assertEqual(r.status_code, 200) - - # register and verify allowlisted email - self.register_and_verify(email) - - - def test_create_subscribed_account(self): - # verify creation with email in subscribed list - saved_delay = settings.LIST_ACCOUNT_DELAY - settings.LIST_ACCOUNT_DELAY = 1 - email = "subscribed@example.com" - s = Subscribed(email=email) - s.save() - time.sleep(1.1) - self.register_and_verify(email) - settings.LIST_ACCOUNT_DELAY = saved_delay - + # This also tests new account creation. 
def test_create_existing_account(self): # create account once email = "new-account@example.com" @@ -274,7 +215,7 @@ def test_create_existing_account(self): note = get_payload_text(outbox[-1]) self.assertIn(email, note) self.assertIn("A datatracker account for that email already exists", note) - self.assertIn(urlreverse(ietf.ietfauth.views.password_reset), note) + self.assertIn(urlreverse("ietf.ietfauth.views.password_reset"), note) def test_ietfauth_profile(self): EmailFactory(person__user__username='plain') @@ -283,7 +224,7 @@ def test_ietfauth_profile(self): username = "plain" email_address = Email.objects.filter(person__user__username=username).first().address - url = urlreverse(ietf.ietfauth.views.profile) + url = urlreverse("ietf.ietfauth.views.profile") login_testing_unauthorized(self, username, url) @@ -434,7 +375,7 @@ def test_ietfauth_profile(self): def test_email_case_insensitive_protection(self): EmailFactory(address="TestAddress@example.net") person = PersonFactory() - url = urlreverse(ietf.ietfauth.views.profile) + url = urlreverse("ietf.ietfauth.views.profile") login_testing_unauthorized(self, person.user.username, url) data = { @@ -473,14 +414,15 @@ def test_nomcom_dressing_on_profile(self): self.assertFalse(q('#volunteer-button')) self.assertTrue(q('#volunteered')) - def test_reset_password(self): - url = urlreverse(ietf.ietfauth.views.password_reset) - email = 'someone@example.com' - password = 'foobar' + WEAK_PASSWORD="password1234" + VALID_PASSWORD = "complex-and-long-valid-password" + ANOTHER_VALID_PASSWORD = "very-complicated-and-lengthy-password" + url = urlreverse("ietf.ietfauth.views.password_reset") + email = "someone@example.com" user = PersonFactory(user__email=email).user - user.set_password(password) + user.set_password(VALID_PASSWORD) user.save() # get @@ -488,21 +430,23 @@ def test_reset_password(self): self.assertEqual(r.status_code, 200) # ask for reset, wrong username (form should not fail) - r = self.client.post(url, { 'username': "nobody@example.com" }) + r = self.client.post(url, {"username": "nobody@example.com"}) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertTrue(len(q("form .is-invalid")) == 0) # ask for reset empty_outbox() - r = self.client.post(url, { 'username': user.username }) + r = self.client.post(url, {"username": user.username}) self.assertEqual(r.status_code, 200) self.assertEqual(len(outbox), 1) # goto change password page, logged in as someone else confirm_url = self.extract_confirm_url(outbox[-1]) other_user = UserFactory() - self.client.login(username=other_user.username, password=other_user.username + '+password') + self.client.login( + username=other_user.username, password=other_user.username + "+password" + ) r = self.client.get(confirm_url) self.assertEqual(r.status_code, 403) @@ -511,21 +455,47 @@ def test_reset_password(self): r = self.client.get(confirm_url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) - self.assertNotIn(user.username, q('.nav').text(), - 'user should not appear signed in while resetting password') + self.assertNotIn( + user.username, + q(".nav").text(), + "user should not appear signed in while resetting password", + ) # password mismatch - r = self.client.post(confirm_url, { 'password': 'secret', 'password_confirmation': 'nosecret' }) + r = self.client.post( + confirm_url, + { + "password": ANOTHER_VALID_PASSWORD, + "password_confirmation": ANOTHER_VALID_PASSWORD[::-1], + }, + ) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + 
self.assertTrue(len(q("form .is-invalid")) > 0) + + # weak password + r = self.client.post( + confirm_url, + { + "password": WEAK_PASSWORD, + "password_confirmation": WEAK_PASSWORD, + }, + ) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertTrue(len(q("form .is-invalid")) > 0) # confirm - r = self.client.post(confirm_url, { 'password': 'secret', 'password_confirmation': 'secret' }) + r = self.client.post( + confirm_url, + { + "password": ANOTHER_VALID_PASSWORD, + "password_confirmation": ANOTHER_VALID_PASSWORD, + }, + ) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertEqual(len(q("form .is-invalid")), 0) - self.assertTrue(self.username_in_htpasswd_file(user.username)) # reuse reset url r = self.client.get(confirm_url) @@ -533,15 +503,18 @@ def test_reset_password(self): # login after reset request empty_outbox() - user.set_password(password) + user.set_password(VALID_PASSWORD) user.save() - r = self.client.post(url, { 'username': user.username }) + r = self.client.post(url, {"username": user.username}) self.assertEqual(r.status_code, 200) self.assertEqual(len(outbox), 1) confirm_url = self.extract_confirm_url(outbox[-1]) - r = self.client.post(urlreverse(ietf.ietfauth.views.login), {'username': email, 'password': password}) + r = self.client.post( + urlreverse("ietf.ietfauth.views.login"), + {"username": email, "password": VALID_PASSWORD}, + ) r = self.client.get(confirm_url) self.assertEqual(r.status_code, 404) @@ -549,12 +522,12 @@ def test_reset_password(self): # change password after reset request empty_outbox() - r = self.client.post(url, { 'username': user.username }) + r = self.client.post(url, {"username": user.username}) self.assertEqual(r.status_code, 200) self.assertEqual(len(outbox), 1) confirm_url = self.extract_confirm_url(outbox[-1]) - user.set_password('newpassword') + user.set_password(ANOTHER_VALID_PASSWORD) user.save() r = self.client.get(confirm_url) @@ -608,6 +581,24 @@ def test_reset_password_without_username(self): self.assertIn(secondary_address, to) self.assertNotIn(inactive_secondary_address, to) + def test_reset_password_without_user(self): + """Reset password using email address for person without a user account""" + url = urlreverse('ietf.ietfauth.views.password_reset') + email = EmailFactory() + person = email.person + # Remove the user object from the person to get a Email/Person without User: + person.user = None + person.save() + # Remove the remaining User record, since reset_password looks for that by username: + User.objects.filter(username__iexact=email.address).delete() + empty_outbox() + r = self.client.post(url, { 'username': email.address }) + self.assertEqual(len(outbox), 1) + lastReceivedEmail = outbox[-1] + self.assertIn(email.address, lastReceivedEmail.get('To')) + self.assertTrue(lastReceivedEmail.get('Subject').startswith("Confirm password reset")) + self.assertContains(r, "Your password reset request has been successfully received", status_code=200) + def test_review_overview(self): review_req = ReviewRequestFactory() assignment = ReviewAssignmentFactory(review_request=review_req,reviewer=EmailFactory(person__user__username='reviewer')) @@ -623,7 +614,7 @@ def test_review_overview(self): availability="unavailable", ) - url = urlreverse(ietf.ietfauth.views.review_overview) + url = urlreverse("ietf.ietfauth.views.review_overview") login_testing_unauthorized(self, reviewer.user.username, url) @@ -649,117 +640,176 @@ def test_review_overview(self): self.assertEqual(r.status_code, 302) 
self.assertEqual(ReviewWish.objects.filter(doc=doc, team=review_req.team).count(), 0) - def test_htpasswd_file_with_python(self): - # make sure we test both Python and call-out to binary - settings.USE_PYTHON_HTDIGEST = True - - update_htpasswd_file("foo", "passwd") - self.assertTrue(self.username_in_htpasswd_file("foo")) - - @skipIf(skip_htpasswd_command, skip_message) - @skip_coverage - def test_htpasswd_file_with_htpasswd_binary(self): - # make sure we test both Python and call-out to binary - settings.USE_PYTHON_HTDIGEST = False - - update_htpasswd_file("foo", "passwd") - self.assertTrue(self.username_in_htpasswd_file("foo")) - - def test_change_password(self): - - chpw_url = urlreverse(ietf.ietfauth.views.change_password) - prof_url = urlreverse(ietf.ietfauth.views.profile) - login_url = urlreverse(ietf.ietfauth.views.login) - redir_url = '%s?next=%s' % (login_url, chpw_url) + VALID_PASSWORD = "complex-and-long-valid-password" + ANOTHER_VALID_PASSWORD = "very-complicated-and-lengthy-password" + chpw_url = urlreverse("ietf.ietfauth.views.change_password") + prof_url = urlreverse("ietf.ietfauth.views.profile") + login_url = urlreverse("ietf.ietfauth.views.login") + redir_url = "%s?next=%s" % (login_url, chpw_url) # get without logging in r = self.client.get(chpw_url) self.assertRedirects(r, redir_url) - user = User.objects.create(username="someone@example.com", email="someone@example.com") - user.set_password("password") + user = User.objects.create( + username="someone@example.com", email="someone@example.com" + ) + user.set_password(VALID_PASSWORD) user.save() p = Person.objects.create(name="Some One", ascii="Some One", user=user) Email.objects.create(address=user.username, person=p, origin=user.username) # log in - r = self.client.post(redir_url, {"username":user.username, "password":"password"}) + r = self.client.post( + redir_url, {"username": user.username, "password": VALID_PASSWORD} + ) self.assertRedirects(r, chpw_url) # wrong current password - r = self.client.post(chpw_url, {"current_password": "fiddlesticks", - "new_password": "foobar", - "new_password_confirmation": "foobar", - }) + r = self.client.post( + chpw_url, + { + "current_password": "fiddlesticks", + "password": ANOTHER_VALID_PASSWORD, + "password_confirmation": ANOTHER_VALID_PASSWORD, + }, + ) self.assertEqual(r.status_code, 200) - self.assertFormError(r.context["form"], 'current_password', 'Invalid password') + self.assertFormError(r.context["form"], "current_password", "Invalid password") # mismatching new passwords - r = self.client.post(chpw_url, {"current_password": "password", - "new_password": "foobar", - "new_password_confirmation": "barfoo", - }) + r = self.client.post( + chpw_url, + { + "current_password": VALID_PASSWORD, + "password": ANOTHER_VALID_PASSWORD, + "password_confirmation": ANOTHER_VALID_PASSWORD[::-1], + }, + ) + self.assertEqual(r.status_code, 200) + self.assertFormError( + r.context["form"], + "password_confirmation", + "The password confirmation is different than the new password", + ) + + # password too short + r = self.client.post( + chpw_url, + { + "current_password": VALID_PASSWORD, + "password": "sh0rtpw0rd", + "password_confirmation": "sh0rtpw0rd", + } + ) self.assertEqual(r.status_code, 200) - self.assertFormError(r.context["form"], None, "The password confirmation is different than the new password") + self.assertFormError( + r.context["form"], + "password", + "This password is too short. It must contain at least " + f"{settings.PASSWORD_POLICY_MIN_LENGTH} characters." 
+ ) + + # password too simple + r = self.client.post( + chpw_url, + { + "current_password": VALID_PASSWORD, + "password": "passwordpassword", + "password_confirmation": "passwordpassword", + } + ) + self.assertEqual(r.status_code, 200) + self.assertFormError( + r.context["form"], + "password", + "This password does not meet complexity requirements " + "and is easily guessable." + ) # correct password change - r = self.client.post(chpw_url, {"current_password": "password", - "new_password": "foobar", - "new_password_confirmation": "foobar", - }) + r = self.client.post( + chpw_url, + { + "current_password": VALID_PASSWORD, + "password": ANOTHER_VALID_PASSWORD, + "password_confirmation": ANOTHER_VALID_PASSWORD, + }, + ) self.assertRedirects(r, prof_url) # refresh user object user = User.objects.get(username="someone@example.com") - self.assertTrue(user.check_password('foobar')) + self.assertTrue(user.check_password(ANOTHER_VALID_PASSWORD)) def test_change_username(self): - - chun_url = urlreverse(ietf.ietfauth.views.change_username) - prof_url = urlreverse(ietf.ietfauth.views.profile) - login_url = urlreverse(ietf.ietfauth.views.login) - redir_url = '%s?next=%s' % (login_url, chun_url) + VALID_PASSWORD = "complex-and-long-valid-password" + chun_url = urlreverse("ietf.ietfauth.views.change_username") + prof_url = urlreverse("ietf.ietfauth.views.profile") + login_url = urlreverse("ietf.ietfauth.views.login") + redir_url = "%s?next=%s" % (login_url, chun_url) # get without logging in r = self.client.get(chun_url) self.assertRedirects(r, redir_url) - user = User.objects.create(username="someone@example.com", email="someone@example.com") - user.set_password("password") + user = User.objects.create( + username="someone@example.com", email="someone@example.com" + ) + user.set_password(VALID_PASSWORD) user.save() p = Person.objects.create(name="Some One", ascii="Some One", user=user) Email.objects.create(address=user.username, person=p, origin=user.username) - Email.objects.create(address="othername@example.org", person=p, origin=user.username) + Email.objects.create( + address="othername@example.org", person=p, origin=user.username + ) # log in - r = self.client.post(redir_url, {"username":user.username, "password":"password"}) + r = self.client.post( + redir_url, {"username": user.username, "password": VALID_PASSWORD} + ) self.assertRedirects(r, chun_url) # wrong username - r = self.client.post(chun_url, {"username": "fiddlesticks", - "password": "password", - }) + r = self.client.post( + chun_url, + { + "username": "fiddlesticks", + "password": VALID_PASSWORD, + }, + ) self.assertEqual(r.status_code, 200) - self.assertFormError(r.context["form"], 'username', - "Select a valid choice. fiddlesticks is not one of the available choices.") + self.assertFormError( + r.context["form"], + "username", + "Select a valid choice. 
fiddlesticks is not one of the available choices.", + ) # wrong password - r = self.client.post(chun_url, {"username": "othername@example.org", - "password": "foobar", - }) + r = self.client.post( + chun_url, + { + "username": "othername@example.org", + "password": "foobar", + }, + ) self.assertEqual(r.status_code, 200) - self.assertFormError(r.context["form"], 'password', 'Invalid password') + self.assertFormError(r.context["form"], "password", "Invalid password") # correct username change - r = self.client.post(chun_url, {"username": "othername@example.org", - "password": "password", - }) + r = self.client.post( + chun_url, + { + "username": "othername@example.org", + "password": VALID_PASSWORD, + }, + ) self.assertRedirects(r, prof_url) # refresh user object prev = user user = User.objects.get(username="othername@example.org") self.assertEqual(prev, user) - self.assertTrue(user.check_password('password')) + self.assertTrue(user.check_password(VALID_PASSWORD)) def test_apikey_management(self): # Create a person with a role that will give at least one valid apikey @@ -869,9 +919,8 @@ def test_apikey_errors(self): self.assertContains(r, 'Invalid apikey', status_code=403) # invalid apikey (invalidated api key) - unauthorized_url = urlreverse('ietf.api.views.app_auth') - invalidated_apikey = PersonalApiKey.objects.create( - endpoint=unauthorized_url, person=person, valid=False) + unauthorized_url = urlreverse('ietf.api.views.app_auth', kwargs={'app': 'authortools'}) + invalidated_apikey = PersonalApiKeyFactory(endpoint=unauthorized_url, person=person, valid=False) r = self.client.post(unauthorized_url, {'apikey': invalidated_apikey.hash()}) self.assertContains(r, 'Invalid apikey', status_code=403) @@ -884,15 +933,16 @@ def test_apikey_errors(self): person.user.save() # endpoint mismatch - key2 = PersonalApiKey.objects.create(person=person, endpoint='/') + key2 = PersonalApiKeyFactory( + person=person, + endpoint='/', + validate_model=False, # allow invalid endpoint + ) r = self.client.post(key.endpoint, {'apikey':key2.hash(), 'dummy':'dummy',}) self.assertContains(r, 'Apikey endpoint mismatch', status_code=400) key2.delete() def test_send_apikey_report(self): - from ietf.ietfauth.management.commands.send_apikey_usage_emails import Command - from ietf.utils.mail import outbox, empty_outbox - person = RoleFactory(name_id='secr', group__acronym='secretariat').person url = urlreverse('ietf.ietfauth.views.apikey_create') @@ -917,9 +967,8 @@ def test_send_apikey_report(self): date = str(date_today()) empty_outbox() - cmd = Command() - cmd.handle(verbosity=0, days=7) - + send_apikey_usage_emails_task(days=7) + self.assertEqual(len(outbox), len(endpoints)) for mail in outbox: body = get_payload_text(mail) @@ -1098,11 +1147,15 @@ def test_oidc_code_auth(self): EmailFactory(person=person) email_list = person.email_set.all().values_list('address', flat=True) meeting = MeetingFactory(type_id='ietf', date=date_today()) - MeetingRegistration.objects.create( - meeting=meeting, person=None, first_name=person.first_name(), last_name=person.last_name(), - email=email_list[0], ticket_type='full_week', reg_type='remote', affiliation='Some Company', - ) - + reg_person = RegistrationFactory( + meeting=meeting, + person=person, + first_name=person.first_name(), + last_name=person.last_name(), + email=email_list[0], + affiliation='Some Company', + with_ticket={'attendance_type_id': 'remote', 'ticket_type_id': 'week_pass'}, + ) # Get access authorisation session = {} session["state"] = rndstr() @@ -1155,35 +1208,48 
@@ def test_oidc_code_auth(self): for key in ['iss', 'sub', 'aud', 'exp', 'iat', 'auth_time', 'nonce', 'at_hash']: self.assertIn(key, access_token_info['id_token']) - # Get userinfo, check keys present + # Get userinfo, check keys present, most common scenario userinfo = client.do_user_info_request(state=params["state"], scope=args['scope']) for key in [ 'email', 'family_name', 'given_name', 'meeting', 'name', 'pronouns', 'roles', 'ticket_type', 'reg_type', 'affiliation', 'picture', 'dots', ]: self.assertIn(key, userinfo) self.assertTrue(userinfo[key]) self.assertIn('remote', set(userinfo['reg_type'].split())) - self.assertNotIn('hackathon', set(userinfo['reg_type'].split())) + self.assertNotIn('hackathon_onsite', set(userinfo['reg_type'].split())) self.assertIn(active_group.acronym, [i[1] for i in userinfo['roles']]) self.assertNotIn(closed_group.acronym, [i[1] for i in userinfo['roles']]) - # Create another registration, with a different email - MeetingRegistration.objects.create( - meeting=meeting, person=None, first_name=person.first_name(), last_name=person.last_name(), - email=email_list[1], ticket_type='one_day', reg_type='hackathon', affiliation='Some Company, Inc', - ) + # Create a registration, with only email, no person (rare if at all) + reg_person.delete() + reg_email = RegistrationFactory( + meeting=meeting, + person=None, + first_name=person.first_name(), + last_name=person.last_name(), + email=email_list[1], + affiliation='Some Company, Inc', + with_ticket={'attendance_type_id': 'hackathon_onsite', 'ticket_type_id': 'one_day'}, + ) userinfo = client.do_user_info_request(state=params["state"], scope=args['scope']) - self.assertIn('hackathon', set(userinfo['reg_type'].split())) - self.assertIn('remote', set(userinfo['reg_type'].split())) - self.assertIn('full_week', set(userinfo['ticket_type'].split())) - self.assertIn('Some Company', userinfo['affiliation']) - - # Create a third registration, with a composite reg type - MeetingRegistration.objects.create( - meeting=meeting, person=None, first_name=person.first_name(), last_name=person.last_name(), - email=email_list[1], ticket_type='one_day', reg_type='hackathon remote', affiliation='Some Company, Inc', - ) + self.assertIn('hackathon_onsite', set(userinfo['reg_type'].split())) + self.assertNotIn('remote', set(userinfo['reg_type'].split())) + self.assertIn('one_day', set(userinfo['ticket_type'].split())) + self.assertIn('Some Company, Inc', userinfo['affiliation']) + + # Test with multiple tickets + reg_email.delete() + creg = RegistrationFactory( + meeting=meeting, + person=None, + first_name=person.first_name(), + last_name=person.last_name(), + email=email_list[1], + affiliation='Some Company, Inc', + with_ticket={'attendance_type_id': 'hackathon_remote', 'ticket_type_id': 'week_pass'}, + ) + RegistrationTicketFactory(registration=creg, attendance_type_id='remote', ticket_type_id='week_pass') userinfo = client.do_user_info_request(state=params["state"], scope=args['scope']) - self.assertEqual(set(userinfo['reg_type'].split()), set(['remote', 'hackathon'])) + self.assertEqual(set(userinfo['reg_type'].split()), set(['remote', 'hackathon_remote'])) # Check that ending a session works r = client.do_end_session_request(state=params["state"], scope=args['scope']) diff --git a/ietf/ietfauth/urls.py b/ietf/ietfauth/urls.py index 56daae0535..7493fe5c97 100644 --- a/ietf/ietfauth/urls.py +++ b/ietf/ietfauth/urls.py @@ -14,7 +14,7 @@ url(r'^confirmnewemail/(?P[^/]+)/$', views.confirm_new_email), url(r'^create/$', 
views.create_account), url(r'^create/confirm/(?P[^/]+)/$', views.confirm_account), - url(r'^login/$', views.login), + url(r'^login/$', views.AnyEmailLoginView.as_view(), name="ietf.ietfauth.views.login"), url(r'^logout/$', LogoutView.as_view(), name="django.contrib.auth.views.logout"), url(r'^password/$', views.change_password), url(r'^profile/$', views.profile), @@ -24,5 +24,4 @@ url(r'^review/$', views.review_overview), url(r'^testemail/$', views.test_email), url(r'^username/$', views.change_username), - url(r'^allowlist/add/?$', views.add_account_allowlist), ] diff --git a/ietf/ietfauth/utils.py b/ietf/ietfauth/utils.py index 6fa9cddbcb..0df667fbd2 100644 --- a/ietf/ietfauth/utils.py +++ b/ietf/ietfauth/utils.py @@ -12,6 +12,8 @@ from django.conf import settings from django.contrib.auth import REDIRECT_FIELD_NAME +from django.contrib.sites.models import Site +from django.core import signing from django.core.exceptions import PermissionDenied from django.db.models import Q from django.http import HttpResponseRedirect @@ -20,9 +22,10 @@ import debug # pyflakes:ignore from ietf.group.models import Role, GroupFeatures -from ietf.person.models import Person +from ietf.person.models import Email, Person from ietf.person.utils import get_dots from ietf.doc.utils_bofreq import bofreq_editors +from ietf.utils.mail import send_mail def user_is_person(user, person): """Test whether user is associated with person.""" @@ -38,9 +41,10 @@ def has_role(user, role_names, *args, **kwargs): """Determines whether user has any of the given standard roles given. Role names must be a list or, in case of a single value, a string.""" - if not isinstance(role_names, (list, tuple)): - role_names = [ role_names ] - + extra_role_qs = kwargs.get("extra_role_qs", None) + if not isinstance(role_names, (list, tuple, set)): + role_names = [role_names] + if not user or not user.is_authenticated: return False @@ -48,7 +52,13 @@ def has_role(user, role_names, *args, **kwargs): if not hasattr(user, "roles_check_cache"): user.roles_check_cache = {} - key = frozenset(role_names) + keynames = set(role_names) + if extra_role_qs: + keynames.update(set(extra_role_qs.keys())) + year = kwargs.get("year", None) + if year is not None: + keynames.add(f"nomcomyear{year}") + key = frozenset(keynames) if key not in user.roles_check_cache: try: person = user.person @@ -56,54 +66,123 @@ def has_role(user, role_names, *args, **kwargs): return False role_qs = { - "Area Director": Q(person=person, name__in=("pre-ad", "ad"), group__type="area", group__state="active"), - "Secretariat": Q(person=person, name="secr", group__acronym="secretariat"), - "IAB" : Q(person=person, name="member", group__acronym="iab"), - "IANA": Q(person=person, name="auth", group__acronym="iana"), - "RFC Editor": Q(person=person, name="auth", group__acronym="rpc"), - "ISE" : Q(person=person, name="chair", group__acronym="ise"), - "IAD": Q(person=person, name="admdir", group__acronym="ietf"), - "IETF Chair": Q(person=person, name="chair", group__acronym="ietf"), - "IETF Trust Chair": Q(person=person, name="chair", group__acronym="ietf-trust"), - "IRTF Chair": Q(person=person, name="chair", group__acronym="irtf"), - "RSAB Chair": Q(person=person, name="chair", group__acronym="rsab"), - "IAB Chair": Q(person=person, name="chair", group__acronym="iab"), - "IAB Executive Director": Q(person=person, name="execdir", group__acronym="iab"), - "IAB Group Chair": Q(person=person, name="chair", group__type="iab", group__state="active"), - "IAOC Chair": Q(person=person, 
name="chair", group__acronym="iaoc"), - "WG Chair": Q(person=person,name="chair", group__type="wg", group__state__in=["active","bof", "proposed"]), - "WG Secretary": Q(person=person,name="secr", group__type="wg", group__state__in=["active","bof", "proposed"]), - "RG Chair": Q(person=person,name="chair", group__type="rg", group__state__in=["active","proposed"]), - "RG Secretary": Q(person=person,name="secr", group__type="rg", group__state__in=["active","proposed"]), - "AG Secretary": Q(person=person,name="secr", group__type="ag", group__state__in=["active"]), - "RAG Secretary": Q(person=person,name="secr", group__type="rag", group__state__in=["active"]), - "Team Chair": Q(person=person,name="chair", group__type="team", group__state="active"), - "Program Lead": Q(person=person,name="lead", group__type="program", group__state="active"), - "Program Secretary": Q(person=person,name="secr", group__type="program", group__state="active"), - "Program Chair": Q(person=person,name="chair", group__type="program", group__state="active"), - "EDWG Chair": Q(person=person, name="chair", group__type="edwg", group__state="active"), - "Nomcom Chair": Q(person=person, name="chair", group__type="nomcom", group__acronym__icontains=kwargs.get('year', '0000')), - "Nomcom Advisor": Q(person=person, name="advisor", group__type="nomcom", group__acronym__icontains=kwargs.get('year', '0000')), - "Nomcom": Q(person=person, group__type="nomcom", group__acronym__icontains=kwargs.get('year', '0000')), - "Liaison Manager": Q(person=person,name="liaiman",group__type="sdo",group__state="active", ), - "Authorized Individual": Q(person=person,name="auth",group__type="sdo",group__state="active", ), - "Recording Manager": Q(person=person,name="recman",group__type="ietf",group__state="active", ), - "Reviewer": Q(person=person, name="reviewer", group__state="active"), - "Review Team Secretary": Q(person=person, name="secr", group__reviewteamsettings__isnull=False,group__state="active", ), - "IRSG Member": (Q(person=person, name="member", group__acronym="irsg") | Q(person=person, name="chair", group__acronym="irtf") | Q(person=person, name="atlarge", group__acronym="irsg")), - "RSAB Member": Q(person=person, name="member", group__acronym="rsab"), - "Robot": Q(person=person, name="robot", group__acronym="secretariat"), - } - - filter_expr = Q(pk__in=[]) # ensure empty set is returned if no other terms are added + "Area Director": Q( + name__in=("pre-ad", "ad"), group__type="area", group__state="active" + ), + "Secretariat": Q(name="secr", group__acronym="secretariat"), + "IAB": Q(name="member", group__acronym="iab"), + "IANA": Q(name="auth", group__acronym="iana"), + "RFC Editor": Q(name="auth", group__acronym="rpc"), + "ISE": Q(name="chair", group__acronym="ise"), + "IAD": Q(name="admdir", group__acronym="ietf"), + "IETF Chair": Q(name="chair", group__acronym="ietf"), + "IETF Trust Chair": Q(name="chair", group__acronym="ietf-trust"), + "IRTF Chair": Q(name="chair", group__acronym="irtf"), + "RSAB Chair": Q(name="chair", group__acronym="rsab"), + "IAB Chair": Q(name="chair", group__acronym="iab"), + "IAB Executive Director": Q(name="execdir", group__acronym="iab"), + "IAB Group Chair": Q( + name="chair", group__type="iab", group__state="active" + ), + "IAOC Chair": Q(name="chair", group__acronym="iaoc"), + "WG Chair": Q( + name="chair", + group__type="wg", + group__state__in=["active", "bof", "proposed"], + ), + "WG Secretary": Q( + name="secr", + group__type="wg", + group__state__in=["active", "bof", "proposed"], + ), + "RG 
Chair": Q( + name="chair", group__type="rg", group__state__in=["active", "proposed"] + ), + "RG Secretary": Q( + name="secr", group__type="rg", group__state__in=["active", "proposed"] + ), + "AG Secretary": Q( + name="secr", group__type="ag", group__state__in=["active"] + ), + "RAG Secretary": Q( + name="secr", group__type="rag", group__state__in=["active"] + ), + "Team Chair": Q(name="chair", group__type="team", group__state="active"), + "Program Lead": Q( + name="lead", group__type="program", group__state="active" + ), + "Program Secretary": Q( + name="secr", group__type="program", group__state="active" + ), + "Program Chair": Q( + name="chair", group__type="program", group__state="active" + ), + "EDWG Chair": Q(name="chair", group__type="edwg", group__state="active"), + "Nomcom Chair": Q( + name="chair", + group__type="nomcom", + group__acronym__icontains=kwargs.get("year", "0000"), + ), + "Nomcom Advisor": Q( + name="advisor", + group__type="nomcom", + group__acronym__icontains=kwargs.get("year", "0000"), + ), + "Nomcom": Q( + group__type="nomcom", + group__acronym__icontains=kwargs.get("year", "0000"), + ), + "Liaison Manager": Q( + name="liaiman", + group__type="sdo", + group__state="active", + ), + "Liaison Coordinator": Q( + name="liaison_coordinator", + group__acronym="iab", + ), + "Authorized Individual": Q( + name="auth", + group__type="sdo", + group__state="active", + ), + "Recording Manager": Q( + name="recman", + group__type="ietf", + group__state="active", + ), + "Reviewer": Q(name="reviewer", group__state="active"), + "Review Team Secretary": Q( + name="secr", + group__reviewteamsettings__isnull=False, + group__state="active", + ), + "IRSG Member": ( + Q(name="member", group__acronym="irsg") + | Q(name="chair", group__acronym="irtf") + | Q(name="atlarge", group__acronym="irsg") + ), + "RSAB Member": Q(name="member", group__acronym="rsab"), + "Robot": Q(name="robot", group__acronym="secretariat"), + } + + filter_expr = Q( + pk__in=[] + ) # ensure empty set is returned if no other terms are added for r in role_names: filter_expr |= role_qs[r] + if extra_role_qs: + for r in extra_role_qs: + filter_expr |= extra_role_qs[r] - user.roles_check_cache[key] = bool(Role.objects.filter(filter_expr).exists()) + user.roles_check_cache[key] = bool( + Role.objects.filter(person=person).filter(filter_expr).exists() + ) return user.roles_check_cache[key] + # convenient decorator def passes_test_decorator(test_func, message): @@ -132,9 +211,9 @@ def role_required(*role_names): # specific permissions + def is_authorized_in_doc_stream(user, doc): - """Return whether user is authorized to perform stream duties on - document.""" + """Is user authorized to perform stream duties on doc?""" if has_role(user, ["Secretariat"]): return True @@ -208,7 +287,7 @@ def is_individual_draft_author(user, doc): if not hasattr(user, 'person'): return False - if user.person in doc.authors(): + if user.person in doc.author_persons(): return True return False @@ -271,13 +350,14 @@ def scope_pronouns(self): ) def scope_registration(self): + # import here to avoid circular imports from ietf.meeting.helpers import get_current_ietf_meeting - from ietf.stats.models import MeetingRegistration + from ietf.meeting.models import Registration meeting = get_current_ietf_meeting() person = self.user.person email_list = person.email_set.values_list('address') q = Q(person=person, meeting=meeting) | Q(email__in=email_list, meeting=meeting) - regs = MeetingRegistration.objects.filter(q).distinct() + regs = 
Registration.objects.filter(q).distinct() for reg in regs: if not reg.person_id: reg.person = person @@ -288,19 +368,20 @@ def scope_registration(self): ticket_types = set([]) reg_types = set([]) for reg in regs: - ticket_types.add(reg.ticket_type) - reg_types.add(reg.reg_type) + for ticket in reg.tickets.all(): + ticket_types.add(ticket.ticket_type.slug) + reg_types.add(ticket.attendance_type.slug) info = { - 'meeting': meeting.number, + 'meeting': meeting.number, # full_week, one_day, student: - 'ticket_type': ' '.join(ticket_types), + 'ticket_type': ' '.join(ticket_types), # onsite, remote, hackathon_onsite, hackathon_remote: - 'reg_type': ' '.join(reg_types), - 'affiliation': ([ reg.affiliation for reg in regs if reg.affiliation ] or [''])[0], + 'reg_type': ' '.join(reg_types), + 'affiliation': ([reg.affiliation for reg in regs if reg.affiliation] or [''])[0], } return info - + def can_request_rfc_publication(user, doc): """Answers whether this user has an appropriate role to send this document to the RFC Editor for publication as an RFC. @@ -322,3 +403,47 @@ def can_request_rfc_publication(user, doc): return False # See the docstring else: return False + + +def send_new_email_confirmation_request(person: Person, address: str): + """Request confirmation of a new email address + + If the email address is already in use, sends an alert to it. If not, sends a confirmation request. + By design, does not indicate which was sent. This is intended to make it a bit harder to scrape addresses + with a mindless bot. + """ + auth = signing.dumps([person.user.username, address], salt="add_email") + domain = Site.objects.get_current().domain + from_email = settings.DEFAULT_FROM_EMAIL + + existing = Email.objects.filter(address=address).first() + if existing: + subject = f"Attempt to add your email address by {person.name}" + send_mail( + None, + address, + from_email, + subject, + "registration/add_email_exists_email.txt", + { + "domain": domain, + "email": address, + "person": person, + }, + ) + else: + subject = f"Confirm email address for {person.name}" + send_mail( + None, + address, + from_email, + subject, + "registration/add_email_email.txt", + { + "domain": domain, + "auth": auth, + "email": address, + "person": person, + "expire": settings.DAYS_TO_EXPIRE_REGISTRATION_LINK, + }, + ) diff --git a/ietf/ietfauth/validators.py b/ietf/ietfauth/validators.py new file mode 100644 index 0000000000..84684f34d5 --- /dev/null +++ b/ietf/ietfauth/validators.py @@ -0,0 +1,34 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +import re + +from django import forms +from django.conf import settings +from django.core.exceptions import ValidationError + + +def prevent_at_symbol(name): + if "@" in name: + raise forms.ValidationError( + "Please fill in name - this looks like an email address (@ is not allowed in names)." 
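Illustrative example (not part of the patch) of the registration claims assembled by scope_registration() above: ticket_type collects the ticket_type slugs and reg_type collects the attendance_type slugs across all of the person's tickets for the current meeting. The concrete values below are hypothetical, using only slugs named in the comments and tests in this change.

# Hypothetical shape of the dict returned by scope_registration() for a
# person holding two tickets at the current meeting.
info = {
    "meeting": "123",                       # current IETF meeting number
    "ticket_type": "week_pass one_day",     # distinct ticket_type slugs, space-separated
    "reg_type": "remote hackathon_onsite",  # distinct attendance_type slugs, space-separated
    "affiliation": "Some Company, Inc",     # first non-empty affiliation found
}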
+ ) + + +def prevent_system_name(name): + name_without_spaces = name.replace(" ", "").replace("\t", "") + if "(system)" in name_without_spaces.lower(): + raise forms.ValidationError("Please pick another name - this name is reserved.") + + +def prevent_anonymous_name(name): + name_without_spaces = name.replace(" ", "").replace("\t", "") + if "anonymous" in name_without_spaces.lower(): + raise forms.ValidationError("Please pick another name - this name is reserved.") + + +def is_allowed_address(value): + """Validate that an address complies with datatracker requirements""" + for pat in settings.EXCLUDED_PERSONAL_EMAIL_REGEX_PATTERNS: + if re.search(pat, value): + raise ValidationError( + "This email address is not valid in a datatracker account" + ) diff --git a/ietf/ietfauth/views.py b/ietf/ietfauth/views.py index ac47634499..b5256b14f8 100644 --- a/ietf/ietfauth/views.py +++ b/ietf/ietfauth/views.py @@ -38,14 +38,14 @@ import importlib # needed if we revert to higher barrier for account creation -#from datetime import datetime as DateTime, timedelta as TimeDelta, date as Date +# from datetime import datetime as DateTime, timedelta as TimeDelta, date as Date from collections import defaultdict import django.core.signing from django import forms from django.contrib import messages from django.conf import settings -from django.contrib.auth import update_session_auth_hash, logout, authenticate +from django.contrib.auth import logout, update_session_auth_hash, password_validation from django.contrib.auth.decorators import login_required from django.contrib.auth.forms import AuthenticationForm from django.contrib.auth.hashers import identify_hasher @@ -63,11 +63,9 @@ from ietf.group.models import Role, Group from ietf.ietfauth.forms import ( RegistrationForm, PasswordForm, ResetPasswordForm, TestEmailForm, - AllowlistForm, ChangePasswordForm, get_person_form, RoleEmailForm, + ChangePasswordForm, get_person_form, RoleEmailForm, NewEmailForm, ChangeUsernameForm, PersonPasswordForm) -from ietf.ietfauth.htpasswd import update_htpasswd_file -from ietf.ietfauth.utils import role_required, has_role -from ietf.mailinglists.models import Allowlisted +from ietf.ietfauth.utils import has_role, send_new_email_confirmation_request from ietf.name.models import ExtResourceName from ietf.nomcom.models import NomCom from ietf.person.models import Person, Email, Alias, PersonalApiKey, PERSON_API_KEY_VALUES @@ -82,7 +80,6 @@ # These are needed if we revert to the higher bar for account creation - def index(request): return render(request, 'registration/index.html') @@ -99,7 +96,7 @@ def index(request): # def ietf_login(request): # if not request.user.is_authenticated: # return HttpResponse("Not authenticated?", status=500) -# +# # redirect_to = request.REQUEST.get(REDIRECT_FIELD_NAME, '') # request.session.set_test_cookie() # return HttpResponseRedirect('/accounts/loggedin/?%s=%s' % (REDIRECT_FIELD_NAME, urlquote(redirect_to))) @@ -160,18 +157,8 @@ def create_account(request): ) new_account_email = None # Indicate to the template that we failed to create the requested account else: - # For the IETF 113 Registration period (at least) we are lowering the - # barriers for account creation to the simple email round-trip check send_account_creation_email(request, new_account_email) - # The following is what to revert to should that lowered barrier prove problematic - # existing = Subscribed.objects.filter(email__iexact=new_account_email).first() - # ok_to_create = ( 
Allowlisted.objects.filter(email__iexact=new_account_email).exists() - # or existing and (existing.time + TimeDelta(seconds=settings.LIST_ACCOUNT_DELAY)) < DateTime.now() ) - # if ok_to_create: - # send_account_creation_email(request, new_account_email) - # else: - # return render(request, 'registration/manual.html', { 'account_request_email': settings.ACCOUNT_REQUEST_EMAIL }) else: form = RegistrationForm() @@ -233,8 +220,6 @@ def confirm_account(request, auth): user = User.objects.create(username=email, email=email) user.set_password(password) user.save() - # password is also stored in htpasswd file - update_htpasswd_file(email, password) # make sure the rest of the person infrastructure is # well-connected @@ -311,31 +296,8 @@ def profile(request): to_email = f.cleaned_data["new_email"] if not to_email: continue - email_confirmations.append(to_email) - - auth = django.core.signing.dumps([person.user.username, to_email], salt="add_email") - - domain = Site.objects.get_current().domain - from_email = settings.DEFAULT_FROM_EMAIL - - existing = Email.objects.filter(address=to_email).first() - if existing: - subject = 'Attempt to add your email address by %s' % person.name - send_mail(request, to_email, from_email, subject, 'registration/add_email_exists_email.txt', { - 'domain': domain, - 'email': to_email, - 'person': person, - }) - else: - subject = 'Confirm email address for %s' % person.name - send_mail(request, to_email, from_email, subject, 'registration/add_email_email.txt', { - 'domain': domain, - 'auth': auth, - 'email': to_email, - 'person': person, - 'expire': settings.DAYS_TO_EXPIRE_REGISTRATION_LINK, - }) + send_new_email_confirmation_request(person, to_email) for r in roles: e = r.email_form.cleaned_data["email"] @@ -505,9 +467,19 @@ def password_reset(request): if not user: # try to find user ID from the email address email = Email.objects.filter(address=submitted_username).first() - if email and email.person and email.person.user: - user = email.person.user - + if email and email.person: + if email.person.user: + user = email.person.user + else: + # Create a User record with this (conditioned by way of Email) username + # Don't bother setting the name or email fields on User - rely on the + # Person pointer. 
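Illustrative sketch (not part of the patch) of the utility that profile() now delegates to; the wrapper name below is hypothetical, and only the imported function comes from this change.

from ietf.ietfauth.utils import send_new_email_confirmation_request

def request_extra_address(person, address):
    """Hypothetical helper around the utility used by profile() above."""
    # Sends a confirmation link if the address is unused, or an alert to the
    # current owner if it already exists; callers are deliberately not told
    # which, so the round trip cannot be used to probe for known addresses.
    send_new_email_confirmation_request(person, address)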
+ user = User.objects.create( + username=email.address.lower(), + is_active=True, + ) + email.person.user = user + email.person.save() if user and user.person.email_set.filter(active=True).exists(): data = { 'username': user.username, @@ -545,7 +517,7 @@ def confirm_password_reset(request, auth): password = data['password'] last_login = None if data['last_login']: - last_login = datetime.datetime.fromtimestamp(data['last_login'], datetime.timezone.utc) + last_login = datetime.datetime.fromtimestamp(data['last_login'], datetime.UTC) except django.core.signing.BadSignature: raise Http404("Invalid or expired auth") @@ -557,18 +529,16 @@ def confirm_password_reset(request, auth): ) success = False if request.method == 'POST': - form = PasswordForm(request.POST) + form = PasswordForm(user=user, data=request.POST) if form.is_valid(): password = form.cleaned_data["password"] user.set_password(password) user.save() - # password is also stored in htpasswd file - update_htpasswd_file(user.username, password) success = True else: - form = PasswordForm() + form = PasswordForm(user=user) hlibname, hashername = settings.PASSWORD_HASHERS[0].rsplit('.',1) hlib = importlib.import_module(hlibname) @@ -610,23 +580,6 @@ def test_email(request): return r -@role_required('Secretariat') -def add_account_allowlist(request): - success = False - if request.method == 'POST': - form = AllowlistForm(request.POST) - if form.is_valid(): - email = form.cleaned_data['email'] - entry = Allowlisted(email=email, by=request.user.person) - entry.save() - success = True - else: - form = AllowlistForm() - - return render(request, 'ietfauth/allowlist_form.html', { - 'form': form, - 'success': success, - }) class AddReviewWishForm(forms.Form): doc = SearchableDocumentField(label="Document", doc_type="draft") @@ -716,12 +669,10 @@ def change_password(request): if request.method == 'POST': form = ChangePasswordForm(user, request.POST) if form.is_valid(): - new_password = form.cleaned_data["new_password"] + new_password = form.cleaned_data["password"] user.set_password(new_password) user.save() - # password is also stored in htpasswd file - update_htpasswd_file(user.username, new_password) # keep the session update_session_auth_hash(request, user) @@ -743,7 +694,7 @@ def change_password(request): 'hasher': hasher, }) - + @login_required @person_required def change_username(request): @@ -758,13 +709,10 @@ def change_username(request): form = ChangeUsernameForm(user, request.POST) if form.is_valid(): new_username = form.cleaned_data["username"] - password = form.cleaned_data["password"] assert new_username in emails user.username = new_username.lower() user.save() - # password is also stored in htpasswd file - update_htpasswd_file(user.username, password) # keep the session update_session_auth_hash(request, user) @@ -779,53 +727,79 @@ def change_username(request): return render(request, 'registration/change_username.html', {'form': form}) - -def login(request, extra_context=None): - """ - This login function is a wrapper around django's login() for the purpose - of providing a notification if the user's password has been cleared. The - warning will be triggered if the password field has been set to something - which is not recognized as a valid password hash. +class AnyEmailAuthenticationForm(AuthenticationForm): + """AuthenticationForm that allows any email address as the username + + Also performs a check for a cleared password field and provides a helpful error message + if that applies to the user attempting to log in. 
""" - - if request.method == "POST": - form = AuthenticationForm(request, data=request.POST) - username = form.data.get('username') - user = User.objects.filter(username__iexact=username).first() # Consider _never_ actually looking for the User username and only looking at Email - if not user: - # try to find user ID from the email address + _unauthenticated_user = None + + def clean_username(self): + username = self.cleaned_data.get("username", None) + if username is None: + raise self.get_invalid_login_error() + user = User.objects.filter(username__iexact=username).first() + if user is None: email = Email.objects.filter(address=username).first() - if email and email.person and email.person.user: - u2 = email.person.user - # be conservative, only accept this if login is valid - if u2: - pw = form.data.get('password') - au = authenticate(request, username=u2.username, password=pw) - if au: - # kludge to change the querydict - q2 = request.POST.copy() - q2['username'] = u2.username - request.POST = q2 - user = u2 - # - if user: - try: - identify_hasher(user.password) + if email and email.person: + user = email.person.user # might be None + if user is None: + raise self.get_invalid_login_error() + self._unauthenticated_user = user # remember this for the clean() method + return user.username + + def clean(self): + if self._unauthenticated_user is not None: + try: + identify_hasher(self._unauthenticated_user.password) except ValueError: - extra_context = {"alert": - "Note: Your password has been cleared because " - "of possible password leakage. " - "Please use the password reset link below " - "to set a new password for your account.", - } - response = LoginView.as_view(extra_context=extra_context)(request) - if isinstance(response, HttpResponseRedirect) and user and user.is_authenticated: - try: - user.person - except Person.DoesNotExist: - logout(request) - response = render(request, 'registration/missing_person.html') - return response + self.add_error( + "password", + 'Your password has been cleared because of possible password leakage. ' + 'Please use the "Forgot your password?" button below to set a new password ' + 'for your account.', + ) + return super().clean() + + def confirm_login_allowed(self, user): + """Check whether a successfully authenticated user is permitted to log in""" + super().confirm_login_allowed(user) + # Optionally enforce password validation + if getattr(settings, "PASSWORD_POLICY_ENFORCE_AT_LOGIN", False): + try: + password_validation.validate_password( + self.cleaned_data["password"], user + ) + except ValidationError: + raise ValidationError( + # dict mapping field to error / error list + { + "__all__": ValidationError( + 'You entered your password correctly, but it does not ' + 'meet our current length and complexity requirements. ' + 'Please use the "Forgot your password?" button below to ' + 'set a new password for your account.' + ), + } + ) + + +class AnyEmailLoginView(LoginView): + """LoginView that allows any email address as the username + + Redirects to the missing_person page instead of logging in if the user does not have a Person + """ + form_class = AnyEmailAuthenticationForm + + def form_valid(self, form): + """Security check complete. Log the user in if they have a Person.""" + user = form.get_user() # user has authenticated at this point + if not hasattr(user, "person"): + logout(self.request) # should not be logged in yet, but just in case... 
+ return render(self.request, "registration/missing_person.html") + return super().form_valid(form) + @login_required @person_required diff --git a/ietf/ietfauth/widgets.py b/ietf/ietfauth/widgets.py index c9a0523402..fd7fa16726 100644 --- a/ietf/ietfauth/widgets.py +++ b/ietf/ietfauth/widgets.py @@ -39,18 +39,19 @@ def render(self, name, value, attrs=None, renderer=None): strength_markup = """
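Illustrative test-style sketch (not part of the patch) of logging in through AnyEmailLoginView with an email address in the username field. The test class name and password literal are hypothetical, and it assumes PersonFactory supplies a default active email address; the URL name comes from the urls.py change above.

from django.urls import reverse

from ietf.person.factories import PersonFactory
from ietf.utils.test_utils import TestCase

class AnyEmailLoginSketch(TestCase):
    """Hypothetical sketch, not an actual test in this change."""

    def test_login_with_email_address(self):
        person = PersonFactory()
        person.user.set_password("a-password")  # hypothetical password
        person.user.save()
        address = person.email_set.first().address
        # The username field now accepts any of the person's email addresses.
        r = self.client.post(
            reverse("ietf.ietfauth.views.login"),
            {"username": address, "password": "a-password"},
        )
        self.assertEqual(r.status_code, 302)  # logged in and redirected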
-
+
- """ % ( + """.format( _("Warning"), _( 'This password would take to crack.' diff --git a/ietf/ipr/admin.py b/ietf/ipr/admin.py index a0185f58c6..d6a320203b 100644 --- a/ietf/ipr/admin.py +++ b/ietf/ipr/admin.py @@ -1,13 +1,23 @@ -# Copyright The IETF Trust 2010-2020, All Rights Reserved +# Copyright The IETF Trust 2010-2025, All Rights Reserved # -*- coding: utf-8 -*- from django import forms from django.contrib import admin from ietf.name.models import DocRelationshipName -from ietf.ipr.models import (IprDisclosureBase, IprDocRel, IprEvent, - RelatedIpr, HolderIprDisclosure, ThirdPartyIprDisclosure, GenericIprDisclosure, - NonDocSpecificIprDisclosure, LegacyMigrationIprEvent) +from ietf.ipr.models import ( + IprDisclosureBase, + IprDocRel, + IprEvent, + RelatedIpr, + HolderIprDisclosure, + RemovedIprDisclosure, + ThirdPartyIprDisclosure, + GenericIprDisclosure, + NonDocSpecificIprDisclosure, + LegacyMigrationIprEvent, +) +from ietf.utils.admin import SaferTabularInline # ------------------------------------------------------ # ModelAdmins @@ -20,13 +30,13 @@ class Meta: 'sections':forms.TextInput, } -class IprDocRelInline(admin.TabularInline): +class IprDocRelInline(SaferTabularInline): model = IprDocRel form = IprDocRelAdminForm raw_id_fields = ['document'] extra = 1 -class RelatedIprInline(admin.TabularInline): +class RelatedIprInline(SaferTabularInline): model = RelatedIpr raw_id_fields = ['target'] fk_name = 'source' @@ -94,7 +104,7 @@ class IprDocRelAdmin(admin.ModelAdmin): class RelatedIprAdmin(admin.ModelAdmin): list_display = ['source', 'target', 'relationship', ] - search_fields = ['source__name', 'target__name', 'target__docs__name', ] + search_fields = ['source__name', 'target__name', ] raw_id_fields = ['source', 'target', ] admin.site.register(RelatedIpr, RelatedIprAdmin) @@ -110,3 +120,9 @@ class LegacyMigrationIprEventAdmin(admin.ModelAdmin): list_filter = ['time', 'type', 'response_due'] raw_id_fields = ['by', 'disclosure', 'message', 'in_reply_to'] admin.site.register(LegacyMigrationIprEvent, LegacyMigrationIprEventAdmin) + +class RemovedIprDisclosureAdmin(admin.ModelAdmin): + pass + + +admin.site.register(RemovedIprDisclosure, RemovedIprDisclosureAdmin) diff --git a/ietf/ipr/factories.py b/ietf/ipr/factories.py index ca48b32d02..8a8a740158 100644 --- a/ietf/ipr/factories.py +++ b/ietf/ipr/factories.py @@ -42,7 +42,7 @@ def docs(self, create, extracted, **kwargs): return if extracted: for doc in extracted: - IprDocRel.objects.create(disclosure=self,document=doc.docalias.first()) + IprDocRel.objects.create(disclosure=self,document=doc) @factory.post_generation def updates(self, create, extracted, **kwargs): diff --git a/ietf/ipr/forms.py b/ietf/ipr/forms.py index fe4a70f8c4..dac34bddf6 100644 --- a/ietf/ipr/forms.py +++ b/ietf/ipr/forms.py @@ -14,7 +14,7 @@ import debug # pyflakes:ignore from ietf.group.models import Group -from ietf.doc.fields import SearchableDocAliasField +from ietf.doc.fields import SearchableDocumentField from ietf.ipr.mail import utc_from_string from ietf.ipr.fields import SearchableIprDisclosuresField from ietf.ipr.models import (IprDocRel, IprDisclosureBase, HolderIprDisclosure, @@ -95,7 +95,7 @@ def clean(self): return self.cleaned_data class DraftForm(forms.ModelForm): - document = SearchableDocAliasField(label="I-D name/RFC number", required=True, doc_type="draft") + document = SearchableDocumentField(label="I-D name/RFC number", required=True, doc_type="all") class Meta: model = IprDocRel @@ -112,7 +112,7 @@ def clean(self): if not document: 
self.add_error("document", "Identifying the Internet-Draft or RFC for this disclosure is required.") elif not document.name.startswith("rfc"): - if revisions.strip() == "": + if revisions is None or revisions.strip() == "": self.add_error("revisions", "Revisions of this Internet-Draft for which this disclosure is relevant must be specified.") return cleaned_data @@ -338,7 +338,19 @@ def clean(self): return cleaned_data + class HolderIprDisclosureForm(IprDisclosureFormBase): + is_blanket_disclosure = forms.BooleanField( + label=mark_safe( + 'This is a blanket IPR disclosure ' + '(see Section 5.4.3 of RFC 8179)' + ), + help_text="In satisfaction of its disclosure obligations, Patent Holder commits to license all of " + "IPR (as defined in RFC 8179) that would have required disclosure under RFC 8179 on a " + "royalty-free (and otherwise reasonable and non-discriminatory) basis. Patent Holder " + "confirms that all other terms and conditions are described in this IPR disclosure.", + required=False, + ) licensing = CustomModelChoiceField(IprLicenseTypeName.objects.all(), widget=forms.RadioSelect,empty_label=None) @@ -356,6 +368,15 @@ def __init__(self, *args, **kwargs): else: # entering new disclosure self.fields['licensing'].queryset = IprLicenseTypeName.objects.exclude(slug='none-selected') + + if self.data.get("is_blanket_disclosure", False): + # for a blanket disclosure, patent details are not required + self.fields["patent_number"].required = False + self.fields["patent_inventor"].required = False + self.fields["patent_title"].required = False + self.fields["patent_date"].required = False + # n.b., self.fields["patent_notes"] is never required + def clean(self): cleaned_data = super(HolderIprDisclosureForm, self).clean() diff --git a/ietf/ipr/mail.py b/ietf/ipr/mail.py index 842426d820..9bef751b95 100644 --- a/ietf/ipr/mail.py +++ b/ietf/ipr/mail.py @@ -66,9 +66,9 @@ def utc_from_string(s): if date is None: return None elif is_aware(date): - return date.astimezone(datetime.timezone.utc) + return date.astimezone(datetime.UTC) else: - return date.replace(tzinfo=datetime.timezone.utc) + return date.replace(tzinfo=datetime.UTC) # ---------------------------------------------------------------- # Email Functions @@ -171,31 +171,44 @@ def message_from_message(message,by=None): ) return msg + +class UndeliverableIprResponseError(Exception): + """Response email could not be delivered and should be treated as an error""" + + def process_response_email(msg): - """Saves an incoming message. msg=string. Message "To" field is expected to - be in the format ietf-ipr+[identifier]@ietf.org. Expect to find a message with - a matching value in the reply_to field, associated to an IPR disclosure through - IprEvent. Create a Message object for the incoming message and associate it to - the original message via new IprEvent""" + """Save an incoming IPR response email message + + Message "To" field is expected to be in the format ietf-ipr+[identifier]@ietf.org. If + the address or identifier is missing, the message will be silently dropped. + + Expect to find a message with a matching value in the reply_to field, associated to an + IPR disclosure through IprEvent. 
If it cannot be matched, raises UndeliverableIprResponseError + + Creates a Message object for the incoming message and associates it to + the original message via new IprEvent + """ message = message_from_bytes(force_bytes(msg)) to = message.get('To', '') # exit if this isn't a response we're interested in (with plus addressing) - local,domain = get_base_ipr_request_address().split('@') + local, domain = get_base_ipr_request_address().split('@') if not re.match(r'^{}\+[a-zA-Z0-9_\-]{}@{}'.format(local,'{16}',domain),to): - return None - + _from = message.get("From", "") + log(f"Ignoring IPR email without a message identifier from {_from} to {to}") + return + try: to_message = Message.objects.get(reply_to=to) except Message.DoesNotExist: log('Error finding matching message ({})'.format(to)) - return None + raise UndeliverableIprResponseError(f"Unable to find message matching {to}") try: disclosure = to_message.msgevents.first().disclosure except: log('Error processing message ({})'.format(to)) - return None + raise UndeliverableIprResponseError("Error processing message for {to}") ietf_message = message_from_message(message) IprEvent.objects.create( @@ -207,4 +220,4 @@ def process_response_email(msg): ) log("Received IPR email from %s" % ietf_message.frm) - return ietf_message + diff --git a/ietf/ipr/management/commands/generate_draft_recursive_txt.py b/ietf/ipr/management/commands/generate_draft_recursive_txt.py deleted file mode 100644 index 2c61a7604f..0000000000 --- a/ietf/ipr/management/commands/generate_draft_recursive_txt.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright The IETF Trust 2014-2021, All Rights Reserved -# -*- coding: utf-8 -*- - - -from django.core.management.base import BaseCommand, CommandError - -from ietf.ipr.utils import generate_draft_recursive_txt - - -class Command(BaseCommand): - help = ("Generate machine-readable list of IPR disclosures by Internet-Draft name (recursive)") - - def handle(self, *args, **options): - try: - generate_draft_recursive_txt() - except (ValueError, IOError) as e: - raise CommandError(e) diff --git a/ietf/ipr/management/commands/process_email.py b/ietf/ipr/management/commands/process_email.py index 0b15fb0651..616cade5c4 100644 --- a/ietf/ipr/management/commands/process_email.py +++ b/ietf/ipr/management/commands/process_email.py @@ -9,7 +9,7 @@ from django.core.management import CommandError from ietf.utils.management.base import EmailOnFailureCommand -from ietf.ipr.mail import process_response_email +from ietf.ipr.mail import process_response_email, UndeliverableIprResponseError import debug # pyflakes:ignore @@ -31,7 +31,7 @@ def handle(self, *args, **options): self.msg_bytes = sys.stdin.buffer.read() try: process_response_email(self.msg_bytes) - except ValueError as e: + except (ValueError, UndeliverableIprResponseError) as e: raise CommandError(e) failure_subject = 'Error during ipr email processing' diff --git a/ietf/ipr/management/tests.py b/ietf/ipr/management/tests.py index d84b0cfef4..d7acd65042 100644 --- a/ietf/ipr/management/tests.py +++ b/ietf/ipr/management/tests.py @@ -1,7 +1,7 @@ # Copyright The IETF Trust 2021, All Rights Reserved # -*- coding: utf-8 -*- """Tests of ipr management commands""" -import mock +from unittest import mock import sys from django.core.management import call_command diff --git a/ietf/ipr/migrations/0002_iprdocrel_no_aliases.py b/ietf/ipr/migrations/0002_iprdocrel_no_aliases.py new file mode 100644 index 0000000000..bcfc73a320 --- /dev/null +++ b/ietf/ipr/migrations/0002_iprdocrel_no_aliases.py @@ 
-0,0 +1,104 @@ +# Generated by Django 4.2.2 on 2023-06-16 13:40 + +from django.db import migrations +import django.db.models.deletion +from django.db.models import F, Subquery, OuterRef, ManyToManyField, CharField +import ietf.utils.models + +def forward(apps, schema_editor): + IprDocRel = apps.get_model("ipr", "IprDocRel") + DocAlias = apps.get_model("doc", "DocAlias") + document_subquery = Subquery( + DocAlias.objects.filter( + pk=OuterRef("deprecated_document") + ).values("docs")[:1] + ) + name_subquery = Subquery( + DocAlias.objects.filter( + pk=OuterRef("deprecated_document") + ).values("name")[:1] + ) + IprDocRel.objects.annotate( + firstdoc=document_subquery, + aliasname=name_subquery, + ).update( + document=F("firstdoc"), + originaldocumentaliasname=F("aliasname"), + ) + # This might not be right - we may need here (and in the relateddocument migrations) to pay attention to + # whether the name being pointed to is and rfc name or a draft name and point to the right object instead... + +def reverse(apps, schema_editor): + pass + +class Migration(migrations.Migration): + dependencies = [ + ("ipr", "0001_initial"), + ("doc", "0016_relate_hist_no_aliases") + ] + + operations = [ + migrations.AlterField( + model_name='iprdocrel', + name='document', + field=ietf.utils.models.ForeignKey( + db_index=False, + on_delete=django.db.models.deletion.CASCADE, + to='doc.docalias', + ), + ), + migrations.RenameField( + model_name="iprdocrel", + old_name="document", + new_name="deprecated_document" + ), + migrations.AlterField( + model_name='iprdocrel', + name='deprecated_document', + field=ietf.utils.models.ForeignKey( + db_index=True, + on_delete=django.db.models.deletion.CASCADE, + to='doc.docalias', + ), + ), + migrations.AddField( + model_name="iprdocrel", + name="document", + field=ietf.utils.models.ForeignKey( + default=1, # A lie, but a convenient one - no iprdocrel objects point here. 
+ on_delete=django.db.models.deletion.CASCADE, + to="doc.document", + db_index=False, + ), + preserve_default=False, + ), + migrations.AddField( + model_name="iprdocrel", + name="originaldocumentaliasname", + field=CharField(max_length=255,null=True,blank=True), + preserve_default=True, + ), + migrations.RunPython(forward, reverse), + migrations.AlterField( + model_name="iprdocrel", + name="document", + field=ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="doc.document", + db_index=True, + ), + ), + migrations.AlterField( + model_name='iprdisclosurebase', + name='docs', + field=ManyToManyField(through='ipr.IprDocRel', to='doc.Document'), + ), + migrations.RemoveField( + model_name="iprdocrel", + name="deprecated_document", + field=ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to='doc.DocAlias', + ), + ), + ] diff --git a/ietf/ipr/migrations/0003_alter_iprdisclosurebase_docs.py b/ietf/ipr/migrations/0003_alter_iprdisclosurebase_docs.py new file mode 100644 index 0000000000..23b349f567 --- /dev/null +++ b/ietf/ipr/migrations/0003_alter_iprdisclosurebase_docs.py @@ -0,0 +1,18 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0017_delete_docalias"), + ("ipr", "0002_iprdocrel_no_aliases"), + ] + + operations = [ + migrations.AlterField( + model_name="iprdisclosurebase", + name="docs", + field=models.ManyToManyField(through="ipr.IprDocRel", to="doc.document"), + ), + ] diff --git a/ietf/ipr/migrations/0004_holderiprdisclosure_is_blanket_disclosure.py b/ietf/ipr/migrations/0004_holderiprdisclosure_is_blanket_disclosure.py new file mode 100644 index 0000000000..66282b3cd5 --- /dev/null +++ b/ietf/ipr/migrations/0004_holderiprdisclosure_is_blanket_disclosure.py @@ -0,0 +1,16 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("ipr", "0003_alter_iprdisclosurebase_docs"), + ] + + operations = [ + migrations.AddField( + model_name="holderiprdisclosure", + name="is_blanket_disclosure", + field=models.BooleanField(default=False), + ), + ] diff --git a/ietf/ipr/migrations/0005_removediprdisclosure.py b/ietf/ipr/migrations/0005_removediprdisclosure.py new file mode 100644 index 0000000000..400a264579 --- /dev/null +++ b/ietf/ipr/migrations/0005_removediprdisclosure.py @@ -0,0 +1,28 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("ipr", "0004_holderiprdisclosure_is_blanket_disclosure"), + ] + + operations = [ + migrations.CreateModel( + name="RemovedIprDisclosure", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("removed_id", models.PositiveBigIntegerField(unique=True)), + ("reason", models.TextField()), + ], + ), + ] diff --git a/ietf/ipr/migrations/0006_already_removed_ipr.py b/ietf/ipr/migrations/0006_already_removed_ipr.py new file mode 100644 index 0000000000..0e2dbc63eb --- /dev/null +++ b/ietf/ipr/migrations/0006_already_removed_ipr.py @@ -0,0 +1,24 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +from django.db import migrations + + +def forward(apps, schema_editor): + RemovedIprDisclosure = apps.get_model("ipr", "RemovedIprDisclosure") + for id in (6544, 6068): + 
RemovedIprDisclosure.objects.create( + removed_id=id, + reason="This IPR disclosure was removed as objectively false.", + ) + + +def reverse(apps, schema_editor): + RemovedIprDisclosure = apps.get_model("ipr", "RemovedIprDisclosure") + RemovedIprDisclosure.objects.all().delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("ipr", "0005_removediprdisclosure"), + ] + + operations = [migrations.RunPython(forward, reverse)] diff --git a/ietf/ipr/models.py b/ietf/ipr/models.py index b3add079d0..ea148c2704 100644 --- a/ietf/ipr/models.py +++ b/ietf/ipr/models.py @@ -1,13 +1,14 @@ -# Copyright The IETF Trust 2007-2023, All Rights Reserved +# Copyright The IETF Trust 2007-2025, All Rights Reserved # -*- coding: utf-8 -*- from django.conf import settings +from django.core.exceptions import ValidationError from django.db import models from django.urls import reverse from django.utils import timezone -from ietf.doc.models import DocAlias, DocEvent +from ietf.doc.models import Document, DocEvent from ietf.name.models import DocRelationshipName,IprDisclosureStateName,IprLicenseTypeName,IprEventTypeName from ietf.person.models import Person from ietf.message.models import Message @@ -16,11 +17,11 @@ class IprDisclosureBase(models.Model): by = ForeignKey(Person) # who was logged in, or System if nobody was logged in compliant = models.BooleanField("Complies to RFC3979", default=True) - docs = models.ManyToManyField(DocAlias, through='IprDocRel') + docs = models.ManyToManyField(Document, through='ipr.IprDocRel') holder_legal_name = models.CharField(max_length=255) notes = models.TextField("Additional notes", blank=True) other_designations = models.CharField("Designations for other contributions", blank=True, max_length=255) - rel = models.ManyToManyField('self', through='RelatedIpr', symmetrical=False) + rel = models.ManyToManyField('self', through='ipr.RelatedIpr', symmetrical=False) state = ForeignKey(IprDisclosureStateName) submitter_name = models.CharField(max_length=255,blank=True) submitter_email = models.EmailField(blank=True) @@ -124,17 +125,30 @@ def is_thirdparty(self): class HolderIprDisclosure(IprDisclosureBase): - ietfer_name = models.CharField(max_length=255, blank=True) # "Whose Personal Belief Triggered..." - ietfer_contact_email = models.EmailField(blank=True) - ietfer_contact_info = models.TextField(blank=True) - patent_info = models.TextField() - has_patent_pending = models.BooleanField(default=False) - holder_contact_email = models.EmailField() - holder_contact_name = models.CharField(max_length=255) - holder_contact_info = models.TextField(blank=True, help_text="Address, phone, etc.") - licensing = ForeignKey(IprLicenseTypeName) - licensing_comments = models.TextField(blank=True) + ietfer_name = models.CharField( + max_length=255, blank=True + ) # "Whose Personal Belief Triggered..." 
+ ietfer_contact_email = models.EmailField(blank=True) + ietfer_contact_info = models.TextField(blank=True) + patent_info = models.TextField() + has_patent_pending = models.BooleanField(default=False) + holder_contact_email = models.EmailField() + holder_contact_name = models.CharField(max_length=255) + holder_contact_info = models.TextField(blank=True, help_text="Address, phone, etc.") + licensing = ForeignKey(IprLicenseTypeName) + licensing_comments = models.TextField(blank=True) submitter_claims_all_terms_disclosed = models.BooleanField(default=False) + is_blanket_disclosure = models.BooleanField(default=False) + + def clean(self): + if self.is_blanket_disclosure: + # If the IprLicenseTypeName does not exist, we have a serious problem and a 500 response is ok, + # so not handling failure of the `get()` + royalty_free_licensing = IprLicenseTypeName.objects.get(slug="royalty-free") + if self.licensing_id != royalty_free_licensing.pk: + raise ValidationError( + f'Must select "{royalty_free_licensing.desc}" for a blanket IPR disclosure.') + class ThirdPartyIprDisclosure(IprDisclosureBase): ietfer_name = models.CharField(max_length=255) # "Whose Personal Belief Triggered..." @@ -160,9 +174,10 @@ class GenericIprDisclosure(IprDisclosureBase): class IprDocRel(models.Model): disclosure = ForeignKey(IprDisclosureBase) - document = ForeignKey(DocAlias) + document = ForeignKey(Document) sections = models.TextField(blank=True) revisions = models.CharField(max_length=16,blank=True) # allows strings like 01-07 + originaldocumentaliasname = models.CharField(max_length=255, null=True, blank=True) def doc_type(self): name = self.document.name @@ -175,7 +190,7 @@ def doc_type(self): def formatted_name(self): name = self.document.name - if name.startswith("rfc"): + if len(name) >= 3 and name[:3] in ("rfc", "bcp", "fyi", "std"): return name.upper() #elif self.revisions: # return "%s-%s" % (name, self.revisions) @@ -234,10 +249,7 @@ def create_doc_events(self): 'removed_objfalse': 'removed_objfalse_related_ipr', } if self.type_id in event_type_map: - related_docs = set() # related docs, no duplicates - for alias in self.disclosure.docs.all(): - related_docs.update(alias.docs.all()) - for doc in related_docs: + for doc in self.disclosure.docs.distinct(): DocEvent.objects.create( type=event_type_map[self.type_id], time=self.time, @@ -258,3 +270,7 @@ class LegacyMigrationIprEvent(IprEvent): """A subclass of IprEvent specifically for capturing contents of legacy_url_0, the text of a disclosure submitted by email""" pass + +class RemovedIprDisclosure(models.Model): + removed_id = models.PositiveBigIntegerField(unique=True) + reason = models.TextField() diff --git a/ietf/ipr/resources.py b/ietf/ipr/resources.py index 665b0ab02f..c4d2c436e6 100644 --- a/ietf/ipr/resources.py +++ b/ietf/ipr/resources.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2015-2019, All Rights Reserved +# Copyright The IETF Trust 2015-2025, All Rights Reserved # -*- coding: utf-8 -*- # Autogenerated by the mkresources management command 2015-03-21 14:05 PDT @@ -11,16 +11,16 @@ from ietf import api -from ietf.ipr.models import ( IprDisclosureBase, IprDocRel, HolderIprDisclosure, ThirdPartyIprDisclosure, +from ietf.ipr.models import ( IprDisclosureBase, IprDocRel, HolderIprDisclosure, RemovedIprDisclosure, ThirdPartyIprDisclosure, RelatedIpr, NonDocSpecificIprDisclosure, GenericIprDisclosure, IprEvent, LegacyMigrationIprEvent ) from ietf.person.resources import PersonResource from ietf.name.resources import IprDisclosureStateNameResource 
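Illustrative sketch (not part of the patch) of the model-level constraint added above: HolderIprDisclosure.clean() rejects a blanket disclosure unless its licensing declaration is the "royalty-free" IprLicenseTypeName. The helper name below is hypothetical.

from django.core.exceptions import ValidationError

from ietf.ipr.models import HolderIprDisclosure

def blanket_terms_ok(disclosure: HolderIprDisclosure) -> bool:
    """Hypothetical helper: True when clean() accepts the disclosure."""
    try:
        # When is_blanket_disclosure is True, clean() requires the licensing
        # name with slug "royalty-free"; anything else raises ValidationError.
        disclosure.clean()
    except ValidationError:
        return False
    return True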
-from ietf.doc.resources import DocAliasResource +from ietf.doc.resources import DocumentResource class IprDisclosureBaseResource(ModelResource): by = ToOneField(PersonResource, 'by') state = ToOneField(IprDisclosureStateNameResource, 'state') - docs = ToManyField(DocAliasResource, 'docs', null=True) + docs = ToManyField(DocumentResource, 'docs', null=True) rel = ToManyField('ietf.ipr.resources.IprDisclosureBaseResource', 'rel', null=True) class Meta: queryset = IprDisclosureBase.objects.all() @@ -45,10 +45,9 @@ class Meta: } api.ipr.register(IprDisclosureBaseResource()) -from ietf.doc.resources import DocAliasResource class IprDocRelResource(ModelResource): disclosure = ToOneField(IprDisclosureBaseResource, 'disclosure') - document = ToOneField(DocAliasResource, 'document') + document = ToOneField(DocumentResource, 'document') class Meta: cache = SimpleCache() queryset = IprDocRel.objects.all() @@ -66,13 +65,12 @@ class Meta: from ietf.person.resources import PersonResource from ietf.name.resources import IprDisclosureStateNameResource, IprLicenseTypeNameResource -from ietf.doc.resources import DocAliasResource class HolderIprDisclosureResource(ModelResource): by = ToOneField(PersonResource, 'by') state = ToOneField(IprDisclosureStateNameResource, 'state') iprdisclosurebase_ptr = ToOneField(IprDisclosureBaseResource, 'iprdisclosurebase_ptr') licensing = ToOneField(IprLicenseTypeNameResource, 'licensing') - docs = ToManyField(DocAliasResource, 'docs', null=True) + docs = ToManyField(DocumentResource, 'docs', null=True) rel = ToManyField(IprDisclosureBaseResource, 'rel', null=True) class Meta: cache = SimpleCache() @@ -111,12 +109,11 @@ class Meta: from ietf.person.resources import PersonResource from ietf.name.resources import IprDisclosureStateNameResource -from ietf.doc.resources import DocAliasResource class ThirdPartyIprDisclosureResource(ModelResource): by = ToOneField(PersonResource, 'by') state = ToOneField(IprDisclosureStateNameResource, 'state') iprdisclosurebase_ptr = ToOneField(IprDisclosureBaseResource, 'iprdisclosurebase_ptr') - docs = ToManyField(DocAliasResource, 'docs', null=True) + docs = ToManyField(DocumentResource, 'docs', null=True) rel = ToManyField(IprDisclosureBaseResource, 'rel', null=True) class Meta: cache = SimpleCache() @@ -168,12 +165,11 @@ class Meta: from ietf.person.resources import PersonResource from ietf.name.resources import IprDisclosureStateNameResource -from ietf.doc.resources import DocAliasResource class NonDocSpecificIprDisclosureResource(ModelResource): by = ToOneField(PersonResource, 'by') state = ToOneField(IprDisclosureStateNameResource, 'state') iprdisclosurebase_ptr = ToOneField(IprDisclosureBaseResource, 'iprdisclosurebase_ptr') - docs = ToManyField(DocAliasResource, 'docs', null=True) + docs = ToManyField(DocumentResource, 'docs', null=True) rel = ToManyField(IprDisclosureBaseResource, 'rel', null=True) class Meta: cache = SimpleCache() @@ -207,12 +203,11 @@ class Meta: from ietf.person.resources import PersonResource from ietf.name.resources import IprDisclosureStateNameResource -from ietf.doc.resources import DocAliasResource class GenericIprDisclosureResource(ModelResource): by = ToOneField(PersonResource, 'by') state = ToOneField(IprDisclosureStateNameResource, 'state') iprdisclosurebase_ptr = ToOneField(IprDisclosureBaseResource, 'iprdisclosurebase_ptr') - docs = ToManyField(DocAliasResource, 'docs', null=True) + docs = ToManyField(DocumentResource, 'docs', null=True) rel = ToManyField(IprDisclosureBaseResource, 'rel', null=True) 
class Meta: cache = SimpleCache() @@ -300,3 +295,18 @@ class Meta: } api.ipr.register(LegacyMigrationIprEventResource()) + + +class RemovedIprDisclosureResource(ModelResource): + class Meta: + queryset = RemovedIprDisclosure.objects.all() + serializer = api.Serializer() + cache = SimpleCache() + #resource_name = 'removediprdisclosure' + ordering = ['id', ] + filtering = { + "id": ALL, + "removed_id": ALL, + "reason": ALL, + } +api.ipr.register(RemovedIprDisclosureResource()) diff --git a/ietf/ipr/templatetags/ipr_filters.py b/ietf/ipr/templatetags/ipr_filters.py index 0130387500..8b3b420c41 100644 --- a/ietf/ipr/templatetags/ipr_filters.py +++ b/ietf/ipr/templatetags/ipr_filters.py @@ -32,7 +32,7 @@ def to_class_name(value): return value.__class__.__name__ def draft_rev_at_time(iprdocrel): - draft = iprdocrel.document.document + draft = iprdocrel.document event = iprdocrel.disclosure.get_latest_event_posted() if event is None: return ("","The Internet-Draft's revision at the time this disclosure was posted could not be determined.") @@ -47,7 +47,7 @@ def draft_rev_at_time(iprdocrel): @register.filter def no_revisions_message(iprdocrel): - draft = iprdocrel.document.document + draft = iprdocrel.document if draft.type_id != "draft" or iprdocrel.revisions.strip() != "": return "" rev_at_time, exception = draft_rev_at_time(iprdocrel) diff --git a/ietf/ipr/tests.py b/ietf/ipr/tests.py index 66337bff20..53a599e2de 100644 --- a/ietf/ipr/tests.py +++ b/ietf/ipr/tests.py @@ -3,40 +3,52 @@ import datetime - +import json +from unittest import mock +import re from pyquery import PyQuery from urllib.parse import quote, urlparse from zoneinfo import ZoneInfo from django.conf import settings +from django.test.utils import override_settings from django.urls import reverse as urlreverse from django.utils import timezone +from django.db.models import Max + import debug # pyflakes:ignore -from ietf.doc.models import DocAlias +from ietf.api.views import EmailIngestionError from ietf.doc.factories import ( DocumentFactory, WgDraftFactory, WgRfcFactory, + RfcFactory, NewRevisionDocEventFactory ) +from ietf.doc.utils import prettify_std_name from ietf.group.factories import RoleFactory from ietf.ipr.factories import ( HolderIprDisclosureFactory, GenericIprDisclosureFactory, + IprDisclosureBaseFactory, IprDocRelFactory, - IprEventFactory + IprEventFactory, + ThirdPartyIprDisclosureFactory ) +from ietf.ipr.forms import DraftForm, HolderIprDisclosureForm from ietf.ipr.mail import (process_response_email, get_reply_to, get_update_submitter_emails, - get_pseudo_submitter, get_holders, get_update_cc_addrs) -from ietf.ipr.models import (IprDisclosureBase,GenericIprDisclosure,HolderIprDisclosure, - ThirdPartyIprDisclosure) + get_pseudo_submitter, get_holders, get_update_cc_addrs, UndeliverableIprResponseError) +from ietf.ipr.models import (IprDisclosureBase, GenericIprDisclosure, HolderIprDisclosure, RemovedIprDisclosure, + ThirdPartyIprDisclosure, IprEvent) from ietf.ipr.templatetags.ipr_filters import no_revisions_message -from ietf.ipr.utils import get_genitive, get_ipr_summary +from ietf.ipr.utils import get_genitive, get_ipr_summary, ingest_response_email from ietf.mailtrigger.utils import gather_address_lists +from ietf.message.factories import MessageFactory from ietf.message.models import Message +from ietf.person.factories import PersonFactory from ietf.utils.mail import outbox, empty_outbox, get_payload_text from ietf.utils.test_utils import TestCase, login_testing_unauthorized from ietf.utils.text import 
text_to_dict @@ -97,9 +109,46 @@ def test_get_update_submitter_emails(self): self.assertTrue(messages[0].startswith('To: %s' % ipr.submitter_email)) def test_showlist(self): + for disc_factory_type in (HolderIprDisclosureFactory, GenericIprDisclosureFactory, ThirdPartyIprDisclosureFactory): + ipr = disc_factory_type(state_id="removed") + r = self.client.get(urlreverse("ietf.ipr.views.showlist")) + self.assertContains(r, ipr.title) + self.assertContains(r, "removed at the request of the submitter") + self.assertNotContains(r, "removed as objectively false") + ipr.state_id="posted" + ipr.save() + r = self.client.get(urlreverse("ietf.ipr.views.showlist")) + self.assertContains(r, ipr.title) + self.assertNotContains(r, "removed at the request of the submitter") + self.assertNotContains(r, "removed as objectively false") + ipr.state_id="removed_objfalse" + ipr.save() + r = self.client.get(urlreverse("ietf.ipr.views.showlist")) + self.assertContains(r, ipr.title) + self.assertNotContains(r, "removed at the request of the submitter") + self.assertContains(r, "removed as objectively false") + ipr.delete() + + def test_show_delete(self): ipr = HolderIprDisclosureFactory() - r = self.client.get(urlreverse("ietf.ipr.views.showlist")) - self.assertContains(r, ipr.title) + removed = RemovedIprDisclosure.objects.create( + removed_id=ipr.pk, reason="Removed for reasons" + ) + url = urlreverse("ietf.ipr.views.show", kwargs=dict(id=removed.removed_id)) + r = self.client.get(url) + self.assertContains(r, "Removed for reasons") + q = PyQuery(r.content) + self.assertEqual(len(q("#deletion_warning")), 0) + self.client.login(username="secretary", password="secretary+password") + r = self.client.get(url) + self.assertContains(r, "Removed for reasons") + q = PyQuery(r.content) + self.assertEqual(len(q("#deletion_warning")), 1) + ipr.delete() + r = self.client.get(url) + self.assertContains(r, "Removed for reasons") + q = PyQuery(r.content) + self.assertEqual(len(q("#deletion_warning")), 0) def test_show_posted(self): ipr = HolderIprDisclosureFactory() @@ -136,12 +185,6 @@ def test_ipr_history(self): r = self.client.get(urlreverse("ietf.ipr.views.history", kwargs=dict(id=ipr.pk))) self.assertContains(r, ipr.title) - def test_iprs_for_drafts(self): - draft=WgDraftFactory() - ipr = HolderIprDisclosureFactory(docs=[draft,]) - r = self.client.get(urlreverse("ietf.ipr.views.by_draft_txt")) - self.assertContains(r, draft.name) - self.assertContains(r, str(ipr.pk)) def test_about(self): r = self.client.get(urlreverse("ietf.ipr.views.about")) @@ -180,12 +223,39 @@ def test_search(self): self.assertContains(r, draft.name) self.assertNotContains(r, ipr.title) - DocAlias.objects.create(name="rfc321").docs.add(draft) + rfc = RfcFactory(rfc_number=321) + draft.relateddocument_set.create(relationship_id="became_rfc",target=rfc) # find RFC r = self.client.get(url + "?submit=rfc&rfc=321") self.assertContains(r, ipr.title) + rfc_new = RfcFactory(rfc_number=322) + rfc_new.relateddocument_set.create(relationship_id="obs", target=rfc) + + # find RFC 322 which obsoletes RFC 321 whose draft has IPR + r = self.client.get(url + "?submit=rfc&rfc=322") + self.assertContains(r, ipr.title) + self.assertContains(r, "Total number of IPR disclosures found: 1") + self.assertContains(r, "Total number of documents searched: 3.") + self.assertContains( + r, + f'Results for {prettify_std_name(rfc_new.name)} ("{rfc_new.title}")', + html=True, + ) + self.assertContains( + r, + f'Results for {prettify_std_name(rfc.name)} ("{rfc.title}"), ' + f'which 
was obsoleted by {prettify_std_name(rfc_new.name)} ("{rfc_new.title}")', + html=True, + ) + self.assertContains( + r, + f'Results for {prettify_std_name(draft.name)} ("{draft.title}"), ' + f'which became rfc {prettify_std_name(rfc.name)} ("{rfc.title}")', + html=True, + ) + # find by patent owner r = self.client.get(url + "?submit=holder&holder=%s" % ipr.holder_legal_name) self.assertContains(r, ipr.title) @@ -239,16 +309,16 @@ def test_sitemap(self): def test_new_generic(self): """Ensure new-generic redirects to new-general""" - url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "generic" }) + url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "generic" }) r = self.client.get(url) self.assertEqual(r.status_code,302) - self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.ipr.views.new", kwargs={ "type": "general"})) + self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.ipr.views.new", kwargs={ "_type": "general"})) def test_new_general(self): """Add a new general disclosure. Note: submitter does not need to be logged in. """ - url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "general" }) + url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "general" }) # invalid post r = self.client.post(url, { @@ -285,8 +355,8 @@ def test_new_specific(self): """Add a new specific disclosure. Note: submitter does not need to be logged in. """ draft = WgDraftFactory() - WgRfcFactory() - url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "specific" }) + rfc = WgRfcFactory() + url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "specific" }) # successful post empty_outbox() @@ -299,9 +369,9 @@ def test_new_specific(self): "ietfer_contact_info": "555-555-0101", "iprdocrel_set-TOTAL_FORMS": 2, "iprdocrel_set-INITIAL_FORMS": 0, - "iprdocrel_set-0-document": draft.docalias.first().pk, + "iprdocrel_set-0-document": draft.pk, "iprdocrel_set-0-revisions": '00', - "iprdocrel_set-1-document": DocAlias.objects.filter(name__startswith="rfc").first().pk, + "iprdocrel_set-1-document": rfc.pk, "patent_number": "SE12345678901", "patent_inventor": "A. Nonymous", "patent_title": "A method of transferring bits", @@ -341,8 +411,8 @@ def test_new_specific(self): def test_new_specific_no_revision(self): draft = WgDraftFactory() - WgRfcFactory() - url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "specific" }) + rfc = WgRfcFactory() + url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "specific" }) # successful post empty_outbox() @@ -355,8 +425,8 @@ def test_new_specific_no_revision(self): "ietfer_contact_info": "555-555-0101", "iprdocrel_set-TOTAL_FORMS": 2, "iprdocrel_set-INITIAL_FORMS": 0, - "iprdocrel_set-0-document": draft.docalias.first().pk, - "iprdocrel_set-1-document": DocAlias.objects.filter(name__startswith="rfc").first().pk, + "iprdocrel_set-0-document": draft.pk, + "iprdocrel_set-1-document": rfc.pk, "patent_number": "SE12345678901", "patent_inventor": "A. Nonymous", "patent_title": "A method of transferring bits", @@ -375,8 +445,8 @@ def test_new_thirdparty(self): """Add a new third-party disclosure. Note: submitter does not need to be logged in. 
""" draft = WgDraftFactory() - WgRfcFactory() - url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "third-party" }) + rfc = WgRfcFactory() + url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "third-party" }) # successful post empty_outbox() @@ -387,9 +457,9 @@ def test_new_thirdparty(self): "ietfer_contact_info": "555-555-0101", "iprdocrel_set-TOTAL_FORMS": 2, "iprdocrel_set-INITIAL_FORMS": 0, - "iprdocrel_set-0-document": draft.docalias.first().pk, + "iprdocrel_set-0-document": draft.pk, "iprdocrel_set-0-revisions": '00', - "iprdocrel_set-1-document": DocAlias.objects.filter(name__startswith="rfc").first().pk, + "iprdocrel_set-1-document": rfc.pk, "patent_number": "SE12345678901", "patent_inventor": "A. Nonymous", "patent_title": "A method of transferring bits", @@ -423,7 +493,7 @@ def test_edit(self): r = self.client.get(url) self.assertContains(r, original_ipr.holder_legal_name) - #url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "specific" }) + #url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "specific" }) # successful post empty_outbox() post_data = { @@ -434,7 +504,7 @@ def test_edit(self): "holder_legal_name": "Test Legal", "ietfer_contact_info": "555-555-0101", "ietfer_name": "Test Participant", - "iprdocrel_set-0-document": draft.docalias.first().pk, + "iprdocrel_set-0-document": draft.pk, "iprdocrel_set-0-revisions": '00', "iprdocrel_set-INITIAL_FORMS": 0, "iprdocrel_set-TOTAL_FORMS": 1, @@ -462,7 +532,7 @@ def test_edit(self): def test_update(self): draft = WgDraftFactory() - WgRfcFactory() + rfc = WgRfcFactory() original_ipr = HolderIprDisclosureFactory(docs=[draft,]) # get @@ -470,7 +540,7 @@ def test_update(self): r = self.client.get(url) self.assertContains(r, original_ipr.title) - #url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "specific" }) + #url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "specific" }) # successful post empty_outbox() r = self.client.post(url, { @@ -483,9 +553,9 @@ def test_update(self): "ietfer_contact_info": "555-555-0101", "iprdocrel_set-TOTAL_FORMS": 2, "iprdocrel_set-INITIAL_FORMS": 0, - "iprdocrel_set-0-document": draft.docalias.first().pk, + "iprdocrel_set-0-document": draft.pk, "iprdocrel_set-0-revisions": '00', - "iprdocrel_set-1-document": DocAlias.objects.filter(name__startswith="rfc").first().pk, + "iprdocrel_set-1-document": rfc.pk, "patent_number": "SE12345678901", "patent_inventor": "A. Nonymous", "patent_title": "A method of transferring bits", @@ -510,7 +580,7 @@ def test_update(self): def test_update_bad_post(self): draft = WgDraftFactory() - url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "specific" }) + url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "specific" }) empty_outbox() r = self.client.post(url, { @@ -520,7 +590,7 @@ def test_update_bad_post(self): "holder_contact_email": "test@holder.com", "iprdocrel_set-TOTAL_FORMS": 1, "iprdocrel_set-INITIAL_FORMS": 0, - "iprdocrel_set-0-document": draft.docalias.first().pk, + "iprdocrel_set-0-document": draft.pk, "iprdocrel_set-0-revisions": '00', "patent_number": "SE12345678901", "patent_inventor": "A. 
Nonymous", @@ -679,8 +749,8 @@ def test_notify_generic(self): ) self.assertIn(f'{settings.IDTRACKER_BASE_URL}{urlreverse("ietf.ipr.views.showlist")}', get_payload_text(outbox[1]).replace('\n',' ')) - def send_ipr_email_helper(self): - ipr = HolderIprDisclosureFactory() + def send_ipr_email_helper(self) -> tuple[str, IprEvent, HolderIprDisclosure]: + ipr = HolderIprDisclosureFactory.create() # call create() explicitly so mypy sees correct type url = urlreverse('ietf.ipr.views.email',kwargs={ "id": ipr.id }) self.client.login(username="secretary", password="secretary+password") yesterday = date_today() - datetime.timedelta(1) @@ -697,10 +767,11 @@ def send_ipr_email_helper(self): q = Message.objects.filter(reply_to=data['reply_to']) self.assertEqual(q.count(),1) event = q[0].msgevents.first() + assert event is not None self.assertTrue(event.response_past_due()) self.assertEqual(len(outbox), 1) self.assertTrue('joe@test.com' in outbox[0]['To']) - return data['reply_to'], event + return data['reply_to'], event, ipr uninteresting_ipr_message_strings = [ ("To: {to}\nCc: {cc}\nFrom: joe@test.com\nDate: {date}\nSubject: test\n"), @@ -714,34 +785,46 @@ def send_ipr_email_helper(self): def test_process_response_email(self): # first send a mail - reply_to, event = self.send_ipr_email_helper() + reply_to, event, _ = self.send_ipr_email_helper() # test process response uninteresting messages addrs = gather_address_lists('ipr_disclosure_submitted').as_strings() for message_string in self.uninteresting_ipr_message_strings: - result = process_response_email( + process_response_email( message_string.format( to=addrs.to, cc=addrs.cc, date=timezone.now().ctime() ) ) - self.assertIsNone(result) - + # test process response message_string = """To: {} From: joe@test.com Date: {} Subject: test """.format(reply_to, timezone.now().ctime()) - result = process_response_email(message_string) - - self.assertIsInstance(result, Message) + process_response_email(message_string) self.assertFalse(event.response_past_due()) + # test with an unmatchable message identifier + bad_reply_to = re.sub( + r"\+.{16}@", + '+0123456789abcdef@', + reply_to, + ) + self.assertNotEqual(reply_to, bad_reply_to) + message_string = f"""To: {bad_reply_to} + From: joe@test.com + Date: {timezone.now().ctime()} + Subject: test + """ + with self.assertRaises(UndeliverableIprResponseError): + process_response_email(message_string) + def test_process_response_email_with_invalid_encoding(self): """Interesting emails with invalid encoding should be handled""" - reply_to, _ = self.send_ipr_email_helper() + reply_to, _, disclosure = self.send_ipr_email_helper() # test process response message_string = """To: {} From: joe@test.com @@ -749,8 +832,8 @@ def test_process_response_email_with_invalid_encoding(self): Subject: test """.format(reply_to, timezone.now().ctime()) message_bytes = message_string.encode('utf8') + b'\nInvalid stuff: \xfe\xff\n' - result = process_response_email(message_bytes) - self.assertIsInstance(result, Message) + process_response_email(message_bytes) + result = IprEvent.objects.filter(disclosure=disclosure).first().message # newest # \ufffd is a rhombus character with an inverse ?, used to replace invalid characters self.assertEqual(result.body, 'Invalid stuff: \ufffd\ufffd\n\n', # not sure where the extra \n is from 'Invalid characters should be replaced with \ufffd characters') @@ -765,8 +848,45 @@ def test_process_response_email_uninteresting_with_invalid_encoding(self): cc=addrs.cc, date=timezone.now().ctime(), ).encode('utf8') 
+ b'\nInvalid stuff: \xfe\xff\n' - result = process_response_email(message_bytes) - self.assertIsNone(result) + process_response_email(message_bytes) + + @override_settings(ADMINS=(("Some Admin", "admin@example.com"),)) + @mock.patch("ietf.ipr.utils.process_response_email") + def test_ingest_response_email(self, mock_process_response_email): + message = b"What a nice message" + mock_process_response_email.side_effect = ValueError("ouch!") + with self.assertRaises(EmailIngestionError) as context: + ingest_response_email(message) + self.assertIsNone(context.exception.email_recipients) # default recipients + self.assertIsNotNone(context.exception.email_body) # body set + self.assertIsNotNone(context.exception.email_original_message) # original message attached + self.assertEqual(context.exception.email_attach_traceback, True) + self.assertTrue(mock_process_response_email.called) + self.assertEqual(mock_process_response_email.call_args, mock.call(message)) + mock_process_response_email.reset_mock() + + mock_process_response_email.side_effect = UndeliverableIprResponseError + mock_process_response_email.return_value = None + with self.assertRaises(EmailIngestionError) as context: + ingest_response_email(message) + self.assertIsNone(context.exception.as_emailmessage()) # should not send an email on a clean rejection + self.assertTrue(mock_process_response_email.called) + self.assertEqual(mock_process_response_email.call_args, mock.call(message)) + mock_process_response_email.reset_mock() + + mock_process_response_email.side_effect = None + mock_process_response_email.return_value = None # ignored message + ingest_response_email(message) # should not raise an exception + self.assertIsNone(context.exception.as_emailmessage()) # should not send an email on ignored message + self.assertTrue(mock_process_response_email.called) + self.assertEqual(mock_process_response_email.call_args, mock.call(message)) + mock_process_response_email.reset_mock() + + # successful operation + mock_process_response_email.return_value = MessageFactory() + ingest_response_email(message) + self.assertTrue(mock_process_response_email.called) + self.assertEqual(mock_process_response_email.call_args, mock.call(message)) def test_ajax_search(self): url = urlreverse('ietf.ipr.views.ajax_search') @@ -786,7 +906,7 @@ def test_edit_using_factory(self): 'iprdocrel_set-INITIAL_FORMS' : 0, 'iprdocrel_set-0-id': '', "iprdocrel_set-0-document": disclosure.docs.first().pk, - "iprdocrel_set-0-revisions": disclosure.docs.first().document.rev, + "iprdocrel_set-0-revisions": disclosure.docs.first().rev, 'holder_legal_name': disclosure.holder_legal_name, 'patent_number': patent_dict['Number'], 'patent_title': patent_dict['Title'], @@ -848,7 +968,7 @@ def test_no_revisions_message(self): NewRevisionDocEventFactory(doc=draft, rev=f"{rev:02d}", time=now-datetime.timedelta(days=30*(2-rev))) # Disclosure has non-empty revisions field on its related draft - iprdocrel = IprDocRelFactory(document=draft.docalias.first()) + iprdocrel = IprDocRelFactory(document=draft) IprEventFactory(type_id="posted",time=now,disclosure=iprdocrel.disclosure) self.assertEqual( no_revisions_message(iprdocrel), @@ -856,7 +976,7 @@ def test_no_revisions_message(self): ) # Disclosure has more than one revision, none called out, disclosure after submissions - iprdocrel = IprDocRelFactory(document=draft.docalias.first(), revisions="") + iprdocrel = IprDocRelFactory(document=draft, revisions="") IprEventFactory(type_id="posted",time=now,disclosure=iprdocrel.disclosure) 
self.assertEqual( no_revisions_message(iprdocrel), @@ -864,7 +984,7 @@ def test_no_revisions_message(self): ) # Disclosure has more than one revision, none called out, disclosure after 01 - iprdocrel = IprDocRelFactory(document=draft.docalias.first(), revisions="") + iprdocrel = IprDocRelFactory(document=draft, revisions="") e = IprEventFactory(type_id="posted",disclosure=iprdocrel.disclosure) e.time = now-datetime.timedelta(days=15) e.save() @@ -874,7 +994,7 @@ def test_no_revisions_message(self): ) # Disclosure has more than one revision, none called out, disclosure was before the 00 - iprdocrel = IprDocRelFactory(document=draft.docalias.first(), revisions="") + iprdocrel = IprDocRelFactory(document=draft, revisions="") e = IprEventFactory(type_id="posted",disclosure=iprdocrel.disclosure) e.time = now-datetime.timedelta(days=180) e.save() @@ -886,7 +1006,7 @@ def test_no_revisions_message(self): # disclosed draft has no NewRevisionDocEvents draft = WgDraftFactory(rev="20") draft.docevent_set.all().delete() - iprdocrel = IprDocRelFactory(document=draft.docalias.first(), revisions="") + iprdocrel = IprDocRelFactory(document=draft, revisions="") IprEventFactory(type_id="posted",disclosure=iprdocrel.disclosure) self.assertEqual( no_revisions_message(iprdocrel), @@ -895,9 +1015,178 @@ def test_no_revisions_message(self): # disclosed draft has only one revision draft = WgDraftFactory(rev="00") - iprdocrel = IprDocRelFactory(document=draft.docalias.first(), revisions="") + iprdocrel = IprDocRelFactory(document=draft, revisions="") IprEventFactory(type_id="posted",disclosure=iprdocrel.disclosure) self.assertEqual( no_revisions_message(iprdocrel), "No revisions for this Internet-Draft were specified in this disclosure. However, there is only one revision of this Internet-Draft." ) + + +class DraftFormTests(TestCase): + def setUp(self): + super().setUp() + self.disclosure = IprDisclosureBaseFactory() + self.draft = WgDraftFactory.create_batch(10)[-1] + self.rfc = RfcFactory() + + def test_revisions_valid(self): + post_data = { + # n.b., "document" is a SearchableDocumentField, which is a multiple choice field limited + # to a single choice. Its value must be an array of pks with one element. + "document": [str(self.draft.pk)], + "disclosure": str(self.disclosure.pk), + } + # The revisions field is just a char field that allows descriptions of the applicable + # document revisions. It's usually just a rev or "00-02", but the form allows anything + # not empty. The secretariat will review the value before the disclosure is posted so + # minimal validation is ok here. + self.assertTrue(DraftForm(post_data | {"revisions": "00"}).is_valid()) + self.assertTrue(DraftForm(post_data | {"revisions": "00-02"}).is_valid()) + self.assertTrue(DraftForm(post_data | {"revisions": "01,03, 05"}).is_valid()) + self.assertTrue(DraftForm(post_data | {"revisions": "all but 01"}).is_valid()) + # RFC instead of draft - allow empty / missing revisions + post_data["document"] = [str(self.rfc.pk)] + self.assertTrue(DraftForm(post_data).is_valid()) + self.assertTrue(DraftForm(post_data | {"revisions": ""}).is_valid()) + + def test_revisions_invalid(self): + missing_rev_error_msg = ( + "Revisions of this Internet-Draft for which this disclosure is relevant must be specified." + ) + null_char_error_msg = "Null characters are not allowed." + + post_data = { + # n.b., "document" is a SearchableDocumentField, which is a multiple choice field limited + # to a single choice. Its value must be an array of pks with one element. 
+ "document": [str(self.draft.pk)], + "disclosure": str(self.disclosure.pk), + } + self.assertFormError( + DraftForm(post_data), "revisions", missing_rev_error_msg + ) + self.assertFormError( + DraftForm(post_data | {"revisions": ""}), "revisions", missing_rev_error_msg + ) + self.assertFormError( + DraftForm(post_data | {"revisions": "1\x00"}), + "revisions", + [null_char_error_msg, missing_rev_error_msg], + ) + # RFC instead of draft still validates the revisions field + self.assertFormError( + DraftForm(post_data | {"document": [str(self.rfc.pk)], "revisions": "1\x00"}), + "revisions", + null_char_error_msg, + ) + + +class HolderIprDisclosureFormTests(TestCase): + def setUp(self): + super().setUp() + # Checkboxes that are False are left out of the Form data, not sent back at all. These are + # commented out - if they were checked, their value would be "on". + self.data = { + "holder_legal_name": "Test Legal", + "holder_contact_name": "Test Holder", + "holder_contact_email": "test@holder.com", + "holder_contact_info": "555-555-0100", + "ietfer_name": "Test Participant", + "ietfer_contact_info": "555-555-0101", + "iprdocrel_set-TOTAL_FORMS": 2, + "iprdocrel_set-INITIAL_FORMS": 0, + "iprdocrel_set-0-document": "1234", # fake id - validates but won't save() + "iprdocrel_set-0-revisions": '00', + "iprdocrel_set-1-document": "4567", # fake id - validates but won't save() + # "is_blanket_disclosure": "on", + "patent_number": "SE12345678901", + "patent_inventor": "A. Nonymous", + "patent_title": "A method of transferring bits", + "patent_date": "2000-01-01", + # "has_patent_pending": "on", + "licensing": "reasonable", + "submitter_name": "Test Holder", + "submitter_email": "test@holder.com", + } + + def test_blanket_disclosure_licensing_restrictions(self): + """when is_blanket_disclosure is True only royalty-free licensing is valid + + Most of the form functionality is tested via the views in IprTests above. More thorough testing + of validation ought to move here so we don't have to exercise the whole Django plumbing repeatedly. 
+ """ + self.assertTrue(HolderIprDisclosureForm(data=self.data).is_valid()) + self.data["is_blanket_disclosure"] = "on" + self.assertFalse(HolderIprDisclosureForm(data=self.data).is_valid()) + self.data["licensing"] = "royalty-free" + self.assertTrue(HolderIprDisclosureForm(data=self.data).is_valid()) + + def test_patent_details_required_unless_blanket(self): + self.assertTrue(HolderIprDisclosureForm(data=self.data).is_valid()) + patent_fields = ["patent_number", "patent_inventor", "patent_title", "patent_date"] + # any of the fields being missing should invalidate the form + for pf in patent_fields: + val = self.data.pop(pf) + self.assertFalse(HolderIprDisclosureForm(data=self.data).is_valid()) + self.data[pf] = val + + # should be optional if is_blanket_disclosure is True + self.data["is_blanket_disclosure"] = "on" + self.data["licensing"] = "royalty-free" # also needed for a blanket disclosure + for pf in patent_fields: + val = self.data.pop(pf) + self.assertTrue(HolderIprDisclosureForm(data=self.data).is_valid()) + self.data[pf] = val + +class JsonSnapshotTests(TestCase): + def test_json_snapshot(self): + h = HolderIprDisclosureFactory() + url = urlreverse("ietf.ipr.views.json_snapshot", kwargs=dict(id=h.id)) + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + dump = json.loads(r.content) + self.assertCountEqual( + [o["model"] for o in dump], + ["ipr.holderiprdisclosure", "ipr.iprdisclosurebase", "person.person"], + ) + h.docs.add(WgRfcFactory()) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + dump = json.loads(r.content) + self.assertCountEqual( + [o["model"] for o in dump], + [ + "ipr.holderiprdisclosure", + "ipr.iprdisclosurebase", + "ipr.iprdocrel", + "person.person", + ], + ) + IprEventFactory( + disclosure=h, + message=MessageFactory(by=PersonFactory()), + in_reply_to=MessageFactory(), + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + dump = json.loads(r.content) + self.assertCountEqual( + [o["model"] for o in dump], + [ + "ipr.holderiprdisclosure", + "ipr.iprdisclosurebase", + "ipr.iprdocrel", + "ipr.iprevent", + "message.message", + "message.message", + "person.person", + "person.person", + "person.person", + "person.person", + ], + ) + no_such_ipr_id = IprDisclosureBase.objects.aggregate(Max("id"))["id__max"] + 1 + url = urlreverse("ietf.ipr.views.json_snapshot", kwargs=dict(id=no_such_ipr_id)) + r = self.client.get(url) + self.assertEqual(r.status_code, 404) diff --git a/ietf/ipr/urls.py b/ietf/ipr/urls.py index 6f7b2d4080..2c8a26c624 100644 --- a/ietf/ipr/urls.py +++ b/ietf/ipr/urls.py @@ -12,8 +12,6 @@ url(r'^admin/$', RedirectView.as_view(url=reverse_lazy('ietf.ipr.views.admin',kwargs={'state':'pending'}), permanent=True)), url(r'^admin/(?Ppending|removed|parked)/$', views.admin), url(r'^ajax/search/$', views.ajax_search), - url(r'^by-draft/$', views.by_draft_txt), - url(r'^by-draft-recursive/$', views.by_draft_recursive_txt), url(r'^(?P\d+)/$', views.show), url(r'^(?P\d+)/addcomment/$', views.add_comment), url(r'^(?P\d+)/addemail/$', views.add_email), @@ -23,8 +21,9 @@ url(r'^(?P\d+)/notify/(?Pupdate|posted)/$', views.notify), url(r'^(?P\d+)/post/$', views.post), url(r'^(?P\d+)/state/$', views.state), + url(r'^(?P\d+)/json-snapshot/$', views.json_snapshot), url(r'^update/$', RedirectView.as_view(url=reverse_lazy('ietf.ipr.views.showlist'), permanent=True)), url(r'^update/(?P\d+)/$', views.update), - url(r'^new-(?P(specific|generic|general|third-party))/$', 
views.new),
+    url(r'^new-(?P<_type>(specific|generic|general|third-party))/$', views.new),
     url(r'^search/$', views.search),
 ]
diff --git a/ietf/ipr/utils.py b/ietf/ipr/utils.py
index f288803de2..bcbb052260 100644
--- a/ietf/ipr/utils.py
+++ b/ietf/ipr/utils.py
@@ -1,9 +1,16 @@
-# Copyright The IETF Trust 2014-2020, All Rights Reserved
+# Copyright The IETF Trust 2014-2025, All Rights Reserved
 # -*- coding: utf-8 -*-

-from ietf.ipr.models import IprDocRel
+import json
+import debug # pyflakes:ignore
+
+from textwrap import dedent
+
+from django.core import serializers

-import debug # pyflakes:ignore
+from ietf.ipr.mail import process_response_email, UndeliverableIprResponseError
+
+from ietf.ipr.models import IprDocRel

 def get_genitive(name):
     """Return the genitive form of name"""
@@ -32,60 +39,69 @@ def get_ipr_summary(disclosure):
     return summary if len(summary) <= 128 else summary[:125]+'...'

-def iprs_from_docs(aliases,**kwargs):
-    """Returns a list of IPRs related to doc aliases"""
+def iprs_from_docs(docs,**kwargs):
+    """Returns a list of IPRs related to docs"""
     iprdocrels = []
-    for alias in aliases:
-        for document in alias.docs.all():
-            if document.ipr(**kwargs):
-                iprdocrels += document.ipr(**kwargs)
+    for document in docs:
+        if document.ipr(**kwargs):
+            iprdocrels += document.ipr(**kwargs)
     return list(set([i.disclosure for i in iprdocrels]))

-def related_docs(alias, relationship=('replaces', 'obs')):
+def related_docs(doc, relationship=('replaces', 'obs'), reverse_relationship=("became_rfc",)):
     """Returns list of related documents"""
-    results = []
-    for doc in alias.docs.all():
-        results += list(doc.docalias.all())
-
-    rels = []
-    for doc in alias.docs.all():
-        rels += list(doc.all_relations_that_doc(relationship))
-
-    for rel in rels:
-        rel_aliases = list(rel.target.document.docalias.all())
-
-        for x in rel_aliases:
-            x.related = rel
-            x.relation = rel.relationship.revname
-        results += rel_aliases
-
-    return list(set(results))
+    results = [doc]
+    rels = doc.all_relations_that_doc(relationship)

-def generate_draft_recursive_txt():
-    docipr = {}
+    for rel in rels:
+        rel.target.related = rel
+        rel.target.relation = rel.relationship.revname
+    results += [x.target for x in rels]

-    for o in IprDocRel.objects.filter(disclosure__state='posted').select_related('document'):
-        alias = o.document
-        name = alias.name
-        for document in alias.docs.all():
-            related = set(document.docalias.all()) | set(document.all_related_that_doc(('obs', 'replaces')))
-            for alias in related:
-                name = alias.name
-                if name.startswith("rfc"):
-                    name = name.upper()
-                if not name in docipr:
-                    docipr[name] = []
-                docipr[name].append(o.disclosure_id)
+    rev_rels = doc.all_relations_that(reverse_relationship)
+    for rel in rev_rels:
+        rel.source.related = rel
+        rel.source.relation = rel.relationship.name
+    results += [x.source for x in rev_rels]

-    lines = [ "# Machine-readable list of IPR disclosures by Internet-Draft name" ]
-    for name, iprs in docipr.items():
-        lines.append(name + "\t" + "\t".join(str(ipr_id) for ipr_id in sorted(iprs)))
+    return list(set(results))

-    data = '\n'.join(lines)
-    filename = '/a/ietfdata/derived/ipr_draft_recursive.txt'
-    with open(filename, 'w') as f:
-        f.write(data)
+def ingest_response_email(message: bytes):
+    from ietf.api.views import EmailIngestionError # avoid circular import
+    try:
+        process_response_email(message)
+    except UndeliverableIprResponseError:
+        # Message was rejected due to some problem the sender can fix, so bounce but don't send
+        # an email to the admins
+        raise EmailIngestionError("IPR response rejected", email_body=None)
+    except Exception as err:
+        # Message was rejected due to an unhandled exception. This is likely something
+        # the admins need to address, so send them a copy of the email.
+        raise EmailIngestionError(
+            "Datatracker IPR email ingestion error",
+            email_body=dedent("""\
+                An error occurred while ingesting IPR email into the Datatracker. The original message is attached.
+
+                {error_summary}
+                """),
+            email_original_message=message,
+            email_attach_traceback=True,
+        ) from err
+
+def json_dump_disclosure(disclosure):
+    objs = set()
+    objs.add(disclosure)
+    objs.add(disclosure.iprdisclosurebase_ptr)
+    objs.add(disclosure.by)
+    objs.update(IprDocRel.objects.filter(disclosure=disclosure))
+    objs.update(disclosure.iprevent_set.all())
+    objs.update([i.by for i in disclosure.iprevent_set.all()])
+    objs.update([i.message for i in disclosure.iprevent_set.all() if i.message ])
+    objs.update([i.message.by for i in disclosure.iprevent_set.all() if i.message ])
+    objs.update([i.in_reply_to for i in disclosure.iprevent_set.all() if i.in_reply_to ])
+    objs.update([i.in_reply_to.by for i in disclosure.iprevent_set.all() if i.in_reply_to ])
+    objs = sorted(list(objs),key=lambda o:o.__class__.__name__)
+    return json.dumps(json.loads(serializers.serialize("json",objs)),indent=4)
diff --git a/ietf/ipr/views.py b/ietf/ipr/views.py
index e2ddb3bcc3..0a43ff2c27 100644
--- a/ietf/ipr/views.py
+++ b/ietf/ipr/views.py
@@ -18,7 +18,7 @@
 import debug # pyflakes:ignore

-from ietf.doc.models import DocAlias
+from ietf.doc.models import Document
 from ietf.group.models import Role, Group
 from ietf.ietfauth.utils import role_required, has_role
 from ietf.ipr.mail import (message_from_message, get_reply_to, get_update_submitter_emails)
@@ -28,17 +28,17 @@
     AddCommentForm, AddEmailForm, NotifyForm, StateForm,
     NonDocSpecificIprDisclosureForm, GenericIprDisclosureForm)
 from ietf.ipr.models import (IprDisclosureStateName, IprDisclosureBase,
-    HolderIprDisclosure, GenericIprDisclosure, ThirdPartyIprDisclosure,
+    HolderIprDisclosure, GenericIprDisclosure, RemovedIprDisclosure, ThirdPartyIprDisclosure,
     NonDocSpecificIprDisclosure, IprDocRel, RelatedIpr,IprEvent)
 from ietf.ipr.utils import (get_genitive, get_ipr_summary,
-    iprs_from_docs, related_docs)
+    iprs_from_docs, json_dump_disclosure, related_docs)
 from ietf.mailtrigger.utils import gather_address_lists
 from ietf.message.models import Message
 from ietf.message.utils import infer_message
 from ietf.name.models import IprLicenseTypeName
 from ietf.person.models import Person
-from ietf.secr.utils.document import get_rfc_num, is_draft
+from ietf.utils import log
 from ietf.utils.draft_search import normalize_draftname
 from ietf.utils.mail import send_mail, send_mail_message
 from ietf.utils.response import permission_denied
@@ -69,16 +69,20 @@ def get_document_emails(ipr):
     has been posted"""
     messages = []
     for rel in ipr.iprdocrel_set.all():
-        doc = rel.document.document
+        doc = rel.document

-        if is_draft(doc):
+        if doc.type_id=="draft":
             doc_info = 'Internet-Draft entitled "{}" ({})'.format(doc.title,doc.name)
+        elif doc.type_id=="rfc":
+            doc_info = 'RFC entitled "{}" (RFC{})'.format(doc.title, doc.rfc_number)
         else:
-            doc_info = 'RFC entitled "{}" (RFC{})'.format(doc.title,get_rfc_num(doc))
+            log.unreachable("2023-08-15")
+            return ""

         addrs = gather_address_lists('ipr_posted_on_doc',doc=doc).as_strings(compact=False)

-        author_names = ', '.join(a.person.name for a in doc.documentauthor_set.select_related("person"))
+        # Get a 
list of author names for the salutation in the body of the email + author_names = ', '.join(doc.author_names()) context = dict( settings=settings, @@ -149,13 +153,13 @@ def ipr_rfc_number(disclosureDate, thirdPartyDisclosureFlag): # RFC publication date comes from the RFC Editor announcement ipr_rfc_pub_datetime = { - 1310 : datetime.datetime(1992, 3, 13, 0, 0, tzinfo=datetime.timezone.utc), - 1802 : datetime.datetime(1994, 3, 23, 0, 0, tzinfo=datetime.timezone.utc), - 2026 : datetime.datetime(1996, 10, 29, 0, 0, tzinfo=datetime.timezone.utc), - 3668 : datetime.datetime(2004, 2, 18, 0, 0, tzinfo=datetime.timezone.utc), - 3979 : datetime.datetime(2005, 3, 2, 2, 23, tzinfo=datetime.timezone.utc), - 4879 : datetime.datetime(2007, 4, 10, 18, 21, tzinfo=datetime.timezone.utc), - 8179 : datetime.datetime(2017, 5, 31, 23, 1, tzinfo=datetime.timezone.utc), + 1310 : datetime.datetime(1992, 3, 13, 0, 0, tzinfo=datetime.UTC), + 1802 : datetime.datetime(1994, 3, 23, 0, 0, tzinfo=datetime.UTC), + 2026 : datetime.datetime(1996, 10, 29, 0, 0, tzinfo=datetime.UTC), + 3668 : datetime.datetime(2004, 2, 18, 0, 0, tzinfo=datetime.UTC), + 3979 : datetime.datetime(2005, 3, 2, 2, 23, tzinfo=datetime.UTC), + 4879 : datetime.datetime(2007, 4, 10, 18, 21, tzinfo=datetime.UTC), + 8179 : datetime.datetime(2017, 5, 31, 23, 1, tzinfo=datetime.UTC), } if disclosureDate < ipr_rfc_pub_datetime[1310]: @@ -442,58 +446,35 @@ def history(request, id): 'selected_tab_entry':'history' }) -def by_draft_txt(request): - docipr = {} - for o in IprDocRel.objects.filter(disclosure__state='posted').select_related('document'): - name = o.document.name - if name.startswith("rfc"): - name = name.upper() - - if not name in docipr: - docipr[name] = [] - - docipr[name].append(o.disclosure_id) - - lines = [ "# Machine-readable list of IPR disclosures by draft name" ] - for name, iprs in docipr.items(): - lines.append(name + "\t" + "\t".join(str(ipr_id) for ipr_id in sorted(iprs))) - - return HttpResponse("\n".join(lines), content_type="text/plain; charset=%s"%settings.DEFAULT_CHARSET) - -def by_draft_recursive_txt(request): - """Returns machine-readable list of IPR disclosures by draft name, recursive. - NOTE: this view is expensive and should be removed _after_ tools.ietf.org is retired, - including util function and management commands that generate the content for - this view.""" - - with open('/a/ietfdata/derived/ipr_draft_recursive.txt') as f: - content = f.read() - return HttpResponse(content, content_type="text/plain; charset=%s"%settings.DEFAULT_CHARSET) - - -def new(request, type, updates=None): +def new(request, _type, updates=None): """Submit a new IPR Disclosure. If the updates field != None, this disclosure updates one or more other disclosures.""" # Note that URL patterns won't ever send updates - updates is only non-null when called from code # This odd construct flipping generic and general allows the URLs to say 'general' while having a minimal impact on the code. # A cleanup to change the code to switch on type 'general' should follow. 
- if type == 'generic' and updates: # Only happens when called directly from the updates view + if ( + _type == "generic" and updates + ): # Only happens when called directly from the updates view pass - elif type == 'generic': - return HttpResponseRedirect(urlreverse('ietf.ipr.views.new',kwargs=dict(type='general'))) - elif type == 'general': - type = 'generic' + elif _type == "generic": + return HttpResponseRedirect( + urlreverse("ietf.ipr.views.new", kwargs=dict(_type="general")) + ) + elif _type == "general": + _type = "generic" else: pass # 1 to show initially + the template - DraftFormset = inlineformset_factory(IprDisclosureBase, IprDocRel, form=DraftForm, can_delete=False, extra=1 + 1) + DraftFormset = inlineformset_factory( + IprDisclosureBase, IprDocRel, form=DraftForm, can_delete=False, extra=1 + 1 + ) - if request.method == 'POST': - form = ipr_form_mapping[type](request.POST) - if type != 'generic': + if request.method == "POST": + form = ipr_form_mapping[_type](request.POST) + if _type != "generic": draft_formset = DraftFormset(request.POST, instance=IprDisclosureBase()) else: draft_formset = None @@ -502,72 +483,92 @@ def new(request, type, updates=None): person = Person.objects.get(name="(System)") else: person = request.user.person - + # check formset validity - if type != 'generic': + if _type != "generic": valid_formsets = draft_formset.is_valid() else: valid_formsets = True - + if form.is_valid() and valid_formsets: - if 'updates' in form.cleaned_data: - updates = form.cleaned_data['updates'] - del form.cleaned_data['updates'] + if "updates" in form.cleaned_data: + updates = form.cleaned_data["updates"] + del form.cleaned_data["updates"] disclosure = form.save(commit=False) disclosure.by = person - disclosure.state = IprDisclosureStateName.objects.get(slug='pending') + disclosure.state = IprDisclosureStateName.objects.get(slug="pending") disclosure.save() - - if type != 'generic': + + if _type != "generic": draft_formset = DraftFormset(request.POST, instance=disclosure) draft_formset.save() set_disclosure_title(disclosure) disclosure.save() - + if updates: for ipr in updates: - RelatedIpr.objects.create(source=disclosure,target=ipr,relationship_id='updates') - + RelatedIpr.objects.create( + source=disclosure, target=ipr, relationship_id="updates" + ) + # create IprEvent IprEvent.objects.create( - type_id='submitted', + type_id="submitted", by=person, disclosure=disclosure, - desc="Disclosure Submitted") + desc="Disclosure Submitted", + ) # send email notification - (to, cc) = gather_address_lists('ipr_disclosure_submitted') - send_mail(request, to, ('IPR Submitter App', 'ietf-ipr@ietf.org'), - 'New IPR Submission Notification', + (to, cc) = gather_address_lists("ipr_disclosure_submitted") + send_mail( + request, + to, + ("IPR Submitter App", "ietf-ipr@ietf.org"), + "New IPR Submission Notification", "ipr/new_update_email.txt", - {"ipr": disclosure,}, - cc=cc) - + { + "ipr": disclosure, + }, + cc=cc, + ) + return render(request, "ipr/submitted.html") else: if updates: original = IprDisclosureBase(id=updates).get_child() initial = model_to_dict(original) - initial.update({'updates':str(updates), }) - patent_info = text_to_dict(initial.get('patent_info', '')) + initial.update( + { + "updates": str(updates), + } + ) + patent_info = text_to_dict(initial.get("patent_info", "")) if list(patent_info.keys()): - patent_dict = dict([ ('patent_'+k.lower(), v) for k,v in list(patent_info.items()) ]) + patent_dict = dict( + [("patent_" + k.lower(), v) for k, v in 
list(patent_info.items())] + ) else: - patent_dict = {'patent_notes': initial.get('patent_info', '')} + patent_dict = {"patent_notes": initial.get("patent_info", "")} initial.update(patent_dict) - form = ipr_form_mapping[type](initial=initial) + form = ipr_form_mapping[_type](initial=initial) else: - form = ipr_form_mapping[type]() - disclosure = IprDisclosureBase() # dummy disclosure for inlineformset + form = ipr_form_mapping[_type]() + disclosure = IprDisclosureBase() # dummy disclosure for inlineformset draft_formset = DraftFormset(instance=disclosure) - return render(request, "ipr/details_edit.html", { - 'form': form, - 'draft_formset':draft_formset, - 'type':type, - }) + return render( + request, + "ipr/details_edit.html", + { + "form": form, + "draft_formset": draft_formset, + "type": _type, + }, + ) + @role_required('Secretariat',) def notify(request, id, type): @@ -674,22 +675,53 @@ def search(request): doc = q if docid: - start = DocAlias.objects.filter(name__iexact=docid) - elif search_type == "draft": - q = normalize_draftname(q) - start = DocAlias.objects.filter(name__icontains=q, name__startswith="draft") - else: # search_type == "rfc" - start = DocAlias.objects.filter(name="rfc%s" % q.lstrip("0")) - + start = Document.objects.filter(name__iexact=docid) + else: + if search_type == "draft": + q = normalize_draftname(q) + start = Document.objects.filter(name__icontains=q, name__startswith="draft") + elif search_type == "rfc": + start = Document.objects.filter(name="rfc%s" % q.lstrip("0")) + # one match if len(start) == 1: first = start[0] - doc = first.document - docs = related_docs(first) - iprs = iprs_from_docs(docs,states=states) + doc = first + docs = set([first]) + docs.update( + related_docs( + first, relationship=("replaces", "obs"), reverse_relationship=() + ) + ) + docs.update( + set( + [ + draft + for drafts in [ + related_docs( + d, relationship=(), reverse_relationship=("became_rfc",) + ) + for d in docs + ] + for draft in drafts + ] + ) + ) + docs.discard(None) + docs = sorted( + docs, + key=lambda d: ( + d.rfc_number if d.rfc_number is not None else 0, + d.became_rfc().rfc_number if d.became_rfc() else 0, + ), + reverse=True, + ) + iprs = iprs_from_docs(docs, states=states) template = "ipr/search_doc_result.html" - updated_docs = related_docs(first, ('updates',)) - related_iprs = list(set(iprs_from_docs(updated_docs, states=states)) - set(iprs)) + updated_docs = related_docs(first, ("updates",)) + related_iprs = list( + set(iprs_from_docs(updated_docs, states=states)) - set(iprs) + ) # multiple matches, select just one elif start: docs = start @@ -716,27 +748,27 @@ def search(request): # Search by wg acronym # Document list with IPRs elif search_type == "group": - docs = list(DocAlias.objects.filter(docs__group=q)) + docs = list(Document.objects.filter(group=q)) related = [] for doc in docs: doc.product_of_this_wg = True related += related_docs(doc) iprs = iprs_from_docs(list(set(docs+related)),states=states) - docs = [ doc for doc in docs if doc.document.ipr() ] - docs = sorted(docs, key=lambda x: max([ipr.disclosure.time for ipr in x.document.ipr()]), reverse=True) + docs = [ doc for doc in docs if doc.ipr() ] + docs = sorted(docs, key=lambda x: max([ipr.disclosure.time for ipr in x.ipr()]), reverse=True) template = "ipr/search_wg_result.html" q = Group.objects.get(id=q).acronym # make acronym for use in template # Search by rfc and id title # Document list with IPRs elif search_type == "doctitle": - docs = list(DocAlias.objects.filter(docs__title__icontains=q)) 
+ docs = list(Document.objects.filter(title__icontains=q)) related = [] for doc in docs: related += related_docs(doc) iprs = iprs_from_docs(list(set(docs+related)),states=states) - docs = [ doc for doc in docs if doc.document.ipr() ] - docs = sorted(docs, key=lambda x: max([ipr.disclosure.time for ipr in x.document.ipr()]), reverse=True) + docs = [ doc for doc in docs if doc.ipr() ] + docs = sorted(docs, key=lambda x: max([ipr.disclosure.time for ipr in x.ipr()]), reverse=True) template = "ipr/search_doctitle_result.html" # Search by title of IPR disclosure @@ -786,7 +818,14 @@ def get_details_tabs(ipr, selected): def show(request, id): """View of individual declaration""" - ipr = get_object_or_404(IprDisclosureBase, id=id).get_child() + ipr = IprDisclosureBase.objects.filter(id=id) + removed = RemovedIprDisclosure.objects.filter(removed_id=id) + if removed.exists(): + return render(request, "ipr/deleted.html", {"removed": removed.get(), "ipr": ipr}) + if not ipr.exists(): + raise Http404 + else: + ipr = ipr.get().get_child() if not has_role(request.user, 'Secretariat'): if ipr.state.slug in ['removed', 'removed_objfalse']: return render(request, "ipr/removed.html", { @@ -870,3 +909,8 @@ def update(request, id): child = ipr.get_child() type = class_to_type[child.__class__.__name__] return new(request, type, updates=id) + +@role_required("Secretariat") +def json_snapshot(request, id): + obj = get_object_or_404(IprDisclosureBase,id=id).get_child() + return HttpResponse(json_dump_disclosure(obj),content_type="application/json") diff --git a/ietf/liaisons/admin.py b/ietf/liaisons/admin.py index c7cb7a4dae..d873cce536 100644 --- a/ietf/liaisons/admin.py +++ b/ietf/liaisons/admin.py @@ -7,15 +7,16 @@ from ietf.liaisons.models import ( LiaisonStatement, LiaisonStatementEvent, RelatedLiaisonStatement, LiaisonStatementAttachment ) +from ietf.utils.admin import SaferTabularInline -class RelatedLiaisonStatementInline(admin.TabularInline): +class RelatedLiaisonStatementInline(SaferTabularInline): model = RelatedLiaisonStatement fk_name = 'source' raw_id_fields = ['target'] extra = 1 -class LiaisonStatementAttachmentInline(admin.TabularInline): +class LiaisonStatementAttachmentInline(SaferTabularInline): model = LiaisonStatementAttachment raw_id_fields = ['document'] extra = 1 @@ -24,7 +25,7 @@ class LiaisonStatementAdmin(admin.ModelAdmin): list_display = ['id', 'title', 'submitted', 'from_groups_short_display', 'purpose', 'related_to'] list_display_links = ['id', 'title'] ordering = ('title', ) - raw_id_fields = ('from_contact', 'attachments', 'from_groups', 'to_groups') + raw_id_fields = ('attachments', 'from_groups', 'to_groups') #filter_horizontal = ('from_groups', 'to_groups') inlines = [ RelatedLiaisonStatementInline, LiaisonStatementAttachmentInline ] @@ -50,4 +51,4 @@ class LiaisonStatementEventAdmin(admin.ModelAdmin): raw_id_fields = ["statement", "by"] admin.site.register(LiaisonStatement, LiaisonStatementAdmin) -admin.site.register(LiaisonStatementEvent, LiaisonStatementEventAdmin) \ No newline at end of file +admin.site.register(LiaisonStatementEvent, LiaisonStatementEventAdmin) diff --git a/ietf/liaisons/factories.py b/ietf/liaisons/factories.py index 6d93cf8cd2..ca588236e3 100644 --- a/ietf/liaisons/factories.py +++ b/ietf/liaisons/factories.py @@ -9,7 +9,7 @@ class Meta: skip_postgeneration_save = True title = factory.Faker('sentence') - from_contact = factory.SubFactory('ietf.person.factories.EmailFactory') + from_contact = factory.Faker('email') purpose_id = 'comment' body = 
factory.Faker('paragraph') state_id = 'posted' diff --git a/ietf/liaisons/forms.py b/ietf/liaisons/forms.py index 605c19902b..6ceda5ad38 100644 --- a/ietf/liaisons/forms.py +++ b/ietf/liaisons/forms.py @@ -3,38 +3,33 @@ import io -import os import operator - -from typing import Union # pyflakes:ignore - +import os from email.utils import parseaddr +from functools import reduce +from typing import Union, Optional # pyflakes:ignore from django import forms from django.conf import settings -from django.core.exceptions import ObjectDoesNotExist, ValidationError -from django.forms.utils import ErrorList -from django.db.models import Q -#from django.forms.widgets import RadioFieldRenderer +from django.core.exceptions import ValidationError from django.core.validators import validate_email +from django.db.models import Q, QuerySet +from django.forms.utils import ErrorList from django_stubs_ext import QuerySetAny -import debug # pyflakes:ignore - +from ietf.doc.models import Document +from ietf.group.models import Group from ietf.ietfauth.utils import has_role -from ietf.name.models import DocRelationshipName -from ietf.liaisons.utils import get_person_for_user,is_authorized_individual -from ietf.liaisons.widgets import ButtonWidget,ShowAttachmentsWidget -from ietf.liaisons.models import (LiaisonStatement, - LiaisonStatementEvent,LiaisonStatementAttachment,LiaisonStatementPurposeName) from ietf.liaisons.fields import SearchableLiaisonStatementsField -from ietf.group.models import Group -from ietf.person.models import Email -from ietf.person.fields import SearchableEmailField -from ietf.doc.models import Document, DocAlias -from ietf.utils.fields import DatepickerDateField +from ietf.liaisons.models import (LiaisonStatement, + LiaisonStatementEvent, LiaisonStatementAttachment, LiaisonStatementPurposeName) +from ietf.liaisons.utils import get_person_for_user, OUTGOING_LIAISON_ROLES, \ + INCOMING_LIAISON_ROLES +from ietf.liaisons.widgets import ButtonWidget, ShowAttachmentsWidget +from ietf.name.models import DocRelationshipName +from ietf.person.models import Person +from ietf.utils.fields import DatepickerDateField, ModelMultipleChoiceField from ietf.utils.timezone import date_today, datetime_from_date, DEADLINE_TZINFO -from functools import reduce ''' NOTES: @@ -51,45 +46,105 @@ def liaison_manager_sdos(person): return Group.objects.filter(type="sdo", state="active", role__person=person, role__name="liaiman").distinct() + def flatten_choices(choices): - '''Returns a flat choice list given one with option groups defined''' + """Returns a flat choice list given one with option groups defined + + n.b., Django allows mixing grouped options and top-level options. This helper only supports + the non-mixed case where every option is in an option group. + """ flat = [] - for optgroup,options in choices: + for optgroup, options in choices: flat.extend(options) return flat + + +def choices_from_group_queryset(groups: QuerySet[Group]): + """Get choices list for internal IETF groups user is authorized to select -def get_internal_choices(user): - '''Returns the set of internal IETF groups the user has permissions for, as a list - of choices suitable for use in a select widget. If user == None, all active internal - groups are included.''' + Returns a grouped list of choices suitable for use with a ChoiceField. If user is None, + includes all groups. 
+ """ + main = [] + areas = [] + wgs = [] + for g in groups.distinct().order_by("acronym"): + if g.acronym in ("ietf", "iesg", "iab"): + main.append((g.pk, f"The {g.acronym.upper()}")) + elif g.type_id == "area": + areas.append((g.pk, f"{g.acronym} - {g.name}")) + elif g.type_id == "wg": + wgs.append((g.pk, f"{g.acronym} - {g.name}")) choices = [] - groups = get_groups_for_person(user.person if user else None) - main = [ (g.pk, 'The {}'.format(g.acronym.upper())) for g in groups.filter(acronym__in=('ietf','iesg','iab')) ] - areas = [ (g.pk, '{} - {}'.format(g.acronym,g.name)) for g in groups.filter(type='area') ] - wgs = [ (g.pk, '{} - {}'.format(g.acronym,g.name)) for g in groups.filter(type='wg') ] - choices.append(('Main IETF Entities', main)) - choices.append(('IETF Areas', areas)) - choices.append(('IETF Working Groups', wgs )) + if len(main) > 0: + choices.append(("Main IETF Entities", main)) + if len(areas) > 0: + choices.append(("IETF Areas", areas)) + if len(wgs) > 0: + choices.append(("IETF Working Groups", wgs)) return choices -def get_groups_for_person(person): - '''Returns queryset of internal Groups the person has interesting roles in. - This is a refactor of IETFHierarchyManager.get_entities_for_person(). If Person - is None or Secretariat or Liaison Manager all internal IETF groups are returned. - ''' - if person == None or has_role(person.user, "Secretariat") or has_role(person.user, "Liaison Manager"): - # collect all internal IETF groups - queries = [Q(acronym__in=('ietf','iesg','iab')), - Q(type='area',state='active'), - Q(type='wg',state='active')] + +def all_internal_groups(): + """Get a queryset of all IETF groups suitable for LS To/From assignment""" + return Group.objects.filter( + Q(acronym__in=("ietf", "iesg", "iab")) + | Q(type="area", state="active") + | Q(type="wg", state="active") + ).distinct() + + +def internal_groups_for_person(person: Optional[Person]): + """Get a queryset of IETF groups suitable for LS To/From assignment by person""" + if person is None: + return Group.objects.none() # no person = no roles + + if has_role( + person.user, + ( + "Secretariat", + "IETF Chair", + "IAB Chair", + "Liaison Manager", + "Liaison Coordinator", + "Authorized Individual", + ), + ): + return all_internal_groups() + # Interesting roles, as Group queries + queries = [ + Q(role__person=person, role__name="chair", acronym="ietf"), + Q(role__person=person, role__name="chair", acronym="iab"), + Q(role__person=person, role__name="ad", type="area", state="active"), + Q( + role__person=person, + role__name__in=("chair", "secretary"), + type="wg", + state="active", + ), + Q( + parent__role__person=person, + parent__role__name="ad", + type="wg", + state="active", + ), + ] + if has_role(person.user, "Area Director"): + queries.append(Q(acronym__in=("ietf", "iesg"))) # AD can also choose these + return Group.objects.filter(reduce(operator.or_, queries)).distinct() + + +def external_groups_for_person(person): + """Get a queryset of external groups suitable for LS To/From assignment by person""" + filter_expr = Q(pk__in=[]) # start with no groups + # These roles can add all external sdo groups + if has_role(person.user, set(INCOMING_LIAISON_ROLES + OUTGOING_LIAISON_ROLES) - {"Liaison Manager", "Authorized Individual"}): + filter_expr |= Q(type="sdo") else: - # Interesting roles, as Group queries - queries = [Q(role__person=person,role__name='chair',acronym='ietf'), - Q(role__person=person,role__name__in=('chair','execdir'),acronym='iab'), - 
Q(role__person=person,role__name='ad',type='area',state='active'), - Q(role__person=person,role__name__in=('chair','secretary'),type='wg',state='active'), - Q(parent__role__person=person,parent__role__name='ad',type='wg',state='active')] - return Group.objects.filter(reduce(operator.or_,queries)).order_by('acronym').distinct() + # The person cannot add all external sdo groups; add any for which they are Liaison Manager + filter_expr |= Q(type="sdo", role__person=person, role__name__in=["auth", "liaiman"]) + return Group.objects.filter(state="active").filter(filter_expr).distinct().order_by("name") + def liaison_form_factory(request, type=None, **kwargs): """Returns appropriate Liaison entry form""" @@ -154,7 +209,7 @@ def get_results(self): query = self.cleaned_data.get('text') if query: q = (Q(title__icontains=query) | - Q(from_contact__address__icontains=query) | + Q(from_contact__icontains=query) | Q(to_contacts__icontains=query) | Q(other_identifiers__icontains=query) | Q(body__icontains=query) | @@ -200,7 +255,7 @@ def get_results(self): return results -class CustomModelMultipleChoiceField(forms.ModelMultipleChoiceField): +class CustomModelMultipleChoiceField(ModelMultipleChoiceField): '''If value is a QuerySet, return it as is (for use in widget.render)''' def prepare_value(self, value): if isinstance(value, QuerySetAny): @@ -215,14 +270,9 @@ def prepare_value(self, value): class LiaisonModelForm(forms.ModelForm): '''Specify fields which require a custom widget or that are not part of the model. ''' - from_groups = forms.ModelMultipleChoiceField(queryset=Group.objects.all(),label='Groups',required=False) - from_groups.widget.attrs["class"] = "select2-field" - from_groups.widget.attrs['data-minimum-input-length'] = 0 - from_contact = forms.EmailField() # type: Union[forms.EmailField, SearchableEmailField] + from_groups = ModelMultipleChoiceField(queryset=Group.objects.all(),label='Groups',required=False) to_contacts = forms.CharField(label="Contacts", widget=forms.Textarea(attrs={'rows':'3', }), strip=False) - to_groups = forms.ModelMultipleChoiceField(queryset=Group.objects,label='Groups',required=False) - to_groups.widget.attrs["class"] = "select2-field" - to_groups.widget.attrs['data-minimum-input-length'] = 0 + to_groups = ModelMultipleChoiceField(queryset=Group.objects,label='Groups',required=False) deadline = DatepickerDateField(date_format="yyyy-mm-dd", picker_settings={"autoclose": "1" }, label='Deadline', required=True) related_to = SearchableLiaisonStatementsField(label='Related Liaison Statement', required=False) submitted_date = DatepickerDateField(date_format="yyyy-mm-dd", picker_settings={"autoclose": "1" }, label='Submission date', required=True, initial=lambda: date_today(DEADLINE_TZINFO)) @@ -245,13 +295,17 @@ def __init__(self, user, *args, **kwargs): self.person = get_person_for_user(user) self.is_new = not self.instance.pk + self.fields["from_groups"].widget.attrs["class"] = "select2-field" + self.fields["from_groups"].widget.attrs["data-minimum-input-length"] = 0 self.fields["from_groups"].widget.attrs["data-placeholder"] = "Type in name to search for group" + self.fields["to_groups"].widget.attrs["class"] = "select2-field" + self.fields["to_groups"].widget.attrs["data-minimum-input-length"] = 0 self.fields["to_groups"].widget.attrs["data-placeholder"] = "Type in name to search for group" self.fields["to_contacts"].label = 'Contacts' self.fields["other_identifiers"].widget.attrs["rows"] = 2 - + # add email validators - for field in 
['from_contact','to_contacts','technical_contacts','action_holder_contacts','cc_contacts']: + for field in ['to_contacts','technical_contacts','action_holder_contacts','cc_contacts']: if field in self.fields: self.fields[field].validators.append(validate_emails) @@ -270,18 +324,6 @@ def clean_to_groups(self): raise forms.ValidationError('You must specify a To Group') return to_groups - def clean_from_contact(self): - contact = self.cleaned_data.get('from_contact') - from_groups = self.cleaned_data.get('from_groups') - try: - email = Email.objects.get(address=contact) - if not email.origin: - email.origin = "liaison: %s" % (','.join([ g.acronym for g in from_groups.all() ])) - email.save() - except ObjectDoesNotExist: - raise forms.ValidationError('Email address does not exist') - return email - # Note to future person: This is the wrong place to fix the new lines # in cc_contacts and to_contacts. Those belong in the save function. # Or at least somewhere other than here. @@ -375,12 +417,12 @@ def save_attachments(self): uploaded_filename = name + extension, ) ) - if created: - DocAlias.objects.create(name=attach.name).docs.add(attach) LiaisonStatementAttachment.objects.create(statement=self.instance,document=attach) attach_file = io.open(os.path.join(settings.LIAISON_ATTACH_PATH, attach.name + extension), 'wb') attach_file.write(attached_file.read()) attach_file.close() + attached_file.seek(0) + attach.store_file(attach.uploaded_filename, attached_file) if not self.is_new: # create modified event @@ -424,42 +466,39 @@ def set_to_fields(self): assert NotImplemented class IncomingLiaisonForm(LiaisonModelForm): - def clean(self): - if 'send' in list(self.data.keys()) and self.get_post_only(): - raise forms.ValidationError('As an IETF Liaison Manager you can not send incoming liaison statements, you only can post them') - return super(IncomingLiaisonForm, self).clean() def is_approved(self): '''Incoming Liaison Statements do not required approval''' return True - def get_post_only(self): - from_groups = self.cleaned_data.get('from_groups') - if has_role(self.user, "Secretariat") or is_authorized_individual(self.user,from_groups): - return False - return True - def set_from_fields(self): - '''Set from_groups and from_contact options and initial value based on user - accessing the form.''' - if has_role(self.user, "Secretariat"): - queryset = Group.objects.filter(type="sdo", state="active").order_by('name') - else: - queryset = Group.objects.filter(type="sdo", state="active", role__person=self.person, role__name__in=("liaiman", "auth")).distinct().order_by('name') - self.fields['from_contact'].initial = self.person.role_set.filter(group=queryset[0]).first().email.address - self.fields['from_contact'].widget.attrs['disabled'] = True - self.fields['from_groups'].queryset = queryset - self.fields['from_groups'].widget.submitter = str(self.person) - + """Configure from "From" fields based on user roles""" + qs = external_groups_for_person(self.person) + self.fields["from_groups"].queryset = qs + self.fields["from_groups"].widget.submitter = str(self.person) # if there's only one possibility make it the default - if len(queryset) == 1: - self.fields['from_groups'].initial = queryset + if len(qs) == 1: + self.fields['from_groups'].initial = qs + + # Note that the IAB chair currently doesn't get to work with incoming liaison statements + + # Removing this block at the request of the IAB - as a workaround until the new liaison tool is + # create, anyone with access to the form can set any from_contact 
value + # + # if not ( + # has_role(self.user, "Secretariat") + # or has_role(self.user, "Liaison Coordinator") + # ): + # self.fields["from_contact"].initial = ( + # self.person.role_set.filter(group=qs[0]).first().email.formatted_email() + # ) + # self.fields["from_contact"].widget.attrs["disabled"] = True def set_to_fields(self): '''Set to_groups and to_contacts options and initial value based on user accessing the form. For incoming Liaisons, to_groups choices is the full set. ''' - self.fields['to_groups'].choices = get_internal_choices(None) + self.fields['to_groups'].choices = choices_from_group_queryset(all_internal_groups()) class OutgoingLiaisonForm(LiaisonModelForm): @@ -473,46 +512,56 @@ def is_approved(self): return self.cleaned_data['approved'] def set_from_fields(self): - '''Set from_groups and from_contact options and initial value based on user - accessing the form''' - choices = get_internal_choices(self.user) - self.fields['from_groups'].choices = choices - - # set initial value if only one entry - flat_choices = flatten_choices(choices) + """Configure from "From" fields based on user roles""" + self.set_from_groups_field() + self.set_from_contact_field() + + def set_from_groups_field(self): + """Configure the from_groups field based on roles""" + grouped_choices = choices_from_group_queryset(internal_groups_for_person(self.person)) + flat_choices = flatten_choices(grouped_choices) if len(flat_choices) == 1: - self.fields['from_groups'].initial = [flat_choices[0][0]] - - if has_role(self.user, "Secretariat"): - self.fields['from_contact'] = SearchableEmailField(only_users=True) # secretariat can edit this field! - return - - if self.person.role_set.filter(name='liaiman',group__state='active'): - email = self.person.role_set.filter(name='liaiman',group__state='active').first().email.address - elif self.person.role_set.filter(name__in=('ad','chair'),group__state='active'): - email = self.person.role_set.filter(name__in=('ad','chair'),group__state='active').first().email.address + self.fields["from_groups"].choices = flat_choices + self.fields["from_groups"].initial = [flat_choices[0][0]] else: - email = self.person.email_address() + self.fields["from_groups"].choices = grouped_choices - # Non-secretariat user cannot change the from_contact field. Fill in its value. + def set_from_contact_field(self): + """Configure the from_contact field based on user roles""" + # Secretariat can set this to any valid address but gets no default + if has_role(self.user, "Secretariat"): + return + elif has_role(self.user, ["IAB Chair", "Liaison Coordinator"]): + self.fields["from_contact"].initial = "IAB Chair " + return + elif has_role(self.user, "IETF Chair"): + self.fields["from_contact"].initial = "IETF Chair " + return + # ... others have it set to the correct value and cannot change it self.fields['from_contact'].disabled = True - self.fields['from_contact'].initial = email - - def set_to_fields(self): - '''Set to_groups and to_contacts options and initial value based on user - accessing the form''' - # set options. 
if the user is a Liaison Manager and nothing more, reduce set to his SDOs - if has_role(self.user, "Liaison Manager") and not self.person.role_set.filter(name__in=('ad','chair'),group__state='active'): - queryset = Group.objects.filter(type="sdo", state="active", role__person=self.person, role__name="liaiman").distinct().order_by('name') + # Set up the querysets we might use - only evaluated as needed + liaison_manager_role = self.person.role_set.filter(name="liaiman", group__state="active") + chair_or_ad_role = self.person.role_set.filter( + name__in=("ad", "chair"), group__state="active" + ) + if liaison_manager_role.exists(): + from_contact_email = liaison_manager_role.first().email + elif chair_or_ad_role.exists(): + from_contact_email = chair_or_ad_role.first().email else: - # get all outgoing entities - queryset = Group.objects.filter(type="sdo", state="active").order_by('name') + from_contact_email = self.person.email() + self.fields['from_contact'].initial = from_contact_email.formatted_email() - self.fields['to_groups'].queryset = queryset + def set_to_fields(self): + """Configure the "To" fields based on user roles""" + qs = external_groups_for_person(self.person) + self.fields['to_groups'].queryset = qs # set initial if has_role(self.user, "Liaison Manager"): - self.fields['to_groups'].initial = [queryset.first()] + self.fields['to_groups'].initial = [ + qs.filter(role__person=self.person, role__name="liaiman").first() + ] class EditLiaisonForm(LiaisonModelForm): @@ -533,32 +582,20 @@ def save(self, *args, **kwargs): return self.instance def set_from_fields(self): - '''Set from_groups and from_contact options and initial value based on user - accessing the form.''' + """Configure from "From" fields based on user roles""" if self.instance.is_outgoing(): - self.fields['from_groups'].choices = get_internal_choices(self.user) + self.fields['from_groups'].choices = choices_from_group_queryset(internal_groups_for_person(self.person)) else: - if has_role(self.user, "Secretariat"): - queryset = Group.objects.filter(type="sdo").order_by('name') - else: - queryset = Group.objects.filter(type="sdo", role__person=self.person, role__name__in=("liaiman", "auth")).distinct().order_by('name') + self.fields["from_groups"].queryset = external_groups_for_person(self.person) + if not has_role(self.user, "Secretariat"): self.fields['from_contact'].widget.attrs['disabled'] = True - self.fields['from_groups'].queryset = queryset def set_to_fields(self): - '''Set to_groups and to_contacts options and initial value based on user - accessing the form. For incoming Liaisons, to_groups choices is the full set. 
- ''' + """Configure the "To" fields based on user roles""" if self.instance.is_outgoing(): - # if the user is a Liaison Manager and nothing more, reduce to set to his SDOs - if has_role(self.user, "Liaison Manager") and not self.person.role_set.filter(name__in=('ad','chair'),group__state='active'): - queryset = Group.objects.filter(type="sdo", role__person=self.person, role__name="liaiman").distinct().order_by('name') - else: - # get all outgoing entities - queryset = Group.objects.filter(type="sdo").order_by('name') - self.fields['to_groups'].queryset = queryset + self.fields['to_groups'].queryset = external_groups_for_person(self.person) else: - self.fields['to_groups'].choices = get_internal_choices(None) + self.fields['to_groups'].choices = choices_from_group_queryset(all_internal_groups()) class EditAttachmentForm(forms.Form): diff --git a/ietf/liaisons/mails.py b/ietf/liaisons/mails.py index 8708c8a078..878aada576 100644 --- a/ietf/liaisons/mails.py +++ b/ietf/liaisons/mails.py @@ -14,7 +14,10 @@ def send_liaison_by_email(request, liaison): subject = 'New Liaison Statement, "%s"' % (liaison.title) from_email = settings.LIAISON_UNIVERSAL_FROM - (to_email, cc) = gather_address_lists('liaison_statement_posted',liaison=liaison) + if liaison.is_outgoing(): + (to_email, cc) = gather_address_lists('liaison_statement_posted_outgoing',liaison=liaison) + else: + (to_email, cc) = gather_address_lists('liaison_statement_posted_incoming',liaison=liaison) bcc = ['statements@ietf.org'] body = render_to_string('liaisons/liaison_mail.txt', dict(liaison=liaison)) diff --git a/ietf/liaisons/migrations/0002_alter_liaisonstatement_response_contacts.py b/ietf/liaisons/migrations/0002_alter_liaisonstatement_response_contacts.py new file mode 100644 index 0000000000..ac0a11101b --- /dev/null +++ b/ietf/liaisons/migrations/0002_alter_liaisonstatement_response_contacts.py @@ -0,0 +1,20 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("liaisons", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="liaisonstatement", + name="response_contacts", + field=models.TextField( + blank=True, help_text="Where to send a response", max_length=1024 + ), + ), + ] diff --git a/ietf/liaisons/migrations/0003_liaisonstatement_from_contact_tmp.py b/ietf/liaisons/migrations/0003_liaisonstatement_from_contact_tmp.py new file mode 100644 index 0000000000..de2ce7ff59 --- /dev/null +++ b/ietf/liaisons/migrations/0003_liaisonstatement_from_contact_tmp.py @@ -0,0 +1,22 @@ +# Copyright The IETF Trust 2025 All Rights Reserved +from django.db import migrations, models +import ietf.utils.validators + + +class Migration(migrations.Migration): + dependencies = [ + ("liaisons", "0002_alter_liaisonstatement_response_contacts"), + ] + + operations = [ + migrations.AddField( + model_name="liaisonstatement", + name="from_contact_tmp", + field=models.CharField( + blank=True, + help_text="Address of the formal sender of the statement", + max_length=512, + validators=[ietf.utils.validators.validate_mailbox_address], + ), + ), + ] diff --git a/ietf/liaisons/migrations/0004_populate_liaisonstatement_from_contact_tmp.py b/ietf/liaisons/migrations/0004_populate_liaisonstatement_from_contact_tmp.py new file mode 100644 index 0000000000..dbab326b0c --- /dev/null +++ b/ietf/liaisons/migrations/0004_populate_liaisonstatement_from_contact_tmp.py @@ -0,0 +1,60 @@ +# Copyright The IETF Trust 2025, All Rights 
Reserved +from itertools import islice + +from django.db import migrations + +from ietf.person.name import plain_name +from ietf.utils.mail import formataddr +from ietf.utils.validators import validate_mailbox_address + + +def forward(apps, schema_editor): + def _formatted_email(email): + """Format an email address to match Email.formatted_email()""" + person = email.person + if person: + return formataddr( + ( + # inlined Person.plain_name(), minus the caching + person.plain if person.plain else plain_name(person.name), + email.address, + ) + ) + return email.address + + def _batched(iterable, n): + """Split an iterable into lists of length <= n + + (based on itertools example code for batched(), which is added in py312) + """ + iterator = iter(iterable) + batch = list(islice(iterator, n)) # consumes first n iterations + while batch: + yield batch + batch = list(islice(iterator, n)) # consumes next n iterations + + LiaisonStatement = apps.get_model("liaisons", "LiaisonStatement") + LiaisonStatement.objects.update(from_contact_tmp="") # ensure they're all blank + for batch in _batched( + LiaisonStatement.objects.exclude(from_contact=None).select_related( + "from_contact" + ), + 100, + ): + for ls in batch: + ls.from_contact_tmp = _formatted_email(ls.from_contact) + validate_mailbox_address( + ls.from_contact_tmp + ) # be sure it's permitted before we accept it + + LiaisonStatement.objects.bulk_update(batch, fields=["from_contact_tmp"]) + + +class Migration(migrations.Migration): + dependencies = [ + ("liaisons", "0003_liaisonstatement_from_contact_tmp"), + ] + + operations = [ + migrations.RunPython(forward), + ] diff --git a/ietf/liaisons/migrations/0005_replace_liaisonstatement_from_contact.py b/ietf/liaisons/migrations/0005_replace_liaisonstatement_from_contact.py new file mode 100644 index 0000000000..e1702ae3bc --- /dev/null +++ b/ietf/liaisons/migrations/0005_replace_liaisonstatement_from_contact.py @@ -0,0 +1,20 @@ +# Copyright The IETF Trust 2025 All Rights Reserved +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("liaisons", "0004_populate_liaisonstatement_from_contact_tmp"), + ] + + operations = [ + migrations.RemoveField( + model_name="liaisonstatement", + name="from_contact", + ), + migrations.RenameField( + model_name="liaisonstatement", + old_name="from_contact_tmp", + new_name="from_contact", + ), + ] diff --git a/ietf/liaisons/models.py b/ietf/liaisons/models.py index 6302bea779..a2d79ea476 100644 --- a/ietf/liaisons/models.py +++ b/ietf/liaisons/models.py @@ -7,13 +7,14 @@ from django.db import models from django.utils.text import slugify -from ietf.person.models import Email, Person +from ietf.person.models import Person from ietf.name.models import (LiaisonStatementPurposeName, LiaisonStatementState, LiaisonStatementEventTypeName, LiaisonStatementTagName, DocRelationshipName) from ietf.doc.models import Document from ietf.group.models import Group from ietf.utils.models import ForeignKey +from ietf.utils.validators import validate_mailbox_address # maps (previous state id, new state id) to event type id STATE_EVENT_MAPPING = { @@ -29,11 +30,16 @@ class LiaisonStatement(models.Model): title = models.CharField(max_length=255) from_groups = models.ManyToManyField(Group, blank=True, related_name='liaisonstatement_from_set') - from_contact = ForeignKey(Email, blank=True, null=True) + from_contact = models.CharField( + blank=True, + max_length=512, + help_text="Address of the formal sender of the statement", + 
validators=(validate_mailbox_address,) + ) to_groups = models.ManyToManyField(Group, blank=True, related_name='liaisonstatement_to_set') to_contacts = models.CharField(max_length=2000, help_text="Contacts at recipient group") - response_contacts = models.CharField(blank=True, max_length=255, help_text="Where to send a response") # RFC4053 + response_contacts = models.TextField(blank=True, max_length=1024, help_text="Where to send a response") # RFC4053 technical_contacts = models.CharField(blank=True, max_length=255, help_text="Who to contact for clarification") # RFC4053 action_holder_contacts = models.CharField(blank=True, max_length=255, help_text="Who makes sure action is completed") # incoming only? cc_contacts = models.TextField(blank=True) @@ -44,7 +50,7 @@ class LiaisonStatement(models.Model): body = models.TextField(blank=True) tags = models.ManyToManyField(LiaisonStatementTagName, blank=True) - attachments = models.ManyToManyField(Document, through='LiaisonStatementAttachment', blank=True) + attachments = models.ManyToManyField(Document, through='liaisons.LiaisonStatementAttachment', blank=True) state = ForeignKey(LiaisonStatementState, default='pending') class Meta: @@ -85,7 +91,7 @@ def name(self): if self.from_groups.count(): frm = ', '.join([i.acronym or i.name for i in self.from_groups.all()]) else: - frm = self.from_contact.person.name + frm = self.from_contact if self.to_groups.count(): to = ', '.join([i.acronym or i.name for i in self.to_groups.all()]) else: diff --git a/ietf/liaisons/resources.py b/ietf/liaisons/resources.py index 8f31ea3a64..02cd159a11 100644 --- a/ietf/liaisons/resources.py +++ b/ietf/liaisons/resources.py @@ -15,12 +15,10 @@ RelatedLiaisonStatement) -from ietf.person.resources import EmailResource from ietf.group.resources import GroupResource from ietf.name.resources import LiaisonStatementPurposeNameResource, LiaisonStatementTagNameResource, LiaisonStatementStateResource from ietf.doc.resources import DocumentResource class LiaisonStatementResource(ModelResource): - from_contact = ToOneField(EmailResource, 'from_contact', null=True) purpose = ToOneField(LiaisonStatementPurposeNameResource, 'purpose') state = ToOneField(LiaisonStatementStateResource, 'state') from_groups = ToManyField(GroupResource, 'from_groups', null=True) @@ -36,6 +34,7 @@ class Meta: filtering = { "id": ALL, "title": ALL, + "from_contact": ALL, "to_contacts": ALL, "response_contacts": ALL, "technical_contacts": ALL, @@ -44,9 +43,6 @@ class Meta: "deadline": ALL, "other_identifiers": ALL, "body": ALL, - "from_name": ALL, - "to_name": ALL, - "from_contact": ALL_WITH_RELATIONS, "purpose": ALL_WITH_RELATIONS, "state": ALL_WITH_RELATIONS, "from_groups": ALL_WITH_RELATIONS, diff --git a/ietf/liaisons/tests.py b/ietf/liaisons/tests.py index a0186f6a01..e29045443f 100644 --- a/ietf/liaisons/tests.py +++ b/ietf/liaisons/tests.py @@ -19,6 +19,7 @@ from io import StringIO from pyquery import PyQuery +from ietf.doc.storage_utils import retrieve_str from ietf.utils.test_utils import TestCase, login_testing_unauthorized from ietf.utils.mail import outbox @@ -109,65 +110,74 @@ def test_help_pages(self): self.assertEqual(self.client.get('/liaison/help/from_ietf/').status_code, 200) self.assertEqual(self.client.get('/liaison/help/to_ietf/').status_code, 200) + def test_list_other_sdo(self): + GroupFactory(type_id="sdo", state_id="conclude", acronym="third") + GroupFactory(type_id="sdo", state_id="active", acronym="second") + GroupFactory(type_id="sdo", state_id="active", acronym="first") + url = 
urlreverse("ietf.liaisons.views.list_other_sdo") + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + q = PyQuery(r.content) + self.assertEqual(len(q("h1")), 2) + first_td_elements_text = [e.text for e in q("tr").find("td:first-child a")] + self.assertEqual(first_td_elements_text, ["first", "second", "third"]) class UnitTests(TestCase): - def test_get_cc(self): - from ietf.liaisons.views import get_cc,EMAIL_ALIASES + def test_get_contacts_for_liaison_messages_for_group_primary(self): + from ietf.mailtrigger.utils import get_contacts_for_liaison_messages_for_group_primary,EMAIL_ALIASES # test IETF - cc = get_cc(Group.objects.get(acronym='ietf')) + cc = get_contacts_for_liaison_messages_for_group_primary(Group.objects.get(acronym='ietf')) self.assertTrue(EMAIL_ALIASES['IESG'] in cc) self.assertTrue(EMAIL_ALIASES['IETFCHAIR'] in cc) # test IAB - cc = get_cc(Group.objects.get(acronym='iab')) + cc = get_contacts_for_liaison_messages_for_group_primary(Group.objects.get(acronym='iab')) self.assertTrue(EMAIL_ALIASES['IAB'] in cc) self.assertTrue(EMAIL_ALIASES['IABCHAIR'] in cc) - self.assertTrue(EMAIL_ALIASES['IABEXECUTIVEDIRECTOR'] in cc) # test an Area area = Group.objects.filter(type='area').first() - cc = get_cc(area) + cc = get_contacts_for_liaison_messages_for_group_primary(area) self.assertTrue(EMAIL_ALIASES['IETFCHAIR'] in cc) self.assertTrue(contacts_from_roles([area.ad_role()]) in cc) # test a Working Group wg = Group.objects.filter(type='wg').first() - cc = get_cc(wg) + cc = get_contacts_for_liaison_messages_for_group_primary(wg) self.assertTrue(contacts_from_roles([wg.parent.ad_role()]) in cc) self.assertTrue(contacts_from_roles([wg.get_chair()]) in cc) # test an SDO sdo = RoleFactory(name_id='liaiman',group__type_id='sdo',).group - cc = get_cc(sdo) + cc = get_contacts_for_liaison_messages_for_group_primary(sdo) self.assertTrue(contacts_from_roles([sdo.role_set.filter(name='liaiman').first()]) in cc) # test a cc_contact role cc_contact_role = RoleFactory(name_id='liaison_cc_contact', group=sdo) - cc = get_cc(sdo) + cc = get_contacts_for_liaison_messages_for_group_primary(sdo) self.assertIn(contact_email_from_role(cc_contact_role), cc) - def test_get_contacts_for_group(self): - from ietf.liaisons.views import get_contacts_for_group, EMAIL_ALIASES + def test_get_contacts_for_liaison_messages_for_group_secondary(self): + from ietf.mailtrigger.utils import get_contacts_for_liaison_messages_for_group_secondary,EMAIL_ALIASES - # test explicit + # test explicit group contacts sdo = GroupFactory(type_id='sdo') contact_email = RoleFactory(name_id='liaison_contact', group=sdo).email.address - contacts = get_contacts_for_group(sdo) + contacts = get_contacts_for_liaison_messages_for_group_secondary(sdo) self.assertIsNotNone(contact_email) self.assertIn(contact_email, contacts) # test area area = Group.objects.filter(type='area').first() - contacts = get_contacts_for_group(area) + contacts = get_contacts_for_liaison_messages_for_group_secondary(area) self.assertTrue(area.ad_role().email.address in contacts) # test wg wg = Group.objects.filter(type='wg').first() - contacts = get_contacts_for_group(wg) + contacts = get_contacts_for_liaison_messages_for_group_secondary(wg) self.assertTrue(wg.get_chair().email.address in contacts) # test ietf - contacts = get_contacts_for_group(Group.objects.get(acronym='ietf')) + contacts = get_contacts_for_liaison_messages_for_group_secondary(Group.objects.get(acronym='ietf')) self.assertTrue(EMAIL_ALIASES['IETFCHAIR'] in contacts) # test 
iab - contacts = get_contacts_for_group(Group.objects.get(acronym='iab')) + contacts = get_contacts_for_liaison_messages_for_group_secondary(Group.objects.get(acronym='iab')) self.assertTrue(EMAIL_ALIASES['IABCHAIR'] in contacts) - self.assertTrue(EMAIL_ALIASES['IABEXECUTIVEDIRECTOR'] in contacts) # test iesg - contacts = get_contacts_for_group(Group.objects.get(acronym='iesg')) + contacts = get_contacts_for_liaison_messages_for_group_secondary(Group.objects.get(acronym='iesg')) self.assertTrue(EMAIL_ALIASES['IESG'] in contacts) def test_needs_approval(self): @@ -204,7 +214,6 @@ def test_ajax(self): self.assertEqual(r.status_code, 200) data = r.json() self.assertEqual(data["error"], False) - self.assertEqual(data["post_only"], False) self.assertTrue('cc' in data) self.assertTrue('needs_approval' in data) self.assertTrue('to_contacts' in data) @@ -364,6 +373,9 @@ def test_approval_process(self): self.assertEqual(len(q('form button[name=approved]')), 0) # check the detail page / authorized + r = self.client.post(url, dict(dead="1")) + self.assertEqual(r.status_code, 403) + mailbox_before = len(outbox) self.client.login(username="ulm-liaiman", password="ulm-liaiman+password") r = self.client.get(url) self.assertEqual(r.status_code, 200) @@ -414,7 +426,8 @@ def test_edit_liaison(self): # edit attachments_before = liaison.attachments.count() - test_file = StringIO("hello world") + test_content = "hello world" + test_file = StringIO(test_content) test_file.name = "unnamed" r = self.client.post(url, dict(from_groups=str(from_group.pk), @@ -452,16 +465,20 @@ def test_edit_liaison(self): self.assertEqual(attachment.title, "attachment") with (Path(settings.LIAISON_ATTACH_PATH) / attachment.uploaded_filename).open() as f: written_content = f.read() + self.assertEqual(written_content, test_content) + self.assertEqual( + retrieve_str(attachment.type_id, attachment.uploaded_filename), + test_content, + ) - test_file.seek(0) - self.assertEqual(written_content, test_file.read()) def test_incoming_access(self): - '''Ensure only Secretariat, Liaison Managers, and Authorized Individuals + '''Ensure only Secretariat, Liaison Managers, Liaison Coordinators, and Authorized Individuals have access to incoming liaisons. 
''' sdo = RoleFactory(name_id='liaiman',group__type_id='sdo', person__user__username='ulm-liaiman').group RoleFactory(name_id='auth',group=sdo,person__user__username='ulm-auth') + RoleFactory(name_id='liaison_coordinator', group__acronym='iab', person__user__username='liaison-coordinator') stmt = LiaisonStatementFactory(from_groups=[sdo,]) LiaisonStatementEventFactory(statement=stmt,type_id='posted') RoleFactory(name_id='chair',person__user__username='marschairman',group__acronym='mars') @@ -494,6 +511,15 @@ def test_incoming_access(self): r = self.client.get(addurl) self.assertEqual(r.status_code, 200) + # Liaison Coordinator has access + self.client.login(username="liaison-coordinator", password="liaison-coordinator+password") + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertEqual(len(q('a.btn:contains("New incoming liaison")')), 1) + r = self.client.get(addurl) + self.assertEqual(r.status_code, 200) + # Authorized Individual has access self.client.login(username="ulm-auth", password="ulm-auth+password") r = self.client.get(url) @@ -516,9 +542,9 @@ def test_outgoing_access(self): sdo = RoleFactory(name_id='liaiman',group__type_id='sdo', person__user__username='ulm-liaiman').group RoleFactory(name_id='auth',group=sdo,person__user__username='ulm-auth') + RoleFactory(name_id='liaison_coordinator', group__acronym='iab', person__user__username='liaison-coordinator') mars = RoleFactory(name_id='chair',person__user__username='marschairman',group__acronym='mars').group RoleFactory(name_id='secr',group=mars,person__user__username='mars-secr') - RoleFactory(name_id='execdir',group=Group.objects.get(acronym='iab'),person__user__username='iab-execdir') url = urlreverse('ietf.liaisons.views.liaison_list') addurl = urlreverse('ietf.liaisons.views.liaison_add', kwargs={'type':'outgoing'}) @@ -576,17 +602,17 @@ def test_outgoing_access(self): r = self.client.get(addurl) self.assertEqual(r.status_code, 200) - # IAB Executive Director - self.assertTrue(self.client.login(username="iab-execdir", password="iab-execdir+password")) + # Liaison Manager has access + self.assertTrue(self.client.login(username="ulm-liaiman", password="ulm-liaiman+password")) r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) - self.assertEqual(len(q("a.btn:contains('New outgoing liaison')")), 1) + self.assertEqual(len(q('a.btn:contains("New outgoing liaison")')), 1) r = self.client.get(addurl) self.assertEqual(r.status_code, 200) - # Liaison Manager has access - self.assertTrue(self.client.login(username="ulm-liaiman", password="ulm-liaiman+password")) + # Liaison Coordinator has access + self.assertTrue(self.client.login(username="liaison-coordinator", password="liaison-coordinator+password")) r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) @@ -704,12 +730,13 @@ def test_add_incoming_liaison(self): # add new mailbox_before = len(outbox) - test_file = StringIO("hello world") + test_content = "hello world" + test_file = StringIO(test_content) test_file.name = "unnamed" from_groups = [ str(g.pk) for g in Group.objects.filter(type="sdo") ] to_group = Group.objects.get(acronym="mars") submitter = Person.objects.get(user__username="marschairman") - today = date_today(datetime.timezone.utc) + today = date_today(datetime.UTC) related_liaison = liaison r = self.client.post(url, dict(from_groups=from_groups, @@ -734,7 +761,7 @@ def test_add_incoming_liaison(self): l = 
LiaisonStatement.objects.all().order_by("-id")[0] self.assertEqual(l.from_groups.count(),2) - self.assertEqual(l.from_contact.address, submitter.email_address()) + self.assertEqual(l.from_contact, submitter.email_address()) self.assertSequenceEqual(l.to_groups.all(),[to_group]) self.assertEqual(l.technical_contacts, "technical_contact@example.com") self.assertEqual(l.action_holder_contacts, "action_holder_contacts@example.com") @@ -756,6 +783,11 @@ def test_add_incoming_liaison(self): self.assertEqual(attachment.title, "attachment") with (Path(settings.LIAISON_ATTACH_PATH) / attachment.uploaded_filename).open() as f: written_content = f.read() + self.assertEqual(written_content, test_content) + self.assertEqual( + retrieve_str(attachment.type_id, attachment.uploaded_filename), + test_content + ) test_file.seek(0) self.assertEqual(written_content, test_file.read()) @@ -764,8 +796,11 @@ def test_add_incoming_liaison(self): self.assertTrue("Liaison Statement" in outbox[-1]["Subject"]) self.assertTrue('to_contacts@' in outbox[-1]['To']) + self.assertTrue(submitter.email_address(), outbox[-1]['To']) self.assertTrue('cc@' in outbox[-1]['Cc']) + + def test_add_outgoing_liaison(self): RoleFactory(name_id='liaiman',group__type_id='sdo', person__user__username='ulm-liaiman') wg = RoleFactory(name_id='chair',person__user__username='marschairman',group__acronym='mars').group @@ -783,12 +818,13 @@ def test_add_outgoing_liaison(self): # add new mailbox_before = len(outbox) - test_file = StringIO("hello world") + test_content = "hello world" + test_file = StringIO(test_content) test_file.name = "unnamed" from_group = Group.objects.get(acronym="mars") to_group = Group.objects.filter(type="sdo")[0] submitter = Person.objects.get(user__username="marschairman") - today = date_today(datetime.timezone.utc) + today = date_today(datetime.UTC) related_liaison = liaison r = self.client.post(url, dict(from_groups=str(from_group.pk), @@ -813,7 +849,7 @@ def test_add_outgoing_liaison(self): l = LiaisonStatement.objects.all().order_by("-id")[0] self.assertSequenceEqual(l.from_groups.all(), [from_group]) - self.assertEqual(l.from_contact.address, submitter.email_address()) + self.assertEqual(l.from_contact, submitter.email_address()) self.assertSequenceEqual(l.to_groups.all(), [to_group]) self.assertEqual(l.to_contacts, "to_contacts@example.com") self.assertEqual(l.technical_contacts, "technical_contact@example.com") @@ -835,44 +871,16 @@ def test_add_outgoing_liaison(self): self.assertEqual(attachment.title, "attachment") with (Path(settings.LIAISON_ATTACH_PATH) / attachment.uploaded_filename).open() as f: written_content = f.read() - - test_file.seek(0) - self.assertEqual(written_content, test_file.read()) + self.assertEqual(written_content, test_content) + self.assertEqual( + retrieve_str(attachment.type_id, attachment.uploaded_filename), + test_content + ) self.assertEqual(len(outbox), mailbox_before + 1) self.assertTrue("Liaison Statement" in outbox[-1]["Subject"]) self.assertTrue('aread@' in outbox[-1]['To']) - - def test_add_outgoing_liaison_unapproved_post_only(self): - RoleFactory(name_id='liaiman',group__type_id='sdo', person__user__username='ulm-liaiman') - mars = RoleFactory(name_id='chair',person__user__username='marschairman',group__acronym='mars').group - RoleFactory(name_id='ad',group=mars) - - url = urlreverse('ietf.liaisons.views.liaison_add', kwargs={'type':'outgoing'}) - login_testing_unauthorized(self, "secretary", url) - - # add new - mailbox_before = len(outbox) - from_group = 
Group.objects.get(acronym="mars") - to_group = Group.objects.filter(type="sdo")[0] - submitter = Person.objects.get(user__username="marschairman") - today = date_today(datetime.timezone.utc) - r = self.client.post(url, - dict(from_groups=str(from_group.pk), - from_contact=submitter.email_address(), - to_groups=str(to_group.pk), - to_contacts='to_contacts@example.com', - approved="", - purpose="info", - title="title", - submitted_date=today.strftime("%Y-%m-%d"), - body="body", - post_only="1", - )) - self.assertEqual(r.status_code, 302) - l = LiaisonStatement.objects.all().order_by("-id")[0] - self.assertEqual(l.state.slug,'pending') - self.assertEqual(len(outbox), mailbox_before + 1) + self.assertTrue(submitter.email_address(), outbox[-1]['Cc']) def test_liaison_add_attachment(self): liaison = LiaisonStatementFactory(deadline=date_today(DEADLINE_TZINFO)+datetime.timedelta(days=1)) @@ -882,11 +890,12 @@ def test_liaison_add_attachment(self): # get minimum edit post data - file = StringIO('dummy file') + test_data = "dummy file" + file = StringIO(test_data) file.name = "upload.txt" post_data = dict( from_groups = ','.join([ str(x.pk) for x in liaison.from_groups.all() ]), - from_contact = liaison.from_contact.address, + from_contact = liaison.from_contact, to_groups = ','.join([ str(x.pk) for x in liaison.to_groups.all() ]), to_contacts = 'to_contacts@example.com', purpose = liaison.purpose.slug, @@ -909,28 +918,50 @@ def test_liaison_add_attachment(self): self.assertEqual(liaison.attachments.count(),1) event = liaison.liaisonstatementevent_set.order_by('id').last() self.assertTrue(event.desc.startswith('Added attachment')) + attachment = liaison.attachments.get() + self.assertEqual( + retrieve_str(attachment.type_id, attachment.uploaded_filename), + test_data + ) def test_liaison_edit_attachment(self): - - attachment = LiaisonStatementAttachmentFactory(document__name='liaiatt-1') - url = urlreverse('ietf.liaisons.views.liaison_edit_attachment', kwargs=dict(object_id=attachment.statement_id,doc_id=attachment.document_id)) + attachment = LiaisonStatementAttachmentFactory(document__name="liaiatt-1") + url = urlreverse( + "ietf.liaisons.views.liaison_edit_attachment", + kwargs=dict( + object_id=attachment.statement_id, doc_id=attachment.document_id + ), + ) login_testing_unauthorized(self, "secretary", url) r = self.client.get(url) self.assertEqual(r.status_code, 200) - post_data = dict(title='New Title') - r = self.client.post(url,post_data) + post_data = dict(title="New Title") + r = self.client.post(url, post_data) attachment = LiaisonStatementAttachment.objects.get(pk=attachment.pk) self.assertEqual(r.status_code, 302) - self.assertEqual(attachment.document.title,'New Title') - - def test_liaison_delete_attachment(self): - attachment = LiaisonStatementAttachmentFactory(document__name='liaiatt-1') - liaison = attachment.statement - url = urlreverse('ietf.liaisons.views.liaison_delete_attachment', kwargs=dict(object_id=liaison.pk,attach_id=attachment.pk)) - login_testing_unauthorized(self, "secretary", url) + self.assertEqual(attachment.document.title, "New Title") + + # ensure attempts to edit attachments not attached to this liaison statement fail + other_attachment = LiaisonStatementAttachmentFactory(document__name="liaiatt-2") + url = urlreverse( + "ietf.liaisons.views.liaison_edit_attachment", + kwargs=dict( + object_id=attachment.statement_id, doc_id=other_attachment.document_id + ), + ) r = self.client.get(url) - self.assertEqual(r.status_code, 302) - 
self.assertEqual(liaison.liaisonstatementattachment_set.filter(removed=False).count(),0) + self.assertEqual(r.status_code, 404) + r = self.client.post(url, dict(title="New Title")) + self.assertEqual(r.status_code, 404) + + # def test_liaison_delete_attachment(self): + # attachment = LiaisonStatementAttachmentFactory(document__name='liaiatt-1') + # liaison = attachment.statement + # url = urlreverse('ietf.liaisons.views.liaison_delete_attachment', kwargs=dict(object_id=liaison.pk,attach_id=attachment.pk)) + # login_testing_unauthorized(self, "secretary", url) + # r = self.client.get(url) + # self.assertEqual(r.status_code, 302) + # self.assertEqual(liaison.liaisonstatementattachment_set.filter(removed=False).count(),0) def test_in_response(self): '''A statement with purpose=in_response must have related statement specified''' @@ -1034,7 +1065,7 @@ def test_search(self): LiaisonStatementEventFactory(type_id='posted', statement__body="Has recently in its body",statement__from_groups=[GroupFactory(type_id='sdo',acronym='ulm'),]) # Statement 2 s2 = LiaisonStatementEventFactory(type_id='posted', statement__body="That word does not occur here", statement__title="Nor does it occur here") - s2.time=datetime.datetime(2010, 1, 1, tzinfo=datetime.timezone.utc) + s2.time=datetime.datetime(2010, 1, 1, tzinfo=datetime.UTC) s2.save() # test list only, no search filters @@ -1099,17 +1130,6 @@ def test_redirect_for_approval(self): # ------------------------------------------------- # Form validations # ------------------------------------------------- - def test_post_and_send_fail(self): - RoleFactory(name_id='liaiman',person__user__username='ulm-liaiman',group__type_id='sdo',group__acronym='ulm') - GroupFactory(type_id='wg',acronym='mars') - - url = urlreverse('ietf.liaisons.views.liaison_add', kwargs={'type':'incoming'}) - login_testing_unauthorized(self, "ulm-liaiman", url) - - r = self.client.post(url,get_liaison_post_data(),follow=True) - - self.assertEqual(r.status_code, 200) - self.assertContains(r, 'As an IETF Liaison Manager you can not send incoming liaison statements') def test_deadline_field(self): '''Required for action, comment, not info, response''' diff --git a/ietf/liaisons/tests_forms.py b/ietf/liaisons/tests_forms.py new file mode 100644 index 0000000000..101c0c8298 --- /dev/null +++ b/ietf/liaisons/tests_forms.py @@ -0,0 +1,217 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +from ietf.group.factories import GroupFactory, RoleFactory +from ietf.group.models import Group +from ietf.liaisons.forms import ( + flatten_choices, + choices_from_group_queryset, + all_internal_groups, + internal_groups_for_person, + external_groups_for_person, +) +from ietf.person.factories import PersonFactory +from ietf.person.models import Person +from ietf.utils.test_utils import TestCase + + +class HelperTests(TestCase): + @staticmethod + def _alphabetically_by_acronym(group_list): + return sorted(group_list, key=lambda item: item.acronym) + + def test_choices_from_group_queryset(self): + main_groups = list(Group.objects.filter(acronym__in=["ietf", "iab"])) + areas = GroupFactory.create_batch(2, type_id="area") + wgs = GroupFactory.create_batch(2) + + # No groups + self.assertEqual( + choices_from_group_queryset(Group.objects.none()), + [], + ) + + # Main groups only + choices = choices_from_group_queryset( + Group.objects.filter(pk__in=[g.pk for g in main_groups]) + ) + self.assertEqual(len(choices), 1, "show one optgroup, hide empty ones") + self.assertEqual(choices[0][0], "Main IETF Entities") + 
self.assertEqual( + [val for val, _ in choices[0][1]], # extract the choice value + [g.pk for g in self._alphabetically_by_acronym(main_groups)], + ) + + # Area groups only + choices = choices_from_group_queryset( + Group.objects.filter(pk__in=[g.pk for g in areas]) + ) + self.assertEqual(len(choices), 1, "show one optgroup, hide empty ones") + self.assertEqual(choices[0][0], "IETF Areas") + self.assertEqual( + [val for val, _ in choices[0][1]], # extract the choice value + [g.pk for g in self._alphabetically_by_acronym(areas)], + ) + + # WGs only + choices = choices_from_group_queryset( + Group.objects.filter(pk__in=[g.pk for g in wgs]) + ) + self.assertEqual(len(choices), 1, "show one optgroup, hide empty ones") + self.assertEqual(choices[0][0], "IETF Working Groups") + self.assertEqual( + [val for val, _ in choices[0][1]], # extract the choice value + [g.pk for g in self._alphabetically_by_acronym(wgs)], + ) + + # All together + choices = choices_from_group_queryset( + Group.objects.filter(pk__in=[g.pk for g in main_groups + areas + wgs]) + ) + self.assertEqual(len(choices), 3, "show all three optgroups") + self.assertEqual( + [optgroup_label for optgroup_label, _ in choices], + ["Main IETF Entities", "IETF Areas", "IETF Working Groups"], + ) + self.assertEqual( + [val for val, _ in choices[0][1]], # extract the choice value + [g.pk for g in self._alphabetically_by_acronym(main_groups)], + ) + self.assertEqual( + [val for val, _ in choices[1][1]], # extract the choice value + [g.pk for g in self._alphabetically_by_acronym(areas)], + ) + self.assertEqual( + [val for val, _ in choices[2][1]], # extract the choice value + [g.pk for g in self._alphabetically_by_acronym(wgs)], + ) + + def test_all_internal_groups(self): + # test relies on the data created in ietf.utils.test_data.make_immutable_test_data() + self.assertCountEqual( + all_internal_groups().values_list("acronym", flat=True), + {"ietf", "iab", "iesg", "farfut", "ops", "sops"}, + ) + + def test_internal_groups_for_person(self): + # test relies on the data created in ietf.utils.test_data.make_immutable_test_data() + # todo add liaison coordinator when modeled + RoleFactory( + name_id="auth", + group__type_id="sdo", + group__acronym="sdo", + person__user__username="sdo-authperson", + ) + + self.assertQuerysetEqual( + internal_groups_for_person(None), + Group.objects.none(), + msg="no Person means no groups", + ) + self.assertQuerysetEqual( + internal_groups_for_person(PersonFactory()), + Group.objects.none(), + msg="no Role means no groups", + ) + + for username in ( + "secretary", + "ietf-chair", + "iab-chair", + "sdo-authperson", + ): + returned_queryset = internal_groups_for_person( + Person.objects.get(user__username=username) + ) + self.assertCountEqual( + returned_queryset.values_list("acronym", flat=True), + {"ietf", "iab", "iesg", "farfut", "ops", "sops"}, + f"{username} should get all groups", + ) + + # "ops-ad" user is the AD of the "ops" area, which contains the "sops" wg + self.assertCountEqual( + internal_groups_for_person( + Person.objects.get(user__username="ops-ad") + ).values_list("acronym", flat=True), + {"ietf", "iesg", "ops", "sops"}, + "area director should get only their area, its wgs, and the ietf/iesg groups", + ) + + self.assertCountEqual( + internal_groups_for_person( + Person.objects.get(user__username="sopschairman"), + ).values_list("acronym", flat=True), + {"sops"}, + "wg chair should get only their wg", + ) + + def test_external_groups_for_person(self): + RoleFactory(name_id="liaison_coordinator", 
group__acronym="iab", person__user__username="liaison-coordinator") + the_sdo = GroupFactory(type_id="sdo", acronym="the-sdo") + liaison_manager = RoleFactory(name_id="liaiman", group=the_sdo).person + authperson = RoleFactory(name_id="auth", group=the_sdo).person + + GroupFactory(acronym="other-sdo", type_id="sdo") + for username in ( + "secretary", + "ietf-chair", + "iab-chair", + "liaison-coordinator", + "ad", + "sopschairman", + "sopssecretary", + ): + person = Person.objects.get(user__username=username) + self.assertCountEqual( + external_groups_for_person( + person, + ).values_list("acronym", flat=True), + {"the-sdo", "other-sdo"}, + f"{username} should get all SDO groups", + ) + tmp_role = RoleFactory(name_id="chair", group__type_id="wg", person=person) + self.assertCountEqual( + external_groups_for_person( + person, + ).values_list("acronym", flat=True), + {"the-sdo", "other-sdo"}, + f"{username} should still get all SDO groups when they also a liaison manager", + ) + tmp_role.delete() + + self.assertCountEqual( + external_groups_for_person(liaison_manager).values_list( + "acronym", flat=True + ), + {"the-sdo"}, + "liaison manager should get only their SDO group", + ) + self.assertCountEqual( + external_groups_for_person(authperson).values_list("acronym", flat=True), + {"the-sdo"}, + "authorized individual should get only their SDO group", + ) + + def test_flatten_choices(self): + self.assertEqual(flatten_choices([]), []) + self.assertEqual( + flatten_choices( + ( + ("group A", ()), + ("group B", (("val0", "label0"), ("val1", "label1"))), + ("group C", (("val2", "label2"),)), + ) + ), + [("val0", "label0"), ("val1", "label1"), ("val2", "label2")], + ) + + +class IncomingLiaisonFormTests(TestCase): + pass + + +class OutgoingLiaisonFormTests(TestCase): + pass + + +class EditLiaisonFormTests(TestCase): + pass diff --git a/ietf/liaisons/urls.py b/ietf/liaisons/urls.py index a4afbfef5d..498df3b965 100644 --- a/ietf/liaisons/urls.py +++ b/ietf/liaisons/urls.py @@ -26,8 +26,8 @@ url(r'^(?P\d+)/$', views.liaison_detail), url(r'^(?P\d+)/addcomment/$', views.add_comment), url(r'^(?P\d+)/edit/$', views.liaison_edit), - url(r'^(?P\d+)/edit-attachment/(?P[A-Za-z0-9._+-]+)$', views.liaison_edit_attachment), - url(r'^(?P\d+)/delete-attachment/(?P[A-Za-z0-9._+-]+)$', views.liaison_delete_attachment), + url(r'^(?P\d+)/edit-attachment/(?P[0-9]+)$', views.liaison_edit_attachment), + url(r'^(?P\d+)/delete-attachment/(?P[0-9]+)$', views.liaison_delete_attachment), url(r'^(?P\d+)/history/$', views.liaison_history), url(r'^(?P\d+)/reply/$', views.liaison_reply), url(r'^(?P\d+)/resend/$', views.liaison_resend), @@ -37,4 +37,5 @@ url(r'^add/$', views.redirect_add), url(r'^for_approval/$', views.redirect_for_approval), url(r'^for_approval/(?P\d+)/$', views.redirect_for_approval), + url(r"^list_other_sdo/$", views.list_other_sdo), ] diff --git a/ietf/liaisons/utils.py b/ietf/liaisons/utils.py index df48831917..469bbc5c87 100644 --- a/ietf/liaisons/utils.py +++ b/ietf/liaisons/utils.py @@ -4,6 +4,21 @@ from ietf.liaisons.models import LiaisonStatement from ietf.ietfauth.utils import has_role, passes_test_decorator +# Roles allowed to create and manage outgoing liaison statements. +OUTGOING_LIAISON_ROLES = [ + "Area Director", + "IAB Chair", + "IETF Chair", + "Liaison Manager", + "Liaison Coordinator", + "Secretariat", + "WG Chair", + "WG Secretary", +] + +# Roles allowed to create and manage incoming liaison statements. 
+INCOMING_LIAISON_ROLES = ["Authorized Individual", "Liaison Manager", "Liaison Coordinator", "Secretariat"] + can_submit_liaison_required = passes_test_decorator( lambda u, *args, **kwargs: can_add_liaison(u), "Restricted to participants who are authorized to submit liaison statements on behalf of the various IETF entities") @@ -30,13 +45,13 @@ def can_edit_liaison(user, liaison): '''Returns True if user has edit / approval authority. True if: - - user is Secretariat + - user is Secretariat or Liaison Coordinator - liaison is outgoing and user has approval authority - user is liaison manager of all SDOs involved ''' if not user.is_authenticated: return False - if has_role(user, "Secretariat"): + if has_role(user, "Secretariat") or has_role(user, "Liaison Coordinator"): return True if liaison.is_outgoing() and liaison in approvable_liaison_statements(user): @@ -59,11 +74,10 @@ def get_person_for_user(user): return None def can_add_outgoing_liaison(user): - return has_role(user, ["Area Director","WG Chair","WG Secretary","IETF Chair","IAB Chair", - "IAB Executive Director","Liaison Manager","Secretariat"]) + return has_role(user, OUTGOING_LIAISON_ROLES) def can_add_incoming_liaison(user): - return has_role(user, ["Liaison Manager","Authorized Individual","Secretariat"]) + return has_role(user, INCOMING_LIAISON_ROLES) def can_add_liaison(user): return can_add_incoming_liaison(user) or can_add_outgoing_liaison(user) diff --git a/ietf/liaisons/views.py b/ietf/liaisons/views.py index a8e80a5194..59c6ea69fc 100644 --- a/ietf/liaisons/views.py +++ b/ietf/liaisons/views.py @@ -7,19 +7,17 @@ from django.contrib import messages from django.urls import reverse as urlreverse -from django.core.exceptions import ValidationError +from django.core.exceptions import ValidationError, ObjectDoesNotExist, PermissionDenied from django.core.validators import validate_email from django.db.models import Q, Prefetch -from django.http import HttpResponse +from django.http import Http404, HttpResponse from django.shortcuts import render, get_object_or_404, redirect import debug # pyflakes:ignore -from ietf.doc.models import Document from ietf.ietfauth.utils import role_required, has_role from ietf.group.models import Group, Role -from ietf.liaisons.models import (LiaisonStatement,LiaisonStatementEvent, - LiaisonStatementAttachment) +from ietf.liaisons.models import LiaisonStatement,LiaisonStatementEvent from ietf.liaisons.utils import (get_person_for_user, can_add_outgoing_liaison, can_add_incoming_liaison, can_edit_liaison,can_submit_liaison_required, can_add_liaison) @@ -29,13 +27,6 @@ from ietf.name.models import LiaisonStatementTagName from ietf.utils.response import permission_denied -EMAIL_ALIASES = { - 'IETFCHAIR':'The IETF Chair ', - 'IESG':'The IESG ', - 'IAB':'The IAB ', - 'IABCHAIR':'The IAB Chair ', - 'IABEXECUTIVEDIRECTOR':'The IAB Executive Director '} - # ------------------------------------------------- # Helper Functions # ------------------------------------------------- @@ -57,7 +48,7 @@ def _can_take_care(liaison, user): return False if user.is_authenticated: - if has_role(user, "Secretariat"): + if has_role(user, "Secretariat") or has_role(user, "Liaison Coordinator"): return True else: return _find_person_in_emails(liaison, get_person_for_user(user)) @@ -84,8 +75,6 @@ def _find_person_in_emails(liaison, person): return True elif addr in ('iab@iab.org', 'iab-chair@iab.org') and has_role(person.user, "IAB Chair"): return True - elif addr in ('execd@iab.org', ) and has_role(person.user, "IAB 
Executive Director"): - return True return False @@ -97,66 +86,6 @@ def contacts_from_roles(roles): emails = [ contact_email_from_role(r) for r in roles ] return ','.join(emails) -def get_cc(group): - '''Returns list of emails to use as CC for group. Simplified refactor of IETFHierarchy - get_cc() and get_from_cc() - ''' - emails = [] - - # role based CCs - if group.acronym in ('ietf','iesg'): - emails.append(EMAIL_ALIASES['IESG']) - emails.append(EMAIL_ALIASES['IETFCHAIR']) - elif group.acronym in ('iab'): - emails.append(EMAIL_ALIASES['IAB']) - emails.append(EMAIL_ALIASES['IABCHAIR']) - emails.append(EMAIL_ALIASES['IABEXECUTIVEDIRECTOR']) - elif group.type_id == 'area': - emails.append(EMAIL_ALIASES['IETFCHAIR']) - ad_roles = group.role_set.filter(name='ad') - emails.extend([ contact_email_from_role(r) for r in ad_roles ]) - elif group.type_id == 'wg': - ad_roles = group.parent.role_set.filter(name='ad') - emails.extend([ contact_email_from_role(r) for r in ad_roles ]) - chair_roles = group.role_set.filter(name='chair') - emails.extend([ contact_email_from_role(r) for r in chair_roles ]) - if group.list_email: - emails.append('{} Discussion List <{}>'.format(group.name,group.list_email)) - elif group.type_id == 'sdo': - liaiman_roles = group.role_set.filter(name='liaiman') - emails.extend([ contact_email_from_role(r) for r in liaiman_roles ]) - - # explicit CCs - liaison_cc_roles = group.role_set.filter(name='liaison_cc_contact') - emails.extend([ contact_email_from_role(r) for r in liaison_cc_roles ]) - - return emails - -def get_contacts_for_group(group): - '''Returns default contacts for groups as a comma separated string''' - # use explicit default contacts if defined - explicit_contacts = contacts_from_roles(group.role_set.filter(name='liaison_contact')) - if explicit_contacts: - return explicit_contacts - - # otherwise construct based on group type - contacts = [] - if group.type_id == 'area': - roles = group.role_set.filter(name='ad') - contacts.append(contacts_from_roles(roles)) - elif group.type_id == 'wg': - roles = group.role_set.filter(name='chair') - contacts.append(contacts_from_roles(roles)) - elif group.acronym == 'ietf': - contacts.append(EMAIL_ALIASES['IETFCHAIR']) - elif group.acronym == 'iab': - contacts.append(EMAIL_ALIASES['IABCHAIR']) - contacts.append(EMAIL_ALIASES['IABEXECUTIVEDIRECTOR']) - elif group.acronym == 'iesg': - contacts.append(EMAIL_ALIASES['IESG']) - - return ','.join(contacts) - def get_details_tabs(stmt, selected): return [ t + (t[0].lower() == selected.lower(),) @@ -171,7 +100,7 @@ def needs_approval(group,person): user = person.user if group.acronym in ('ietf','iesg') and has_role(user, 'IETF Chair'): return False - if group.acronym == 'iab' and (has_role(user,'IAB Chair') or has_role(user,'IAB Executive Director')): + if group.acronym == 'iab' and has_role(user,'IAB Chair'): return False if group.type_id == 'area' and group.role_set.filter(name='ad',person=person): return False @@ -189,23 +118,14 @@ def normalize_sort(request): return sort, order_by -def post_only(group,person): - '''Returns true if the user is restricted to post_only (vs. post_and_send) for this - group. This is for incoming liaison statements. - - Secretariat have full access. 
- - Authorized Individuals have full access for the group they are associated with - - Liaison Managers can post only - ''' - if group.type_id == 'sdo' and ( not(has_role(person.user,"Secretariat") or group.role_set.filter(name='auth',person=person)) ): - return True - else: - return False # ------------------------------------------------- # Ajax Functions # ------------------------------------------------- @can_submit_liaison_required def ajax_get_liaison_info(request): + from ietf.mailtrigger.utils import get_contacts_for_liaison_messages_for_group_primary,get_contacts_for_liaison_messages_for_group_secondary + '''Returns dictionary of info to update entry form given the groups that have been selected ''' @@ -222,20 +142,18 @@ def ajax_get_liaison_info(request): cc = [] does_need_approval = [] - can_post_only = [] to_contacts = [] response_contacts = [] - result = {'response_contacts':[],'to_contacts': [], 'cc': [], 'needs_approval': False, 'post_only': False, 'full_list': []} + result = {'response_contacts':[],'to_contacts': [], 'cc': [], 'needs_approval': False, 'full_list': []} for group in from_groups: - cc.extend(get_cc(group)) + cc.extend(get_contacts_for_liaison_messages_for_group_primary(group)) does_need_approval.append(needs_approval(group,person)) - can_post_only.append(post_only(group,person)) - response_contacts.append(get_contacts_for_group(group)) + response_contacts.append(get_contacts_for_liaison_messages_for_group_secondary(group)) for group in to_groups: - cc.extend(get_cc(group)) - to_contacts.append(get_contacts_for_group(group)) + cc.extend(get_contacts_for_liaison_messages_for_group_primary(group)) + to_contacts.append(get_contacts_for_liaison_messages_for_group_secondary(group)) # if there are from_groups and any need approval if does_need_approval: @@ -246,12 +164,15 @@ def ajax_get_liaison_info(request): else: does_need_approval = True - result.update({'error': False, - 'cc': list(set(cc)), - 'response_contacts':list(set(response_contacts)), - 'to_contacts': list(set(to_contacts)), - 'needs_approval': does_need_approval, - 'post_only': any(can_post_only)}) + result.update( + { + "error": False, + "cc": list(set(cc)), + "response_contacts": list(set(response_contacts)), + "to_contacts": list(set(to_contacts)), + "needs_approval": does_need_approval, + } + ) json_result = json.dumps(result) return HttpResponse(json_result, content_type='application/json') @@ -375,23 +296,29 @@ def liaison_history(request, object_id): def liaison_delete_attachment(request, object_id, attach_id): liaison = get_object_or_404(LiaisonStatement, pk=object_id) - attach = get_object_or_404(LiaisonStatementAttachment, pk=attach_id) + if not can_edit_liaison(request.user, liaison): permission_denied(request, "You are not authorized for this action.") - - # FIXME: this view should use POST instead of GET when deleting - attach.removed = True - attach.save() - - # create event - LiaisonStatementEvent.objects.create( - type_id='modified', - by=get_person_for_user(request.user), - statement=liaison, - desc='Attachment Removed: {}'.format(attach.document.title) - ) - messages.success(request, 'Attachment Deleted') - return redirect('ietf.liaisons.views.liaison_detail', object_id=liaison.pk) + else: + permission_denied(request, "This operation is temporarily unavailable. 
Ask the secretariat to mark the attachment as removed using the admin.") + + # The following will be replaced with a different approach in the next generation of the liaison tool + # attach = get_object_or_404(LiaisonStatementAttachment, pk=attach_id) + + # # FIXME: this view should use POST instead of GET when deleting + # attach.removed = True + # debug.say("Got here") + # attach.save() + + # # create event + # LiaisonStatementEvent.objects.create( + # type_id='modified', + # by=get_person_for_user(request.user), + # statement=liaison, + # desc='Attachment Removed: {}'.format(attach.document.title) + # ) + # messages.success(request, 'Attachment Deleted') + # return redirect('ietf.liaisons.views.liaison_detail', object_id=liaison.pk) def liaison_detail(request, object_id): liaison = get_object_or_404(LiaisonStatement, pk=object_id) @@ -402,22 +329,28 @@ def liaison_detail(request, object_id): if request.method == 'POST': - if request.POST.get('approved'): - liaison.change_state(state_id='approved',person=person) - liaison.change_state(state_id='posted',person=person) - send_liaison_by_email(request, liaison) - messages.success(request,'Liaison Statement Approved and Posted') - elif request.POST.get('dead'): - liaison.change_state(state_id='dead',person=person) - messages.success(request,'Liaison Statement Killed') - elif request.POST.get('resurrect'): - liaison.change_state(state_id='pending',person=person) - messages.success(request,'Liaison Statement Resurrected') - elif request.POST.get('do_action_taken') and can_take_care: + if request.POST.get('do_action_taken') and can_take_care: liaison.tags.remove('required') liaison.tags.add('taken') can_take_care = False messages.success(request,'Action handled') + else: + if can_edit: + if request.POST.get('approved'): + liaison.change_state(state_id='approved',person=person) + liaison.change_state(state_id='posted',person=person) + send_liaison_by_email(request, liaison) + messages.success(request,'Liaison Statement Approved and Posted') + elif request.POST.get('dead'): + liaison.change_state(state_id='dead',person=person) + messages.success(request,'Liaison Statement Killed') + elif request.POST.get('resurrect'): + liaison.change_state(state_id='pending',person=person) + messages.success(request,'Liaison Statement Resurrected') + else: + pass + else: + raise PermissionDenied() relations_by = [i.target for i in liaison.source_of_set.filter(target__state__slug='posted')] relations_to = [i.source for i in liaison.target_of_set.filter(source__state__slug='posted')] @@ -441,7 +374,11 @@ def liaison_edit(request, object_id): def liaison_edit_attachment(request, object_id, doc_id): '''Edit the Liaison Statement attachment title''' liaison = get_object_or_404(LiaisonStatement, pk=object_id) - doc = get_object_or_404(Document, pk=doc_id) + try: + doc = liaison.attachments.get(pk=doc_id) + except ObjectDoesNotExist: + raise Http404 + if not can_edit_liaison(request.user, liaison): permission_denied(request, "You are not authorized for this action.") @@ -572,3 +509,17 @@ def liaison_resend(request, object_id): messages.success(request,'Liaison Statement resent') return redirect('ietf.liaisons.views.liaison_list') + +@role_required("Secretariat", "IAB", "Liaison Coordinator", "Liaison Manager") +def list_other_sdo(request): + def _sdo_order_key(obj:Group)-> tuple[str,str]: + state_order = { + "active" : "a", + "conclude": "b", + } + return (state_order.get(obj.state.slug,f"c{obj.state.slug}"), obj.acronym) + + sdos = 
sorted(list(Group.objects.filter(type="sdo")),key = _sdo_order_key) + for sdo in sdos: + sdo.liaison_managers =[r.person for r in sdo.role_set.filter(name="liaiman")] + return render(request,"liaisons/list_other_sdo.html",dict(sdos=sdos)) diff --git a/ietf/liaisons/widgets.py b/ietf/liaisons/widgets.py index 74368e83f2..48db8af0a3 100644 --- a/ietf/liaisons/widgets.py +++ b/ietf/liaisons/widgets.py @@ -26,7 +26,9 @@ def render(self, name, value, **kwargs): html += '%s' % conditional_escape(i) required_str = 'Please fill in %s to attach a new file' % conditional_escape(self.required_label) html += '%s' % conditional_escape(required_str) - html += '' % conditional_escape(self.label) + html += ''.format( + f"id_{name}", conditional_escape(self.label) + ) return mark_safe(html) diff --git a/ietf/mailinglists/admin.py b/ietf/mailinglists/admin.py index 90efaf9c93..081ee6477c 100644 --- a/ietf/mailinglists/admin.py +++ b/ietf/mailinglists/admin.py @@ -2,20 +2,15 @@ from django.contrib import admin -from ietf.mailinglists.models import List, Subscribed, Allowlisted +from ietf.mailinglists.models import NonWgMailingList, Allowlisted -class ListAdmin(admin.ModelAdmin): - list_display = ('id', 'name', 'description', 'advertised') - search_fields = ('name',) -admin.site.register(List, ListAdmin) -class SubscribedAdmin(admin.ModelAdmin): - list_display = ('id', 'time', 'email') - raw_id_fields = ('lists',) - search_fields = ('email',) -admin.site.register(Subscribed, SubscribedAdmin) +class NonWgMailingListAdmin(admin.ModelAdmin): + list_display = ('id', 'name', 'domain', 'description') + search_fields = ('name', 'domain') +admin.site.register(NonWgMailingList, NonWgMailingListAdmin) class AllowlistedAdmin(admin.ModelAdmin): diff --git a/ietf/mailinglists/factories.py b/ietf/mailinglists/factories.py index bc6b2b8203..3be5770d76 100644 --- a/ietf/mailinglists/factories.py +++ b/ietf/mailinglists/factories.py @@ -3,16 +3,15 @@ import factory -import random -from ietf.mailinglists.models import List +from ietf.mailinglists.models import NonWgMailingList -class ListFactory(factory.django.DjangoModelFactory): +class NonWgMailingListFactory(factory.django.DjangoModelFactory): class Meta: - model = List + model = NonWgMailingList name = factory.Sequence(lambda n: "list-name-%s" % n) + domain = factory.Sequence(lambda n: "domain-%s.org" % n) description = factory.Faker('sentence', nb_words=10) - advertised = factory.LazyAttribute(lambda obj: random.randint(0, 1)) diff --git a/ietf/mailinglists/management/commands/import_mailman_listinfo.py b/ietf/mailinglists/management/commands/import_mailman_listinfo.py deleted file mode 100644 index 8d23964112..0000000000 --- a/ietf/mailinglists/management/commands/import_mailman_listinfo.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright The IETF Trust 2016-2019, All Rights Reserved - -import json -import sys -import subprocess -import time -from textwrap import dedent - -import debug # pyflakes:ignore - -from pathlib import Path - -from django.conf import settings -from django.core.management.base import BaseCommand -from django.core.exceptions import MultipleObjectsReturned - - -from ietf.mailinglists.models import List, Subscribed -from ietf.utils.log import log - -mark = time.time() - -def import_mailman_listinfo(verbosity=0): - def note(msg): - if verbosity > 2: - sys.stdout.write(msg) - sys.stdout.write('\n') - def log_time(msg): - global mark - if verbosity > 1: - t = time.time() - log(msg+' (%.1fs)'% (t-mark)) - mark = t - - cmd = str(Path(settings.BASE_DIR) / "bin" 
/ "mailman_listinfo.py") - result = subprocess.run([cmd], capture_output=True) - if result.stderr: - log("Error exporting information from mailmain") - log(result.stderr) - return - mailman_export = json.loads(result.stdout) - - names = sorted(mailman_export.keys()) - addr_max_length = Subscribed._meta.get_field('email').max_length - - subscribed = { l.name: set(l.subscribed_set.values_list('email', flat=True)) for l in List.objects.all().prefetch_related('subscribed_set') } - - for name in names: - note("List: %s" % mailman_export[name]['internal_name']) - - lists = List.objects.filter(name=mailman_export[name]['real_name']) - if lists.count() > 1: - # Arbitrary choice; we'll update the remaining item next - for item in lists[1:]: - item.delete() - mmlist, created = List.objects.get_or_create(name=mailman_export[name]['real_name']) - dirty = False - desc = mailman_export[name]['description'][:256] - if mmlist.description != desc: - mmlist.description = desc - dirty = True - if mmlist.advertised != mailman_export[name]['advertised']: - mmlist.advertised = mailman_export[name]['advertised'] - dirty = True - if dirty: - mmlist.save() - # The following calls return lowercased addresses - if mailman_export[name]['advertised']: - members = set(mailman_export[name]['members']) - if not mailman_export[name]['real_name'] in subscribed: - # 2022-7-29: lots of these going into the logs but being ignored... - # log("Note: didn't find '%s' in the dictionary of subscriptions" % mailman_export[name]['real_name']) - continue - known = subscribed[mailman_export[name]['real_name']] - log_time(" Fetched known list members from database") - to_remove = known - members - to_add = members - known - for addr in to_remove: - note(" Removing subscription: %s" % (addr)) - old = Subscribed.objects.get(email=addr) # Intentionally leaving this as case-sensitive in postgres - old.lists.remove(mmlist) - if old.lists.count() == 0: - note(" Removing address with no subscriptions: %s" % (addr)) - old.delete() - if to_remove: - log(" Removed %s addresses from %s" % (len(to_remove), name)) - for addr in to_add: - if len(addr) > addr_max_length: - sys.stderr.write(" ** Email address subscribed to '%s' too long for table: <%s>\n" % (name, addr)) - continue - note(" Adding subscription: %s" % (addr)) - try: - new, created = Subscribed.objects.get_or_create(email=addr) # Intentionally leaving this as case-sensitive in postgres - except MultipleObjectsReturned as e: - sys.stderr.write(" ** Error handling %s in %s: %s\n" % (addr, name, e)) - continue - new.lists.add(mmlist) - if to_add: - log(" Added %s addresses to %s" % (len(to_add), name)) - log("Completed import of list info from Mailman") - -class Command(BaseCommand): - """ - Import list information from Mailman. - - Import announced list names, descriptions, and subscribers, by calling the - appropriate Mailman functions and adding entries to the database. - - Run this from cron regularly, with sufficient permissions to access the - mailman database files. - - """ - - help = dedent(__doc__).strip() - - #option_list = BaseCommand.option_list + ( ) - - - def handle(self, *filenames, **options): - """ - - * Import announced lists, with appropriate meta-information. - - * For each list, import the members. 
- - """ - - verbosity = int(options.get('verbosity')) - - import_mailman_listinfo(verbosity) diff --git a/ietf/mailinglists/migrations/0002_nonwgmailinglist.py b/ietf/mailinglists/migrations/0002_nonwgmailinglist.py new file mode 100644 index 0000000000..dfc941db90 --- /dev/null +++ b/ietf/mailinglists/migrations/0002_nonwgmailinglist.py @@ -0,0 +1,628 @@ +# Copyright The IETF Trust 2024, All Rights Reserved + +from django.db import migrations, models + + +def forward(apps, schema_editor): + NonWgMailingList = apps.get_model("mailinglists", "NonWgMailingList") + List = apps.get_model("mailinglists", "List") + + for l in List.objects.filter( + pk__in=[ + 10754, + 10769, + 10770, + 10768, + 10787, + 10785, + 10791, + 10786, + 10816, + 10817, + 10819, + 10818, + 10922, + 10923, + 10921, + 10940, + 10941, + 10942, + 572, + 10297, + 182, + 43, + 10704, + 10314, + 201, + 419, + 282, + 149, + 223, + 10874, + 10598, + 10639, + 10875, + 10737, + 105, + 65, + 10781, + 10771, + 10946, + 518, + 421, + 214, + 285, + 393, + 445, + 553, + 183, + 10725, + 33, + 10766, + 114, + 417, + 10789, + 10876, + 4244, + 10705, + 10706, + 10878, + 10324, + 10879, + 10642, + 10821, + 547, + 532, + 10636, + 10592, + 327, + 248, + 10697, + 288, + 346, + 10731, + 10955, + 10857, + 446, + 55, + 10799, + 10800, + 10801, + 10612, + 73, + 3, + 358, + 9640, + 10868, + 378, + 462, + 6595, + 10914, + 10915, + 197, + 63, + 558, + 10824, + 124, + 10881, + 177, + 312, + 252, + 185, + 523, + 4572, + 10618, + 206, + 68, + 10859, + 560, + 513, + 246, + 7817, + 148, + 10864, + 10589, + 10773, + 10748, + 364, + 311, + 10302, + 10272, + 10929, + 171, + 10865, + 10919, + 377, + 469, + 467, + 411, + 505, + 6318, + 10811, + 10304, + 10882, + 10845, + 568, + 10883, + 4774, + 264, + 10779, + 10884, + 10303, + 409, + 10590, + 451, + 10749, + 10765, + 486, + 519, + 10593, + 10313, + 550, + 10707, + 307, + 10861, + 10654, + 10708, + 10275, + 134, + 460, + 10911, + 10574, + 10885, + 10814, + 10676, + 10747, + 10305, + 10688, + 36, + 10844, + 10620, + 458, + 10282, + 10594, + 10752, + 389, + 296, + 10684, + 48, + 533, + 443, + 10739, + 491, + 139, + 461, + 10690, + 424, + 290, + 336, + 31, + 10709, + 382, + 10866, + 10724, + 539, + 10710, + 559, + 10609, + 74, + 10582, + 133, + 10621, + 34, + 10596, + 442, + 13, + 56, + 128, + 323, + 10285, + 80, + 315, + 3520, + 10949, + 10950, + 189, + 2599, + 10822, + 164, + 10267, + 10286, + 464, + 440, + 254, + 262, + 10943, + 465, + 75, + 179, + 162, + 457, + 10572, + 372, + 452, + 10273, + 88, + 366, + 331, + 140, + 407, + 416, + 91, + 10632, + 542, + 151, + 117, + 431, + 10628, + 10271, + 14, + 540, + 278, + 352, + 159, + 10851, + 9981, + 10694, + 10619, + 10732, + 320, + 348, + 338, + 349, + 10678, + 468, + 293, + 350, + 402, + 57, + 524, + 141, + 71, + 67, + 508, + 7828, + 10268, + 10631, + 10713, + 10889, + 345, + 78, + 342, + 190, + 10869, + 46, + 334, + 255, + 5823, + 400, + 10867, + 23, + 10666, + 10685, + 405, + 2801, + 92, + 137, + 10640, + 10656, + 104, + 123, + 10643, + 10891, + 466, + 10567, + 10318, + 526, + 30, + 222, + 194, + 10735, + 10714, + 247, + 493, + 1162, + 414, + 10648, + 10677, + 126, + 16, + 422, + 271, + 295, + 81, + 10634, + 544, + 10850, + 426, + 573, + 353, + 10829, + 538, + 10913, + 10566, + 167, + 10675, + 272, + 10673, + 10767, + 528, + 284, + 564, + 268, + 10825, + 231, + 520, + 10645, + 10872, + 515, + 10956, + 10947, + 569, + 233, + 10952, + 195, + 10938, + 2809, + 10591, + 10665, + 9639, + 10775, + 10760, + 10715, + 10716, + 10667, + 361, + 184, + 10935, + 10957, + 
10944, + 94, + 449, + 525, + 1962, + 10300, + 10894, + 9156, + 10774, + 256, + 289, + 218, + 187, + 40, + 10777, + 10761, + 10670, + 249, + 10764, + 420, + 548, + 232, + 410, + 196, + 72, + 335, + 70, + 146, + 10287, + 10299, + 10311, + 10895, + 10617, + 531, + 343, + 10934, + 10933, + 10597, + 158, + 10600, + 10692, + 8630, + 556, + 324, + 11, + 10784, + 498, + 10772, + 478, + 10833, + 10691, + 391, + 10565, + 10669, + 113, + 110, + 7831, + 10855, + 10312, + 10315, + 10896, + 10672, + 10306, + 438, + 395, + 82, + 10599, + 10953, + 10858, + 10807, + 10717, + 310, + 10808, + 119, + 10595, + 10718, + 10317, + 10898, + 454, + 427, + 10583, + 10916, + 403, + 10843, + 10899, + 291, + 10812, + 10900, + 10794, + 341, + 121, + 230, + 136, + 166, + 394, + 234, + 10901, + 2466, + 10573, + 10939, + 221, + 490, + 10820, + 10873, + 10792, + 10870, + 10793, + 10904, + 181, + 10693, + 482, + 10611, + 125, + 10568, + 10788, + 211, + 10756, + 10719, + 100, + 228, + 5833, + 251, + 122, + 39, + 534, + 437, + 504, + 10613, + 439, + 306, + 10863, + 10823, + 10926, + 76, + 227, + 59, + 42, + 455, + 10927, + 10928, + 204, + 430, + 10720, + 267, + 396, + 10849, + 10308, + 281, + 10905, + 10736, + 168, + 153, + 385, + 89, + 529, + 412, + 215, + 484, + 10951, + 66, + 173, + 10633, + 10681, + 3613, + 10274, + 10750, + 367, + 387, + 10832, + 35, + 147, + 10325, + 10671, + 565, + 313, + 10871, + 10751, + 37, + 10936, + 10937, + 287, + 496, + 244, + 10841, + 10683, + 10906, + 10584, + 479, + 10856, + 163, + 10910, + 257, + 276, + 10840, + 10689, + 365, + 10847, + 99, + 77, + 435, + 213, + 15, + 10932, + 58, + 10722, + 131, + 363, + 10674, + 322, + 180, + 10917, + 10918, + 10738, + 10954, + 10581, + 208, + 337, + 4, + 571, + 10668, + 10291, + ] + ): + NonWgMailingList.objects.create(name=l.name, description=l.description) + +class Migration(migrations.Migration): + + dependencies = [ + ("mailinglists", "0001_initial"), + ] + + operations = [ + migrations.CreateModel( + name="NonWgMailingList", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=32)), + ("description", models.CharField(max_length=256)), + ], + ), + migrations.RunPython(forward), + ] diff --git a/ietf/mailinglists/migrations/0003_remove_subscribed_lists_delete_list_and_more.py b/ietf/mailinglists/migrations/0003_remove_subscribed_lists_delete_list_and_more.py new file mode 100644 index 0000000000..6171136b2a --- /dev/null +++ b/ietf/mailinglists/migrations/0003_remove_subscribed_lists_delete_list_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.9 on 2024-02-02 23:04 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("mailinglists", "0002_nonwgmailinglist"), + ] + + operations = [ + migrations.RemoveField( + model_name="subscribed", + name="lists", + ), + migrations.DeleteModel( + name="List", + ), + migrations.DeleteModel( + name="Subscribed", + ), + ] diff --git a/ietf/mailinglists/migrations/0004_nonwgmailinglist_domain.py b/ietf/mailinglists/migrations/0004_nonwgmailinglist_domain.py new file mode 100644 index 0000000000..b977313a87 --- /dev/null +++ b/ietf/mailinglists/migrations/0004_nonwgmailinglist_domain.py @@ -0,0 +1,59 @@ +# Generated by Django 4.2.13 on 2024-06-05 17:51 + +from django.db import migrations, models +from django.db.models.functions import Lower + +IAB_NAMES = ["iab", "iab-stream"] +RFCED_NAMES = [ + "auth48archive", + "rfc-dist", + "rfc-editor-rfi", + 
"rfc-interest", + "rpat", + "rsab", +] +IRTF_NAMES = [ + "anrp-select", + "anrw-sc", + "anrw-tpc", + "crypto-panel", + "dtn-interest", + "irsg", + "irtf-announce", + "smart", + "teaching", + "travel-grants-commitee", +] + + +def forward(apps, schema_editor): + NonWgMailingList = apps.get_model("mailinglists", "NonWgMailingList") + NonWgMailingList.objects.annotate(lowername=Lower("name")).filter( + lowername__in=IAB_NAMES + ).update(domain="iab.org") + NonWgMailingList.objects.annotate(lowername=Lower("name")).filter( + lowername__in=IRTF_NAMES + ).update(domain="irtf.org") + NonWgMailingList.objects.annotate(lowername=Lower("name")).filter( + lowername__in=RFCED_NAMES + ).update(domain="rfc-editor.org") + + +def reverse(apps, schema_editor): + pass + + +class Migration(migrations.Migration): + + dependencies = [ + ("mailinglists", "0003_remove_subscribed_lists_delete_list_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="nonwgmailinglist", + name="domain", + field=models.CharField(default="ietf.org", max_length=32), + ), + migrations.RunPython(forward, reverse), + ] diff --git a/ietf/mailinglists/models.py b/ietf/mailinglists/models.py index 21f3a76710..828d3823a4 100644 --- a/ietf/mailinglists/models.py +++ b/ietf/mailinglists/models.py @@ -9,25 +9,21 @@ from ietf.person.models import Person from ietf.utils.models import ForeignKey -class List(models.Model): + +# NonWgMailingList is a temporary bridging class to hold information known about mailman2 +# while decoupling from mailman2 until we integrate with mailman3 +class NonWgMailingList(models.Model): name = models.CharField(max_length=32) + domain = models.CharField(max_length=32, default="ietf.org") description = models.CharField(max_length=256) - advertised = models.BooleanField(default=True) def __str__(self): - return "" % self.name + return "" % self.name def info_url(self): - return settings.MAILING_LIST_INFO_URL % {'list_addr': self.name } - -class Subscribed(models.Model): - time = models.DateTimeField(auto_now_add=True) - email = models.CharField(max_length=128, validators=[validate_email]) - lists = models.ManyToManyField(List) - def __str__(self): - return "" % (self.email, self.time) - class Meta: - verbose_name_plural = "Subscribed" + return settings.MAILING_LIST_INFO_URL % {'list_addr': self.name.lower(), 'domain': self.domain.lower() } +# Allowlisted is unused, but is not being dropped until its human-curated content +# is archived outside this database. 
class Allowlisted(models.Model): time = models.DateTimeField(auto_now_add=True) email = models.CharField("Email address", max_length=64, validators=[validate_email]) diff --git a/ietf/mailinglists/resources.py b/ietf/mailinglists/resources.py index 018a8327b1..4d1713b7b6 100644 --- a/ietf/mailinglists/resources.py +++ b/ietf/mailinglists/resources.py @@ -11,7 +11,7 @@ from ietf import api from ietf.api import ToOneField # pyflakes:ignore -from ietf.mailinglists.models import Allowlisted, List, Subscribed +from ietf.mailinglists.models import Allowlisted, NonWgMailingList from ietf.person.resources import PersonResource @@ -31,34 +31,20 @@ class Meta: } api.mailinglists.register(AllowlistedResource()) -class ListResource(ModelResource): +class NonWgMailingListResource(ModelResource): class Meta: - queryset = List.objects.all() + queryset = NonWgMailingList.objects.all() serializer = api.Serializer() cache = SimpleCache() - #resource_name = 'list' + #resource_name = 'nonwgmailinglist' ordering = ['id', ] filtering = { "id": ALL, "name": ALL, + "domain": ALL, "description": ALL, - "advertised": ALL, } -api.mailinglists.register(ListResource()) +api.mailinglists.register(NonWgMailingListResource()) + -class SubscribedResource(ModelResource): - lists = ToManyField(ListResource, 'lists', null=True) - class Meta: - queryset = Subscribed.objects.all() - serializer = api.Serializer() - cache = SimpleCache() - #resource_name = 'subscribed' - ordering = ['id', ] - filtering = { - "id": ALL, - "time": ALL, - "email": ALL, - "lists": ALL_WITH_RELATIONS, - } -api.mailinglists.register(SubscribedResource()) diff --git a/ietf/mailinglists/tests.py b/ietf/mailinglists/tests.py index 0c983da80c..8c5a550dfc 100644 --- a/ietf/mailinglists/tests.py +++ b/ietf/mailinglists/tests.py @@ -9,7 +9,7 @@ import debug # pyflakes:ignore from ietf.group.factories import GroupFactory -from ietf.mailinglists.factories import ListFactory +from ietf.mailinglists.factories import NonWgMailingListFactory from ietf.utils.test_utils import TestCase @@ -32,23 +32,15 @@ def test_groups(self): def test_nonwg(self): - groups = list() - groups.append(GroupFactory(type_id='wg', acronym='mars', list_archive='https://ietf.org/mars')) - groups.append(GroupFactory(type_id='wg', acronym='ames', state_id='conclude', list_archive='https://ietf.org/ames')) - groups.append(GroupFactory(type_id='wg', acronym='newstuff', state_id='bof', list_archive='https://ietf.org/newstuff')) - groups.append(GroupFactory(type_id='rg', acronym='research', list_archive='https://irtf.org/research')) - lists = ListFactory.create_batch(7) + + lists = NonWgMailingListFactory.create_batch(7) url = urlreverse("ietf.mailinglists.views.nonwg") r = self.client.get(url) + q = PyQuery(r.content) for l in lists: - if l.advertised: self.assertContains(r, l.name) self.assertContains(r, l.description) - else: - self.assertNotContains(r, l.name, html=True) - self.assertNotContains(r, l.description, html=True) + self.assertNotEqual(q(f"a[href=\"{l.info_url()}\"]"), []) - for g in groups: - self.assertNotContains(r, g.acronym, html=True) diff --git a/ietf/mailinglists/views.py b/ietf/mailinglists/views.py index 51c31c546f..460f30e164 100644 --- a/ietf/mailinglists/views.py +++ b/ietf/mailinglists/views.py @@ -1,33 +1,25 @@ # Copyright The IETF Trust 2007-2022, All Rights Reserved -import re - from django.shortcuts import render -import debug # pyflakes:ignore +import debug # pyflakes:ignore from ietf.group.models import Group -from ietf.mailinglists.models import List +from 
ietf.mailinglists.models import NonWgMailingList + def groups(request): - groups = Group.objects.filter(type__features__acts_like_wg=True, list_archive__startswith='http').exclude(state__in=('bof', 'conclude')).order_by("acronym") + groups = ( + Group.objects.filter( + type__features__acts_like_wg=True, list_archive__startswith="http" + ) + .exclude(state__in=("bof", "conclude")) + .order_by("acronym") + ) + + return render(request, "mailinglists/group_archives.html", {"groups": groups}) - return render(request, "mailinglists/group_archives.html", { "groups": groups } ) def nonwg(request): - groups = Group.objects.filter(type__features__acts_like_wg=True).exclude(state__in=['bof']).order_by("acronym") - - #urls = [ g.list_archive for g in groups if '.ietf.org' in g.list_archive ] - - wg_lists = set() - for g in groups: - wg_lists.add(g.acronym) - match = re.search(r'^(https?://mailarchive.ietf.org/arch/(browse/|search/\?email-list=))(?P[^/]*)/?$', g.list_archive) - if match: - wg_lists.add(match.group('name').lower()) - - lists = List.objects.filter(advertised=True) - #debug.show('lists.count()') - lists = lists.exclude(name__in=wg_lists).order_by('name') - #debug.show('lists.count()') - return render(request, "mailinglists/nonwg.html", { "lists": lists } ) + lists = NonWgMailingList.objects.order_by("name") + return render(request, "mailinglists/nonwg.html", {"lists": lists}) diff --git a/ietf/mailtrigger/admin.py b/ietf/mailtrigger/admin.py index a60fd5b072..8c73f2ae02 100644 --- a/ietf/mailtrigger/admin.py +++ b/ietf/mailtrigger/admin.py @@ -1,9 +1,10 @@ -# Copyright The IETF Trust 2015-2019, All Rights Reserved +# Copyright The IETF Trust 2015-2025, All Rights Reserved from django.contrib import admin +from simple_history.admin import SimpleHistoryAdmin from ietf.mailtrigger.models import MailTrigger, Recipient -class RecipientAdmin(admin.ModelAdmin): +class RecipientAdmin(SimpleHistoryAdmin): list_display = [ 'slug', 'desc', 'template', 'has_code', ] def has_code(self, obj): return hasattr(obj,'gather_%s'%obj.slug) @@ -11,7 +12,7 @@ def has_code(self, obj): admin.site.register(Recipient, RecipientAdmin) -class MailTriggerAdmin(admin.ModelAdmin): +class MailTriggerAdmin(SimpleHistoryAdmin): list_display = [ 'slug', 'desc', ] filter_horizontal = [ 'to', 'cc', ] admin.site.register(MailTrigger, MailTriggerAdmin) diff --git a/ietf/mailtrigger/forms.py b/ietf/mailtrigger/forms.py index 366c429d8c..8d13c5edf3 100644 --- a/ietf/mailtrigger/forms.py +++ b/ietf/mailtrigger/forms.py @@ -11,6 +11,7 @@ class CcSelectForm(forms.Form): expansions = dict() # type: Dict[str, List[str]] cc_choices = forms.MultipleChoiceField( + required=False, label='Cc', choices=[], widget=forms.CheckboxSelectMultiple(), diff --git a/ietf/mailtrigger/migrations/0005_rfc_recipients.py b/ietf/mailtrigger/migrations/0005_rfc_recipients.py new file mode 100644 index 0000000000..dee49d9133 --- /dev/null +++ b/ietf/mailtrigger/migrations/0005_rfc_recipients.py @@ -0,0 +1,25 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.db import migrations + + +def forward(apps, schema_editor): + Recipient = apps.get_model("mailtrigger", "Recipient") + Recipient.objects.filter(slug="doc_authors").update( + template='{% if doc.type_id == "draft" or doc.type_id == "rfc" %}<{{doc.name}}@ietf.org>{% endif %}' + ) + + +def reverse(apps, schema_editor): + Recipient = apps.get_model("mailtrigger", "Recipient") + Recipient.objects.filter(slug="doc_authors").update( + template='{% if doc.type_id == "draft" 
%}<{{doc.name}}@ietf.org>{% endif %}'
+    )
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("mailtrigger", "0004_slides_approved"),
+    ]
+
+    operations = [migrations.RunPython(forward, reverse)]
diff --git a/ietf/mailtrigger/migrations/0006_call_for_adoption_and_last_call_issued.py b/ietf/mailtrigger/migrations/0006_call_for_adoption_and_last_call_issued.py
new file mode 100644
index 0000000000..7adad150eb
--- /dev/null
+++ b/ietf/mailtrigger/migrations/0006_call_for_adoption_and_last_call_issued.py
@@ -0,0 +1,43 @@
+# Copyright The IETF Trust 2023, All Rights Reserved
+
+from django.db import migrations
+
+
+def forward(apps, schema_editor):
+    MailTrigger = apps.get_model("mailtrigger", "MailTrigger")
+    Recipient = apps.get_model("mailtrigger", "Recipient")
+    recipients = list(
+        Recipient.objects.filter(
+            slug__in=(
+                "doc_group_mail_list",
+                "doc_authors",
+                "doc_group_chairs",
+                "doc_shepherd",
+            )
+        )
+    )
+    call_for_adoption = MailTrigger.objects.create(
+        slug="doc_wg_call_for_adoption_issued",
+        desc="Recipients when a working group call for adoption is issued",
+    )
+    call_for_adoption.to.add(*recipients)
+    wg_last_call = MailTrigger.objects.create(
+        slug="doc_wg_last_call_issued",
+        desc="Recipients when a working group last call is issued",
+    )
+    wg_last_call.to.add(*recipients)
+
+
+def reverse(apps, schema_editor):
+    MailTrigger = apps.get_model("mailtrigger", "MailTrigger")
+    MailTrigger.objects.filter(
+        slug__in=("doc_wg_call_for_adoption_issued", "doc_wg_last_call_issued")
+    ).delete()
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("mailtrigger", "0005_rfc_recipients"),
+    ]
+
+    operations = [migrations.RunPython(forward, reverse)]
diff --git a/ietf/mailtrigger/migrations/0007_historicalrecipient_historicalmailtrigger.py b/ietf/mailtrigger/migrations/0007_historicalrecipient_historicalmailtrigger.py
new file mode 100644
index 0000000000..d23b72d737
--- /dev/null
+++ b/ietf/mailtrigger/migrations/0007_historicalrecipient_historicalmailtrigger.py
@@ -0,0 +1,122 @@
+# Copyright The IETF Trust 2025, All Rights Reserved
+from io import StringIO
+
+from django.conf import settings
+from django.core import management
+from django.db import migrations, models
+import django.db.models.deletion
+import simple_history.models
+
+from ietf.utils.log import log
+
+
+def forward(apps, schema_editor):
+    # Fill in history for existing data using the populate_history management command
+    captured_stdout = StringIO()
+    captured_stderr = StringIO()
+    try:
+        management.call_command(
+            "populate_history",
+            "mailtrigger.MailTrigger",
+            "mailtrigger.Recipient",
+            stdout=captured_stdout,
+            stderr=captured_stderr,
+        )
+    except management.CommandError as err:
+        log(
+            "Failed to populate history for mailtrigger models.\n"
+            "\n"
+            f"stdout:\n{captured_stdout.getvalue() or ''}\n"
+            "\n"
+            f"stderr:\n{captured_stderr.getvalue() or ''}\n"
+        )
+        raise RuntimeError("Failed to populate history for mailtrigger models") from err
+    log(
+        "Populated history for mailtrigger models.\n"
+        "\n"
+        f"stdout:\n{captured_stdout.getvalue() or ''}\n"
+        "\n"
+        f"stderr:\n{captured_stderr.getvalue() or ''}\n"
+    )
+
+
+def reverse(apps, schema_editor):
+    pass  # nothing to do
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+        ("mailtrigger", "0006_call_for_adoption_and_last_call_issued"),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name="HistoricalRecipient",
+            fields=[
+                ("slug", 
models.CharField(db_index=True, max_length=32)), + ("desc", models.TextField(blank=True)), + ("template", models.TextField(blank=True, null=True)), + ("history_id", models.AutoField(primary_key=True, serialize=False)), + ("history_date", models.DateTimeField(db_index=True)), + ("history_change_reason", models.CharField(max_length=100, null=True)), + ( + "history_type", + models.CharField( + choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")], + max_length=1, + ), + ), + ( + "history_user", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="+", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "verbose_name": "historical recipient", + "verbose_name_plural": "historical recipients", + "ordering": ("-history_date", "-history_id"), + "get_latest_by": ("history_date", "history_id"), + }, + bases=(simple_history.models.HistoricalChanges, models.Model), + ), + migrations.CreateModel( + name="HistoricalMailTrigger", + fields=[ + ("slug", models.CharField(db_index=True, max_length=64)), + ("desc", models.TextField(blank=True)), + ("history_id", models.AutoField(primary_key=True, serialize=False)), + ("history_date", models.DateTimeField(db_index=True)), + ("history_change_reason", models.CharField(max_length=100, null=True)), + ( + "history_type", + models.CharField( + choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")], + max_length=1, + ), + ), + ( + "history_user", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="+", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "verbose_name": "historical mail trigger", + "verbose_name_plural": "historical mail triggers", + "ordering": ("-history_date", "-history_id"), + "get_latest_by": ("history_date", "history_id"), + }, + bases=(simple_history.models.HistoricalChanges, models.Model), + ), + migrations.RunPython(forward, reverse), + ] diff --git a/ietf/mailtrigger/migrations/0008_liaison_statement_incoming_and_outgoing_posted.py b/ietf/mailtrigger/migrations/0008_liaison_statement_incoming_and_outgoing_posted.py new file mode 100644 index 0000000000..189a783a2e --- /dev/null +++ b/ietf/mailtrigger/migrations/0008_liaison_statement_incoming_and_outgoing_posted.py @@ -0,0 +1,72 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations + + +def forward(apps, schema_editor): + Mailtrigger = apps.get_model("mailtrigger", "MailTrigger") + Recipient = apps.get_model("mailtrigger", "Recipient") + recipients_to = Recipient.objects.get(pk="liaison_to_contacts") + recipients_cc = list( + Recipient.objects.filter( + slug__in=( + "liaison_cc", + "liaison_coordinators", + "liaison_response_contacts", + "liaison_technical_contacts", + ) + ) + ) + recipient_from = Recipient.objects.get(pk="liaison_from_contact") + + liaison_posted_outgoing = Mailtrigger.objects.create( + slug="liaison_statement_posted_outgoing", + desc="Recipients for a message when a new outgoing liaison statement is posted", + ) + liaison_posted_outgoing.to.add(recipients_to) + liaison_posted_outgoing.cc.add(*recipients_cc) + liaison_posted_outgoing.cc.add(recipient_from) + + liaison_posted_incoming = Mailtrigger.objects.create( + slug="liaison_statement_posted_incoming", + desc="Recipients for a message when a new incoming liaison statement is posted", + ) + liaison_posted_incoming.to.add(recipients_to) + liaison_posted_incoming.cc.add(*recipients_cc) + + Mailtrigger.objects.filter(slug=("liaison_statement_posted")).delete() + + 
+def reverse(apps, schema_editor): + Mailtrigger = apps.get_model("mailtrigger", "MailTrigger") + Recipient = apps.get_model("mailtrigger", "Recipient") + + Mailtrigger.objects.filter( + slug__in=( + "liaison_statement_posted_outgoing", + "liaison_statement_posted_incoming", + ) + ).delete() + + liaison_statement_posted = Mailtrigger.objects.create( + slug="liaison_statement_posted", + desc="Recipients for a message when a new liaison statement is posted", + ) + + liaison_to_contacts = Recipient.objects.get(slug="liaison_to_contacts") + recipients_ccs = Recipient.objects.filter( + slug__in=( + "liaison_cc", + "liaison_coordinators", + "liaison_response_contacts", + "liaison_technical_contacts", + ) + ) + liaison_statement_posted.to.add(liaison_to_contacts) + liaison_statement_posted.cc.add(*recipients_ccs) + + +class Migration(migrations.Migration): + dependencies = [("mailtrigger", "0007_historicalrecipient_historicalmailtrigger")] + + operations = [migrations.RunPython(forward, reverse)] diff --git a/ietf/mailtrigger/models.py b/ietf/mailtrigger/models.py index 171dbd85ed..435729f893 100644 --- a/ietf/mailtrigger/models.py +++ b/ietf/mailtrigger/models.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2015-2020, All Rights Reserved +# Copyright The IETF Trust 2015-2025, All Rights Reserved # -*- coding: utf-8 -*- @@ -7,6 +7,8 @@ from email.utils import parseaddr +from simple_history.models import HistoricalRecords + from ietf.doc.utils_bofreq import bofreq_editors, bofreq_responsible from ietf.utils.mail import formataddr, get_email_addresses_from_text from ietf.group.models import Group, Role @@ -36,8 +38,9 @@ def clean_duplicates(addrlist): class MailTrigger(models.Model): slug = models.CharField(max_length=64, primary_key=True) desc = models.TextField(blank=True) - to = models.ManyToManyField('Recipient', blank=True, related_name='used_in_to') - cc = models.ManyToManyField('Recipient', blank=True, related_name='used_in_cc') + to = models.ManyToManyField('mailtrigger.Recipient', blank=True, related_name='used_in_to') + cc = models.ManyToManyField('mailtrigger.Recipient', blank=True, related_name='used_in_cc') + history = HistoricalRecords() class Meta: ordering = ["slug"] @@ -49,6 +52,7 @@ class Recipient(models.Model): slug = models.CharField(max_length=32, primary_key=True) desc = models.TextField(blank=True) template = models.TextField(null=True, blank=True) + history = HistoricalRecords() class Meta: ordering = ["slug"] @@ -96,35 +100,35 @@ def gather_doc_affecteddoc_authors(self, **kwargs): addrs = [] if 'doc' in kwargs: for reldoc in kwargs['doc'].related_that_doc(('conflrev','tohist','tois','tops')): - addrs.extend(Recipient.objects.get(slug='doc_authors').gather(**{'doc':reldoc.document})) + addrs.extend(Recipient.objects.get(slug='doc_authors').gather(**{'doc':reldoc})) return addrs def gather_doc_affecteddoc_group_chairs(self, **kwargs): addrs = [] if 'doc' in kwargs: for reldoc in kwargs['doc'].related_that_doc(('conflrev','tohist','tois','tops')): - addrs.extend(Recipient.objects.get(slug='doc_group_chairs').gather(**{'doc':reldoc.document})) + addrs.extend(Recipient.objects.get(slug='doc_group_chairs').gather(**{'doc':reldoc})) return addrs def gather_doc_affecteddoc_notify(self, **kwargs): addrs = [] if 'doc' in kwargs: for reldoc in kwargs['doc'].related_that_doc(('conflrev','tohist','tois','tops')): - addrs.extend(Recipient.objects.get(slug='doc_notify').gather(**{'doc':reldoc.document})) + addrs.extend(Recipient.objects.get(slug='doc_notify').gather(**{'doc':reldoc})) 
return addrs def gather_conflict_review_stream_manager(self, **kwargs): addrs = [] if 'doc' in kwargs: for reldoc in kwargs['doc'].related_that_doc(('conflrev',)): - addrs.extend(Recipient.objects.get(slug='doc_stream_manager').gather(**{'doc':reldoc.document})) + addrs.extend(Recipient.objects.get(slug='doc_stream_manager').gather(**{'doc':reldoc})) return addrs def gather_conflict_review_steering_group(self,**kwargs): addrs = [] if 'doc' in kwargs: for reldoc in kwargs['doc'].related_that_doc(('conflrev',)): - if reldoc.document.stream_id=='irtf': + if reldoc.stream_id=='irtf': addrs.append('"Internet Research Steering Group" ') return addrs @@ -138,16 +142,16 @@ def gather_group_steering_group(self,**kwargs): def gather_stream_managers(self, **kwargs): addrs = [] manager_map = dict( - ise = '', - irtf = '', - ietf = '', - iab = '', + ise = [''], + irtf = [''], + ietf = [''], + iab = [''], editorial = Role.objects.filter(group__acronym="rsab",name_id="chair").values_list("email__address", flat=True), ) if 'streams' in kwargs: for stream in kwargs['streams']: if stream in manager_map: - addrs.append(manager_map[stream]) + addrs.extend(manager_map[stream]) return addrs def gather_doc_stream_manager(self, **kwargs): @@ -234,7 +238,7 @@ def gather_submission_submitter(self, **kwargs): try: submitter = Alias.objects.get(name=submission.submitter).person if submitter and submitter.email(): - addrs.extend(["%s <%s>" % (submitter.name, submitter.email().address)]) + addrs.append(f"{submitter.name} <{submitter.email().address}>") except (Alias.DoesNotExist, Alias.MultipleObjectsReturned): pass return addrs diff --git a/ietf/mailtrigger/resources.py b/ietf/mailtrigger/resources.py index eb5466618a..daca055bf4 100644 --- a/ietf/mailtrigger/resources.py +++ b/ietf/mailtrigger/resources.py @@ -7,7 +7,7 @@ from ietf import api -from ietf.mailtrigger.models import Recipient, MailTrigger +from ietf.mailtrigger.models import MailTrigger, Recipient class RecipientResource(ModelResource): @@ -37,3 +37,43 @@ class Meta: } api.mailtrigger.register(MailTriggerResource()) +from ietf.utils.resources import UserResource +class HistoricalMailTriggerResource(ModelResource): + history_user = ToOneField(UserResource, 'history_user', null=True) + class Meta: + queryset = MailTrigger.history.model.objects.all() + serializer = api.Serializer() + cache = SimpleCache() + #resource_name = 'historicalmailtrigger' + ordering = ['history_id', ] + filtering = { + "slug": ALL, + "desc": ALL, + "history_id": ALL, + "history_date": ALL, + "history_change_reason": ALL, + "history_type": ALL, + "history_user": ALL_WITH_RELATIONS, + } +api.mailtrigger.register(HistoricalMailTriggerResource()) + +from ietf.utils.resources import UserResource +class HistoricalRecipientResource(ModelResource): + history_user = ToOneField(UserResource, 'history_user', null=True) + class Meta: + queryset = Recipient.history.model.objects.all() + serializer = api.Serializer() + cache = SimpleCache() + #resource_name = 'historicalrecipient' + ordering = ['history_id', ] + filtering = { + "slug": ALL, + "desc": ALL, + "template": ALL, + "history_id": ALL, + "history_date": ALL, + "history_change_reason": ALL, + "history_type": ALL, + "history_user": ALL_WITH_RELATIONS, + } +api.mailtrigger.register(HistoricalRecipientResource()) diff --git a/ietf/mailtrigger/utils.py b/ietf/mailtrigger/utils.py index 496f20dc28..bcdaf5e44e 100644 --- a/ietf/mailtrigger/utils.py +++ b/ietf/mailtrigger/utils.py @@ -2,44 +2,61 @@ from collections import namedtuple -import 
debug # pyflakes:ignore +import debug # pyflakes:ignore from ietf.mailtrigger.models import MailTrigger, Recipient from ietf.submit.models import Submission from ietf.utils.mail import excludeaddrs -class AddrLists(namedtuple('AddrLists',['to','cc'])): - __slots__ = () +EMAIL_ALIASES = { + "IETFCHAIR": "The IETF Chair ", + "IESG": "The IESG ", + "IAB": "The IAB ", + "IABCHAIR": "The IAB Chair ", +} + - def as_strings(self,compact=True): +class AddrLists(namedtuple("AddrLists", ["to", "cc"])): + __slots__ = () + def as_strings(self, compact=True): separator = ", " if compact else ",\n " to_string = separator.join(self.to) cc_string = separator.join(self.cc) - return namedtuple('AddrListsAsStrings',['to','cc'])(to=to_string,cc=cc_string) + return namedtuple("AddrListsAsStrings", ["to", "cc"])( + to=to_string, cc=cc_string + ) -def gather_address_lists(slug, skipped_recipients=None, create_from_slug_if_not_exists=None, - desc_if_not_exists=None, **kwargs): - mailtrigger = get_mailtrigger(slug, create_from_slug_if_not_exists, desc_if_not_exists) +def gather_address_lists( + slug, + skipped_recipients=None, + create_from_slug_if_not_exists=None, + desc_if_not_exists=None, + **kwargs +): + mailtrigger = get_mailtrigger( + slug, create_from_slug_if_not_exists, desc_if_not_exists + ) to = set() for recipient in mailtrigger.to.all(): to.update(recipient.gather(**kwargs)) - to.discard('') + to.discard("") if skipped_recipients: to = excludeaddrs(to, skipped_recipients) cc = set() for recipient in mailtrigger.cc.all(): cc.update(recipient.gather(**kwargs)) - cc.discard('') + cc.discard("") if skipped_recipients: cc = excludeaddrs(cc, skipped_recipients) - return AddrLists(to=sorted(list(to)),cc=sorted(list(cc))) + return AddrLists(to=sorted(list(to)), cc=sorted(list(cc))) + def get_mailtrigger(slug, create_from_slug_if_not_exists, desc_if_not_exists): try: @@ -50,77 +67,158 @@ def get_mailtrigger(slug, create_from_slug_if_not_exists, desc_if_not_exists): mailtrigger = MailTrigger.objects.create(slug=slug, desc=desc_if_not_exists) mailtrigger.to.set(template.to.all()) mailtrigger.cc.set(template.cc.all()) - if slug.startswith('review_completed') and slug.endswith('early'): - mailtrigger.cc.remove('ietf_last_call') + if slug.startswith("review_completed") and slug.endswith("early"): + mailtrigger.cc.remove("ietf_last_call") else: raise return mailtrigger -def gather_relevant_expansions(**kwargs): - - def starts_with(prefix): - return MailTrigger.objects.filter(slug__startswith=prefix).values_list('slug',flat=True) - - relevant = set() - - if 'doc' in kwargs: - - doc = kwargs['doc'] - - relevant.add('doc_state_edited') - - if not doc.type_id in ['bofreq', 'statement']: - relevant.update(['doc_telechat_details_changed','ballot_deferred','iesg_ballot_saved']) +def get_contacts_for_liaison_messages_for_group_primary(group): + from ietf.liaisons.views import contact_email_from_role + + '''Returns list of emails to use in liaison message for group + ''' + emails = [] + + # role based emails + if group.acronym in ('ietf','iesg'): + emails.append(EMAIL_ALIASES['IESG']) + emails.append(EMAIL_ALIASES['IETFCHAIR']) + elif group.acronym in ('iab'): + emails.append(EMAIL_ALIASES['IAB']) + emails.append(EMAIL_ALIASES['IABCHAIR']) + elif group.type_id == 'area': + emails.append(EMAIL_ALIASES['IETFCHAIR']) + ad_roles = group.role_set.filter(name='ad') + emails.extend([ contact_email_from_role(r) for r in ad_roles ]) + elif group.type_id == 'wg': + ad_roles = group.parent.role_set.filter(name='ad') + emails.extend([ 
contact_email_from_role(r) for r in ad_roles ]) + chair_roles = group.role_set.filter(name='chair') + emails.extend([ contact_email_from_role(r) for r in chair_roles ]) + if group.list_email: + emails.append('{} Discussion List <{}>'.format(group.name,group.list_email)) + elif group.type_id == 'sdo': + liaiman_roles = group.role_set.filter(name='liaiman') + emails.extend([ contact_email_from_role(r) for r in liaiman_roles ]) + + # explicit CCs + liaison_cc_roles = group.role_set.filter(name='liaison_cc_contact') + emails.extend([ contact_email_from_role(r) for r in liaison_cc_roles ]) + + return emails + + +def get_contacts_for_liaison_messages_for_group_secondary(group): + from ietf.liaisons.views import contacts_from_roles + + '''Returns default contacts for groups as a comma separated string''' + # use explicit default contacts if defined + explicit_contacts = contacts_from_roles(group.role_set.filter(name='liaison_contact')) + if explicit_contacts: + return explicit_contacts + + # otherwise construct based on group type + contacts = [] + if group.type_id == 'area': + roles = group.role_set.filter(name='ad') + contacts.append(contacts_from_roles(roles)) + elif group.type_id == 'wg': + roles = group.role_set.filter(name='chair') + contacts.append(contacts_from_roles(roles)) + elif group.acronym == 'ietf': + contacts.append(EMAIL_ALIASES['IETFCHAIR']) + elif group.acronym == 'iab': + contacts.append(EMAIL_ALIASES['IABCHAIR']) + elif group.acronym == 'iesg': + contacts.append(EMAIL_ALIASES['IESG']) + + return ','.join(contacts) - if doc.type_id in ['draft','statchg']: - relevant.update(starts_with('last_call_')) - if doc.type_id == 'draft': - relevant.update(starts_with('doc_')) - relevant.update(starts_with('resurrection_')) - relevant.update(['ipr_posted_on_doc',]) - if doc.stream_id == 'ietf': - relevant.update(['ballot_approved_ietf_stream','pubreq_iesg']) +def gather_relevant_expansions(**kwargs): + def starts_with(prefix): + return MailTrigger.objects.filter(slug__startswith=prefix).values_list( + "slug", flat=True + ) + + relevant = set() + + if "doc" in kwargs: + doc = kwargs["doc"] + + relevant.add("doc_state_edited") + + if not doc.type_id in ["bofreq", "statement", "rfc"]: + relevant.update( + ["doc_telechat_details_changed", "ballot_deferred", "iesg_ballot_saved"] + ) + + if doc.type_id in ["draft", "statchg"]: + relevant.update(starts_with("last_call_")) + + if doc.type_id == "rfc": + relevant.update( + [ + "doc_added_comment", + "doc_external_resource_change_requested", + "doc_state_edited", + "ipr_posted_on_doc", + ] + ) + + if doc.type_id == "draft": + relevant.update(starts_with("doc_")) + relevant.update(starts_with("resurrection_")) + relevant.update( + [ + "ipr_posted_on_doc", + ] + ) + if doc.stream_id == "ietf": + relevant.update(["ballot_approved_ietf_stream", "pubreq_iesg"]) else: - relevant.update(['pubreq_rfced']) - last_submission = Submission.objects.filter(name=doc.name,state='posted').order_by('-rev').first() - if last_submission and 'submission' not in kwargs: - kwargs['submission'] = last_submission - - if doc.type_id == 'conflrev': - relevant.update(['conflrev_requested','ballot_approved_conflrev']) - if doc.type_id == 'charter': - relevant.update(['charter_external_review','ballot_approved_charter']) - - if doc.type_id == 'bofreq': - relevant.update(starts_with('bofreq')) - - if 'group' in kwargs: - - relevant.update(starts_with('group_')) - relevant.update(starts_with('milestones_')) - group = kwargs['group'] + relevant.update(["pubreq_rfced"]) + 
last_submission = ( + Submission.objects.filter(name=doc.name, state="posted") + .order_by("-rev") + .first() + ) + if last_submission and "submission" not in kwargs: + kwargs["submission"] = last_submission + + if doc.type_id == "conflrev": + relevant.update(["conflrev_requested", "ballot_approved_conflrev"]) + if doc.type_id == "charter": + relevant.update(["charter_external_review", "ballot_approved_charter"]) + + if doc.type_id == "bofreq": + relevant.update(starts_with("bofreq")) + + if "group" in kwargs: + relevant.update(starts_with("group_")) + relevant.update(starts_with("milestones_")) + group = kwargs["group"] if group.features.acts_like_wg: - relevant.update(starts_with('session_')) + relevant.update(starts_with("session_")) if group.features.has_chartering_process: - relevant.update(['charter_external_review',]) - - if 'submission' in kwargs: + relevant.update( + [ + "charter_external_review", + ] + ) - relevant.update(starts_with('sub_')) + if "submission" in kwargs: + relevant.update(starts_with("sub_")) rule_list = [] for mailtrigger in MailTrigger.objects.filter(slug__in=relevant): - addrs = gather_address_lists(mailtrigger.slug,**kwargs) + addrs = gather_address_lists(mailtrigger.slug, **kwargs) if addrs.to or addrs.cc: - rule_list.append((mailtrigger.slug,mailtrigger.desc,addrs.to,addrs.cc)) + rule_list.append((mailtrigger.slug, mailtrigger.desc, addrs.to, addrs.cc)) return sorted(rule_list) -def get_base_submission_message_address(): - return Recipient.objects.get(slug='submission_manualpost_handling').gather()[0] def get_base_ipr_request_address(): - return Recipient.objects.get(slug='ipr_requests').gather()[0] - - + return Recipient.objects.get(slug="ipr_requests").gather()[0] diff --git a/ietf/meeting/admin.py b/ietf/meeting/admin.py index e975dd38a6..03abf5c029 100644 --- a/ietf/meeting/admin.py +++ b/ietf/meeting/admin.py @@ -3,11 +3,14 @@ from django.contrib import admin +from django.db.models import Count from ietf.meeting.models import (Attended, Meeting, Room, Session, TimeSlot, Constraint, Schedule, SchedTimeSessAssignment, ResourceAssociation, FloorPlan, UrlResource, SessionPresentation, ImportantDate, SlideSubmission, SchedulingEvent, BusinessConstraint, - ProceedingsMaterial, MeetingHost) + ProceedingsMaterial, MeetingHost, Registration, RegistrationTicket, + AttendanceTypeName) +from ietf.utils.admin import SaferTabularInline class UrlResourceAdmin(admin.ModelAdmin): @@ -16,7 +19,7 @@ class UrlResourceAdmin(admin.ModelAdmin): raw_id_fields = ['room', ] admin.site.register(UrlResource, UrlResourceAdmin) -class UrlResourceInline(admin.TabularInline): +class UrlResourceInline(SaferTabularInline): model = UrlResource class RoomAdmin(admin.ModelAdmin): @@ -26,7 +29,7 @@ class RoomAdmin(admin.ModelAdmin): admin.site.register(Room, RoomAdmin) -class RoomInline(admin.TabularInline): +class RoomInline(SaferTabularInline): model = Room class MeetingAdmin(admin.ModelAdmin): @@ -91,7 +94,7 @@ def name_lower(self, instance): admin.site.register(Constraint, ConstraintAdmin) -class SchedulingEventInline(admin.TabularInline): +class SchedulingEventInline(SaferTabularInline): model = SchedulingEvent raw_id_fields = ["by"] @@ -189,7 +192,7 @@ class ImportantDateAdmin(admin.ModelAdmin): class SlideSubmissionAdmin(admin.ModelAdmin): model = SlideSubmission list_display = ['session', 'submitter', 'title'] - raw_id_fields = ['submitter', 'session'] + raw_id_fields = ['submitter', 'session', 'doc'] admin.site.register(SlideSubmission, SlideSubmissionAdmin) @@ -213,3 +216,76 @@ 
class AttendedAdmin(admin.ModelAdmin): search_fields = ["person__name", "session__group__acronym", "session__meeting__number", "session__name", "session__purpose__name"] raw_id_fields= ["person", "session"] admin.site.register(Attended, AttendedAdmin) + +class MeetingFilter(admin.SimpleListFilter): + title = 'Meeting Filter' + parameter_name = 'meeting_id' + + def lookups(self, request, model_admin): + # only include meetings with registration records + meetings = Meeting.objects.filter(type='ietf').annotate(reg_count=Count('registration')).filter(reg_count__gt=0).order_by('-date') + choices = meetings.values_list('id', 'number') + return choices + + def queryset(self, request, queryset): + if self.value(): + return queryset.filter(meeting__id=self.value()) + return queryset + +class AttendanceFilter(admin.SimpleListFilter): + title = 'Attendance Type' + parameter_name = 'attendance_type' + + def lookups(self, request, model_admin): + choices = AttendanceTypeName.objects.all().values_list('slug', 'name') + return choices + + def queryset(self, request, queryset): + if self.value(): + return queryset.filter(tickets__attendance_type__slug=self.value()).distinct() + return queryset + +class RegistrationTicketInline(SaferTabularInline): + model = RegistrationTicket + +class RegistrationAdmin(admin.ModelAdmin): + model = Registration + list_filter = [AttendanceFilter, MeetingFilter] + list_display = ['meeting', 'first_name', 'last_name', 'display_attendance', 'affiliation', 'country_code', 'email', ] + search_fields = ['first_name', 'last_name', 'affiliation', 'country_code', 'email', ] + raw_id_fields = ['person'] + inlines = [RegistrationTicketInline, ] + ordering = ['-meeting__date', 'last_name'] + + def display_attendance(self, instance): + '''Only display the most significant ticket in the list. 
+ To see all the tickets inspect the individual instance + ''' + if instance.tickets.filter(attendance_type__slug='onsite').exists(): + return 'onsite' + elif instance.tickets.filter(attendance_type__slug='remote').exists(): + return 'remote' + elif instance.tickets.filter(attendance_type__slug='hackathon_onsite').exists(): + return 'hackathon onsite' + elif instance.tickets.filter(attendance_type__slug='hackathon_remote').exists(): + return 'hackathon remote' + display_attendance.short_description = "Attendance" # type: ignore # https://github.com/python/mypy/issues/2087 + +admin.site.register(Registration, RegistrationAdmin) + +class RegistrationTicketAdmin(admin.ModelAdmin): + model = RegistrationTicket + list_filter = ['attendance_type', ] + # not available until Django 5.2, the name of a related field, using the __ notation + # list_display = ['registration__meeting', 'registration', 'attendance_type', 'ticket_type', 'registration__email'] + # list_select_related = ('registration',) + list_display = ['registration', 'attendance_type', 'ticket_type', 'display_meeting'] + search_fields = ['registration__first_name', 'registration__last_name', 'registration__email'] + raw_id_fields = ['registration'] + ordering = ['-registration__meeting__date', 'registration__last_name'] + + def display_meeting(self, instance): + return instance.registration.meeting.number + display_meeting.short_description = "Meeting" # type: ignore # https://github.com/python/mypy/issues/2087 + +admin.site.register(RegistrationTicket, RegistrationTicketAdmin) diff --git a/ietf/meeting/factories.py b/ietf/meeting/factories.py index 69c1f0421b..fc0ce8387c 100644 --- a/ietf/meeting/factories.py +++ b/ietf/meeting/factories.py @@ -9,9 +9,10 @@ from django.core.files.base import ContentFile from django.db.models import Q +from ietf.doc.storage_utils import store_str from ietf.meeting.models import (Attended, Meeting, Session, SchedulingEvent, Schedule, TimeSlot, SessionPresentation, FloorPlan, Room, SlideSubmission, Constraint, - MeetingHost, ProceedingsMaterial) + MeetingHost, ProceedingsMaterial, Registration, RegistrationTicket) from ietf.name.models import (ConstraintName, SessionStatusName, ProceedingsMaterialTypeName, TimerangeName, SessionPurposeName) from ietf.doc.factories import ProceedingsMaterialDocFactory @@ -239,6 +240,10 @@ class Meta: make_file = factory.PostGeneration( lambda obj, create, extracted, **kwargs: open(obj.staged_filepath(),'a').close() ) + + store_submission = factory.PostGeneration( + lambda obj, create, extracted, **kwargs: store_str("staging", obj.filename, "") + ) class ConstraintFactory(factory.django.DjangoModelFactory): class Meta: @@ -313,3 +318,48 @@ class Meta: session = factory.SubFactory(SessionFactory) person = factory.SubFactory(PersonFactory) + + +class RegistrationFactory(factory.django.DjangoModelFactory): + """ + This will create an associated onsite week_pass ticket by default. 
+ Methods of calling: + + RegistrationFactory() create a ticket with defaults, onsite + RegistrationFactory(with_ticket=True) same as above + RegistrationFactory(with_ticket={'attendance_type_id': 'remote'}) creates ticket with overrides + RegistrationFactory(with_ticket=False) does not create a ticket + """ + class Meta: + model = Registration + skip_postgeneration_save = True + + meeting = factory.SubFactory(MeetingFactory) + person = factory.SubFactory(PersonFactory) + email = factory.LazyAttribute(lambda obj: obj.person.email()) + first_name = factory.LazyAttribute(lambda obj: obj.person.first_name()) + last_name = factory.LazyAttribute(lambda obj: obj.person.last_name()) + affiliation = factory.Faker('company') + country_code = factory.Faker('country_code') + attended = False + checkedin = False + + @factory.post_generation + def with_ticket(self, create, extracted, **kwargs): + if not create: + return + if extracted is False: + # Explicitly disable ticket creation + return + ticket_kwargs = extracted if isinstance(extracted, dict) else {} + RegistrationTicketFactory(registration=self, **ticket_kwargs) + + +class RegistrationTicketFactory(factory.django.DjangoModelFactory): + class Meta: + model = RegistrationTicket + skip_postgeneration_save = True + + registration = factory.SubFactory(RegistrationFactory) + attendance_type_id = factory.LazyAttribute(lambda _: 'onsite') + ticket_type_id = factory.LazyAttribute(lambda _: 'week_pass') diff --git a/ietf/meeting/forms.py b/ietf/meeting/forms.py index 822f56b97c..e5b1697f86 100644 --- a/ietf/meeting/forms.py +++ b/ietf/meeting/forms.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2016-2020, All Rights Reserved +# Copyright The IETF Trust 2016-2025, All Rights Reserved # -*- coding: utf-8 -*- @@ -15,27 +15,38 @@ from django.core import validators from django.core.exceptions import ValidationError from django.forms import BaseInlineFormSet +from django.template.defaultfilters import pluralize from django.utils.functional import cached_property +from django.utils.safestring import mark_safe import debug # pyflakes:ignore -from ietf.doc.models import Document, DocAlias, State, NewRevisionDocEvent +from ietf.doc.models import Document, State, NewRevisionDocEvent from ietf.group.models import Group from ietf.group.utils import groups_managed_by -from ietf.meeting.models import Session, Meeting, Schedule, countries, timezones, TimeSlot, Room +from ietf.meeting.models import (Session, Meeting, Schedule, COUNTRIES, TIMEZONES, TimeSlot, Room, + Constraint, ResourceAssociation) from ietf.meeting.helpers import get_next_interim_number, make_materials_directories from ietf.meeting.helpers import is_interim_meeting_approved, get_next_agenda_name from ietf.message.models import Message -from ietf.name.models import TimeSlotTypeName, SessionPurposeName +from ietf.name.models import TimeSlotTypeName, SessionPurposeName, TimerangeName, ConstraintName +from ietf.person.fields import SearchablePersonsField from ietf.person.models import Person -from ietf.utils.fields import DatepickerDateField, DurationField, MultiEmailField, DatepickerSplitDateTimeWidget +from ietf.utils import log +from ietf.utils.fields import ( + DatepickerDateField, + DatepickerSplitDateTimeWidget, + DurationField, + ModelMultipleChoiceField, + MultiEmailField, +) +from ietf.utils.html import clean_text_field from ietf.utils.validators import ( validate_file_size, validate_mime_type, validate_file_extension, validate_no_html_frame) -# need to insert empty option for use in ChoiceField -# 
countries.insert(0, ('', '-'*9 )) -countries.insert(0, ('', '-' * 9)) -timezones.insert(0, ('', '-' * 9)) +NUM_SESSION_CHOICES = (('', '--Please select'), ('1', '1'), ('2', '2')) +SESSION_TIME_RELATION_CHOICES = (('', 'No preference'),) + Constraint.TIME_RELATION_CHOICES +JOINT_FOR_SESSION_CHOICES = (('1', 'First session'), ('2', 'Second session'), ('3', 'Third session'), ) # ------------------------------------------------- # Helpers @@ -73,6 +84,27 @@ def duration_string(duration): return string +def allowed_conflicting_groups(): + return Group.objects.filter( + type__in=['wg', 'ag', 'rg', 'rag', 'program', 'edwg'], + state__in=['bof', 'proposed', 'active']) + + +def check_conflict(groups, source_group): + ''' + Takes a string which is a list of group acronyms. Checks that they are all active groups + ''' + # convert to python list (allow space or comma separated lists) + items = groups.replace(',', ' ').split() + active_groups = allowed_conflicting_groups() + for group in items: + if group == source_group.acronym: + raise forms.ValidationError("Cannot declare a conflict with the same group: %s" % group) + + if not active_groups.filter(acronym=group): + raise forms.ValidationError("Invalid or inactive group acronym: %s" % group) + + # ------------------------------------------------- # Forms # ------------------------------------------------- @@ -134,12 +166,12 @@ class InterimMeetingModelForm(forms.ModelForm): approved = forms.BooleanField(required=False) city = forms.CharField(max_length=255, required=False) city.widget.attrs['placeholder'] = "City" - country = forms.ChoiceField(choices=countries, required=False) + country = forms.ChoiceField(choices=COUNTRIES, required=False) country.widget.attrs['class'] = "select2-field" country.widget.attrs['data-max-entries'] = 1 country.widget.attrs['data-placeholder'] = "Country" country.widget.attrs['data-minimum-input-length'] = 0 - time_zone = forms.ChoiceField(choices=timezones) + time_zone = forms.ChoiceField(choices=TIMEZONES) time_zone.widget.attrs['class'] = "select2-field" time_zone.widget.attrs['data-max-entries'] = 1 time_zone.widget.attrs['data-minimum-input-length'] = 0 @@ -341,8 +373,7 @@ def save_agenda(self): # FIXME: What about agendas in html or markdown format? 
uploaded_filename='{}-00.txt'.format(filename)) doc.set_state(State.objects.get(type__slug=doc.type.slug, slug='active')) - DocAlias.objects.create(name=doc.name).docs.add(doc) - self.instance.sessionpresentation_set.create(document=doc, rev=doc.rev) + self.instance.presentations.create(document=doc, rev=doc.rev) NewRevisionDocEvent.objects.create( type='new_revision', by=self.user.person, @@ -356,12 +387,19 @@ def save_agenda(self): os.makedirs(directory) with io.open(path, "w", encoding='utf-8') as file: file.write(self.cleaned_data['agenda']) + doc.store_str(doc.uploaded_filename, self.cleaned_data['agenda']) class InterimAnnounceForm(forms.ModelForm): class Meta: model = Message - fields = ('to', 'frm', 'cc', 'bcc', 'reply_to', 'subject', 'body') + fields = ('to', 'cc', 'frm', 'subject', 'body') + + def __init__(self, *args, **kwargs): + super(InterimAnnounceForm, self).__init__(*args, **kwargs) + self.fields['frm'].label='From' + self.fields['frm'].widget.attrs['readonly'] = True + self.fields['to'].widget.attrs['readonly'] = True def save(self, *args, **kwargs): user = kwargs.pop('user') @@ -375,7 +413,8 @@ def save(self, *args, **kwargs): class InterimCancelForm(forms.Form): group = forms.CharField(max_length=255, required=False) date = forms.DateField(required=False) - comments = forms.CharField(required=False, widget=forms.Textarea(attrs={'placeholder': 'enter optional comments here'}), strip=False) + # max_length must match Session.agenda_note + comments = forms.CharField(max_length=512, required=False, widget=forms.Textarea(attrs={'placeholder': 'enter optional comments here'}), strip=False) def __init__(self, *args, **kwargs): super(InterimCancelForm, self).__init__(*args, **kwargs) @@ -466,6 +505,9 @@ def __init__(self, show_apply_to_all_checkbox, *args, **kwargs): class UploadMinutesForm(ApplyToAllFileUploadForm): doc_type = 'minutes' +class UploadNarrativeMinutesForm(ApplyToAllFileUploadForm): + doc_type = 'narrativeminutes' + class UploadAgendaForm(ApplyToAllFileUploadForm): doc_type = 'agenda' @@ -474,9 +516,12 @@ class UploadAgendaForm(ApplyToAllFileUploadForm): class UploadSlidesForm(ApplyToAllFileUploadForm): doc_type = 'slides' title = forms.CharField(max_length=255) + approved = forms.BooleanField(label='Auto-approve', initial=True, required=False) - def __init__(self, session, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, session, show_apply_to_all_checkbox, can_manage, *args, **kwargs): + super().__init__(show_apply_to_all_checkbox, *args, **kwargs) + if not can_manage: + self.fields.pop('approved') self.session = session def clean_title(self): @@ -542,7 +587,7 @@ class SwapTimeslotsForm(forms.Form): queryset=TimeSlot.objects.none(), # default to none, fill in when we have a meeting widget=forms.TextInput, ) - rooms = forms.ModelMultipleChoiceField( + rooms = ModelMultipleChoiceField( required=True, queryset=Room.objects.none(), # default to none, fill in when we have a meeting widget=CsvModelPkInput, @@ -608,7 +653,7 @@ class TimeSlotCreateForm(forms.Form): ) duration = TimeSlotDurationField() show_location = forms.BooleanField(required=False, initial=True) - locations = forms.ModelMultipleChoiceField( + locations = ModelMultipleChoiceField( queryset=Room.objects.none(), widget=forms.CheckboxSelectMultiple, ) @@ -739,6 +784,9 @@ def __init__(self, group, *args, **kwargs): self.fields['purpose'].queryset = SessionPurposeName.objects.filter(pk__in=session_purposes) if not group.features.acts_like_wg: 
self.fields['requested_duration'].durations = [datetime.timedelta(minutes=m) for m in range(30, 241, 30)] + # add bootstrap classes + self.fields['purpose'].widget.attrs.update({'class': 'form-select'}) + self.fields['type'].widget.attrs.update({'class': 'form-select', 'aria-label': 'session type'}) class Meta: model = Session @@ -823,3 +871,296 @@ def sessiondetailsformset_factory(min_num=1, max_num=3): max_num=max_num, extra=max_num, # only creates up to max_num total ) + + +class SessionRequestStatusForm(forms.Form): + message = forms.CharField(widget=forms.Textarea(attrs={'rows': '3', 'cols': '80'}), strip=False) + + +class NameModelMultipleChoiceField(ModelMultipleChoiceField): + def label_from_instance(self, name): + return name.desc + + +class SessionRequestForm(forms.Form): + num_session = forms.ChoiceField( + choices=NUM_SESSION_CHOICES, + label="Number of sessions") + # session fields are added in __init__() + session_time_relation = forms.ChoiceField( + choices=SESSION_TIME_RELATION_CHOICES, + required=False, + label="Time between two sessions") + attendees = forms.IntegerField(label="Number of Attendees") + # FIXME: it would cleaner to have these be + # ModelMultipleChoiceField, and just customize the widgetry, that + # way validation comes for free (applies to this CharField and the + # constraints dynamically instantiated in __init__()) + joint_with_groups = forms.CharField(max_length=255, required=False) + joint_with_groups_selector = forms.ChoiceField(choices=[], required=False) # group select widget for prev field + joint_for_session = forms.ChoiceField(choices=JOINT_FOR_SESSION_CHOICES, required=False) + comments = forms.CharField( + max_length=200, + label='Special Requests', + help_text='i.e. restrictions on meeting times / days, etc. (limit 200 characters)', + required=False) + third_session = forms.BooleanField( + required=False, + help_text="Help") + resources = forms.MultipleChoiceField( + widget=forms.CheckboxSelectMultiple, + required=False, + label='Resources Requested') + bethere = SearchablePersonsField( + label="Participants who must be present", + required=False, + help_text=mark_safe('Do not include Area Directors and WG Chairs; the system already tracks their availability.')) + timeranges = NameModelMultipleChoiceField( + widget=forms.CheckboxSelectMultiple, + required=False, + label=mark_safe('Times during which this WG can not meet:
<br>Please explain any selections in Special Requests below.'), + queryset=TimerangeName.objects.all()) + adjacent_with_wg = forms.ChoiceField( + required=False, + label=mark_safe('Plan session adjacent with another WG:<br>
(Immediately before or after another WG, no break in between, in the same room.)')) + send_notifications = forms.BooleanField(label="Send notification emails?", required=False, initial=False) + + def __init__(self, group, meeting, data=None, *args, **kwargs): + self.hidden = kwargs.pop('hidden', False) + self.notifications_optional = kwargs.pop('notifications_optional', False) + + self.group = group + formset_class = sessiondetailsformset_factory(max_num=3 if group.features.acts_like_wg else 50) + self.session_forms = formset_class(group=self.group, meeting=meeting, data=data) + super().__init__(data=data, *args, **kwargs) + if not self.notifications_optional: + self.fields['send_notifications'].widget = forms.HiddenInput() + + # Allow additional sessions for non-wg-like groups + if not self.group.features.acts_like_wg: + self.fields['num_session'].choices = ((n, str(n)) for n in range(1, 51)) + + self._add_widget_class(self.fields['third_session'].widget, 'form-check-input') + self.fields['comments'].widget = forms.Textarea(attrs={'rows': '3', 'cols': '65'}) + + other_groups = list(allowed_conflicting_groups().exclude(pk=group.pk).values_list('acronym', 'acronym').order_by('acronym')) + self.fields['adjacent_with_wg'].choices = [('', '--No preference')] + other_groups + group_acronym_choices = [('', '--Select WG(s)')] + other_groups + self.fields['joint_with_groups_selector'].choices = group_acronym_choices + + # Set up constraints for the meeting + self._wg_field_data = [] + for constraintname in meeting.group_conflict_types.all(): + # two fields for each constraint: a CharField for the group list and a selector to add entries + constraint_field = forms.CharField(max_length=255, required=False) + constraint_field.widget.attrs['data-slug'] = constraintname.slug + constraint_field.widget.attrs['data-constraint-name'] = str(constraintname).title() + constraint_field.widget.attrs['aria-label'] = f'{constraintname.slug}_input' + self._add_widget_class(constraint_field.widget, 'wg_constraint') + self._add_widget_class(constraint_field.widget, 'form-control') + + selector_field = forms.ChoiceField(choices=group_acronym_choices, required=False) + selector_field.widget.attrs['data-slug'] = constraintname.slug # used by onchange handler + self._add_widget_class(selector_field.widget, 'wg_constraint_selector') + self._add_widget_class(selector_field.widget, 'form-control') + + cfield_id = 'constraint_{}'.format(constraintname.slug) + cselector_id = 'wg_selector_{}'.format(constraintname.slug) + # keep an eye out for field name conflicts + log.assertion('cfield_id not in self.fields') + log.assertion('cselector_id not in self.fields') + self.fields[cfield_id] = constraint_field + self.fields[cselector_id] = selector_field + self._wg_field_data.append((constraintname, cfield_id, cselector_id)) + + # Show constraints that are not actually used by the meeting so these don't get lost + self._inactive_wg_field_data = [] + inactive_cnames = ConstraintName.objects.filter( + is_group_conflict=True # Only collect group conflicts... + ).exclude( + meeting=meeting # ...that are not enabled for this meeting... + ).filter( + constraint__source=group, # ...but exist for this group... + constraint__meeting=meeting, # ... at this meeting. 
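# The join through Constraint can yield the same ConstraintName once per
# matching constraint row, so distinct() below collapses those duplicates.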
+ ).distinct() + + for inactive_constraint_name in inactive_cnames: + field_id = 'delete_{}'.format(inactive_constraint_name.slug) + self.fields[field_id] = forms.BooleanField(required=False, label='Delete this conflict', help_text='Delete this inactive conflict?') + self._add_widget_class(self.fields[field_id].widget, 'form-control') + constraints = group.constraint_source_set.filter(meeting=meeting, name=inactive_constraint_name) + self._inactive_wg_field_data.append( + (inactive_constraint_name, + ' '.join([c.target.acronym for c in constraints]), + field_id) + ) + + self.fields['joint_with_groups_selector'].widget.attrs['onchange'] = "document.form_post.joint_with_groups.value=document.form_post.joint_with_groups.value + ' ' + this.options[this.selectedIndex].value; return 1;" + self.fields["resources"].choices = [(x.pk, x.desc) for x in ResourceAssociation.objects.filter(name__used=True).order_by('name__order')] + + if self.hidden: + # replace all the widgets to start... + for key in list(self.fields.keys()): + self.fields[key].widget = forms.HiddenInput() + # re-replace a couple special cases + self.fields['resources'].widget = forms.MultipleHiddenInput() + self.fields['timeranges'].widget = forms.MultipleHiddenInput() + # and entirely replace bethere - no need to support searching if input is hidden + self.fields['bethere'] = ModelMultipleChoiceField( + widget=forms.MultipleHiddenInput, required=False, + queryset=Person.objects.all(), + ) + + def wg_constraint_fields(self): + """Iterates over wg constraint fields + + Intended for use in the template. + """ + for cname, cfield_id, cselector_id in self._wg_field_data: + yield cname, self[cfield_id], self[cselector_id] + + def wg_constraint_count(self): + """How many wg constraints are there?""" + return len(self._wg_field_data) + + def wg_constraint_field_ids(self): + """Iterates over wg constraint field IDs""" + for cname, cfield_id, _ in self._wg_field_data: + yield cname, cfield_id + + def inactive_wg_constraints(self): + for cname, value, field_id in self._inactive_wg_field_data: + yield cname, value, self[field_id] + + def inactive_wg_constraint_count(self): + return len(self._inactive_wg_field_data) + + def inactive_wg_constraint_field_ids(self): + """Iterates over wg constraint field IDs""" + for cname, _, field_id in self._inactive_wg_field_data: + yield cname, field_id + + @staticmethod + def _add_widget_class(widget, new_class): + """Add a new class, taking care in case some already exist""" + existing_classes = widget.attrs.get('class', '').split() + widget.attrs['class'] = ' '.join(existing_classes + [new_class]) + + def _join_conflicts(self, cleaned_data, slugs): + """Concatenate constraint fields from cleaned data into a single list""" + conflicts = [] + for cname, cfield_id, _ in self._wg_field_data: + if cname.slug in slugs and cfield_id in cleaned_data: + groups = cleaned_data[cfield_id] + # convert to python list (allow space or comma separated lists) + items = groups.replace(',', ' ').split() + conflicts.extend(items) + return conflicts + + def _validate_duplicate_conflicts(self, cleaned_data): + """Validate that no WGs appear in more than one constraint that does not allow duplicates + + Raises ValidationError + """ + # Only the older constraints (conflict, conflic2, conflic3) need to be mutually exclusive. 
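# Illustrative example (not from the source): with conflict='alpha beta',
# conflic2='beta' and conflic3 empty, 'beta' is reported exactly once as a
# duplicate. Newer meeting-specific constraint types may repeat a group, so
# they are deliberately excluded from this check.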
+ all_conflicts = self._join_conflicts(cleaned_data, ['conflict', 'conflic2', 'conflic3']) + seen = [] + duplicated = [] + errors = [] + for c in all_conflicts: + if c not in seen: + seen.append(c) + elif c not in duplicated: # only report once + duplicated.append(c) + errors.append(forms.ValidationError('%s appears in conflicts more than once' % c)) + return errors + + def clean_joint_with_groups(self): + groups = self.cleaned_data['joint_with_groups'] + check_conflict(groups, self.group) + return groups + + def clean_comments(self): + return clean_text_field(self.cleaned_data['comments']) + + def clean_bethere(self): + bethere = self.cleaned_data["bethere"] + if bethere: + extra = set( + Person.objects.filter( + role__group=self.group, role__name__in=["chair", "ad"] + ) + & bethere + ) + if extra: + extras = ", ".join(e.name for e in extra) + raise forms.ValidationError( + ( + f"Please remove the following person{pluralize(len(extra))}, the system " + f"tracks their availability due to their role{pluralize(len(extra))}: {extras}." + ) + ) + return bethere + + def clean_send_notifications(self): + return True if not self.notifications_optional else self.cleaned_data['send_notifications'] + + def is_valid(self): + return super().is_valid() and self.session_forms.is_valid() + + def clean(self): + super(SessionRequestForm, self).clean() + self.session_forms.clean() + + data = self.cleaned_data + + # Validate the individual conflict fields + for _, cfield_id, _ in self._wg_field_data: + try: + check_conflict(data[cfield_id], self.group) + except forms.ValidationError as e: + self.add_error(cfield_id, e) + + # Skip remaining tests if individual field tests had errors, + if self.errors: + return data + + # error if conflicts contain disallowed dupes + for error in self._validate_duplicate_conflicts(data): + self.add_error(None, error) + + # Verify expected number of session entries are present + num_sessions_with_data = len(self.session_forms.forms_to_keep) + num_sessions_expected = -1 + try: + num_sessions_expected = int(data.get('num_session', '')) + except ValueError: + self.add_error('num_session', 'Invalid value for number of sessions') + if num_sessions_with_data < num_sessions_expected: + self.add_error('num_session', 'Must provide data for all sessions') + + # if default (empty) option is selected, cleaned_data won't include num_session key + if num_sessions_expected != 2 and num_sessions_expected is not None: + if data.get('session_time_relation'): + self.add_error( + 'session_time_relation', + forms.ValidationError('Time between sessions can only be used when two sessions are requested.') + ) + + joint_session = data.get('joint_for_session', '') + if joint_session != '': + joint_session = int(joint_session) + if joint_session > num_sessions_with_data: + self.add_error( + 'joint_for_session', + forms.ValidationError( + f'Session {joint_session} can not be the joint session, the session has not been requested.' 
+ ) + ) + + return data + + @property + def media(self): + # get media for our formset + return super().media + self.session_forms.media + forms.Media(js=('ietf/js/session_form.js',)) diff --git a/ietf/meeting/helpers.py b/ietf/meeting/helpers.py index e3f4874f4b..39d271ae6b 100644 --- a/ietf/meeting/helpers.py +++ b/ietf/meeting/helpers.py @@ -104,7 +104,7 @@ def preprocess_assignments_for_agenda(assignments_queryset, meeting, extra_prefe queryset=add_event_info_to_session_qs(Session.objects.all().prefetch_related( 'group', 'group__charter', 'group__charter__group', Prefetch('materials', - queryset=Document.objects.exclude(states__type=F("type"), states__slug='deleted').order_by('sessionpresentation__order').prefetch_related('states'), + queryset=Document.objects.exclude(states__type=F("type"), states__slug='deleted').order_by('presentations__order').prefetch_related('states'), to_attr='prefetched_active_materials' ) )) @@ -317,10 +317,21 @@ def _group_filter_headings(self): groups = set(self._get_group(s) for s in self.sessions if s and self._get_group(s)) - log.assertion('len(groups) == len(set(g.acronym for g in groups))') # no repeated acros + # Verify that we're not using the same acronym for more than one distinct group, accounting for + # the possibility that some groups are GroupHistory instances. This assertion will fail if a Group + # and GroupHistory for the same group have a different acronym - in that event, the filter will + # not match the meeting display, so we should be alerted that this has actually occurred. + log.assertion( + "len(set(getattr(g, 'group_id', g.id) for g in groups)) " + "== len(set(g.acronym for g in groups))" + ) group_parents = set(self._get_group_parent(g) for g in groups if self._get_group_parent(g)) - log.assertion('len(group_parents) == len(set(gp.acronym for gp in group_parents))') # no repeated acros + # See above for explanation of this assertion + log.assertion( + "len(set(getattr(gp, 'group_id', gp.id) for gp in group_parents)) " + "== len(set(gp.acronym for gp in group_parents))" + ) all_groups = groups.union(group_parents) all_groups.difference_update([g for g in all_groups if g.acronym in self.exclude_acronyms]) @@ -638,6 +649,11 @@ def read_session_file(type, num, doc): def read_agenda_file(num, doc): return read_session_file('agenda', num, doc) +# TODO-BLOBSTORE: this is _yet another_ draft derived variant created when users +# ask for drafts from the meeting agenda page. Consider whether to refactor this +# now to not call out to external binaries, and consider whether we need this extra +# format at all in the draft blobstore. 
if so, it would probably be stored under +# something like plainpdf/ def convert_draft_to_pdf(doc_name): inpath = os.path.join(settings.IDSUBMIT_REPOSITORY_PATH, doc_name + ".txt") outpath = os.path.join(settings.INTERNET_DRAFT_PDF_PATH, doc_name + ".pdf") @@ -879,7 +895,7 @@ def make_materials_directories(meeting): # was merged with the regular datatracker code; then in secr/proceedings/views.py # in make_directories()) saved_umask = os.umask(0) - for leaf in ('slides','agenda','minutes','id','rfc','bluesheets'): + for leaf in ('slides','agenda','minutes', 'narrativeminutes', 'id','rfc','bluesheets'): target = os.path.join(path,leaf) if not os.path.exists(target): os.makedirs(target) @@ -1088,6 +1104,7 @@ def create_interim_session_conferences(sessions): try: confs = meetecho_manager.create( group=session.group, + session_id=session.pk, description=str(session), start_time=ts.utc_start_time(), duration=ts.duration, diff --git a/ietf/meeting/migrations/0005_alter_session_agenda_note.py b/ietf/meeting/migrations/0005_alter_session_agenda_note.py new file mode 100644 index 0000000000..59daeea45d --- /dev/null +++ b/ietf/meeting/migrations/0005_alter_session_agenda_note.py @@ -0,0 +1,18 @@ +# Copyright The IETF Trust 2024, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("meeting", "0004_session_chat_room"), + ] + + operations = [ + migrations.AlterField( + model_name="session", + name="agenda_note", + field=models.CharField(blank=True, max_length=512), + ), + ] diff --git a/ietf/meeting/migrations/0006_alter_sessionpresentation_document_and_session.py b/ietf/meeting/migrations/0006_alter_sessionpresentation_document_and_session.py new file mode 100644 index 0000000000..e8d6a663f8 --- /dev/null +++ b/ietf/meeting/migrations/0006_alter_sessionpresentation_document_and_session.py @@ -0,0 +1,33 @@ +# Copyright The IETF Trust 2024, All Rights Reserved + +from django.db import migrations +import django.db.models.deletion +import ietf.utils.models + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0021_narrativeminutes"), + ("meeting", "0005_alter_session_agenda_note"), + ] + + operations = [ + migrations.AlterField( + model_name="sessionpresentation", + name="document", + field=ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="presentations", + to="doc.document", + ), + ), + migrations.AlterField( + model_name="sessionpresentation", + name="session", + field=ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="presentations", + to="meeting.session", + ), + ), + ] diff --git a/ietf/meeting/migrations/0007_attended_origin_attended_time.py b/ietf/meeting/migrations/0007_attended_origin_attended_time.py new file mode 100644 index 0000000000..09a8d90e07 --- /dev/null +++ b/ietf/meeting/migrations/0007_attended_origin_attended_time.py @@ -0,0 +1,26 @@ +# Copyright The IETF Trust 2024, All Rights Reserved + +from django.db import migrations, models +import django.utils.timezone + + +class Migration(migrations.Migration): + + dependencies = [ + ("meeting", "0006_alter_sessionpresentation_document_and_session"), + ] + + operations = [ + migrations.AddField( + model_name="attended", + name="origin", + field=models.CharField(default="datatracker", max_length=32), + ), + migrations.AddField( + model_name="attended", + name="time", + field=models.DateTimeField( + blank=True, default=django.utils.timezone.now, null=True + ), 
+ ), + ] diff --git a/ietf/meeting/migrations/0008_remove_schedtimesessassignment_notes.py b/ietf/meeting/migrations/0008_remove_schedtimesessassignment_notes.py new file mode 100644 index 0000000000..3c0b85fc22 --- /dev/null +++ b/ietf/meeting/migrations/0008_remove_schedtimesessassignment_notes.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.15 on 2024-08-16 13:49 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("meeting", "0007_attended_origin_attended_time"), + ] + + operations = [ + migrations.RemoveField( + model_name="schedtimesessassignment", + name="notes", + ), + ] diff --git a/ietf/meeting/migrations/0009_session_meetecho_recording_name.py b/ietf/meeting/migrations/0009_session_meetecho_recording_name.py new file mode 100644 index 0000000000..79ca4919a3 --- /dev/null +++ b/ietf/meeting/migrations/0009_session_meetecho_recording_name.py @@ -0,0 +1,20 @@ +# Copyright The IETF Trust 2024, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("meeting", "0008_remove_schedtimesessassignment_notes"), + ] + + operations = [ + migrations.AddField( + model_name="session", + name="meetecho_recording_name", + field=models.CharField( + blank=True, help_text="Name of the meetecho recording", max_length=64 + ), + ), + ] diff --git a/ietf/meeting/migrations/0010_alter_floorplan_image_alter_meetinghost_logo.py b/ietf/meeting/migrations/0010_alter_floorplan_image_alter_meetinghost_logo.py new file mode 100644 index 0000000000..594a1a4048 --- /dev/null +++ b/ietf/meeting/migrations/0010_alter_floorplan_image_alter_meetinghost_logo.py @@ -0,0 +1,56 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models +import ietf.meeting.models +import ietf.utils.fields +import ietf.utils.storage +import ietf.utils.validators + + +class Migration(migrations.Migration): + + dependencies = [ + ("meeting", "0009_session_meetecho_recording_name"), + ] + + operations = [ + migrations.AlterField( + model_name="floorplan", + name="image", + field=models.ImageField( + blank=True, + default=None, + storage=ietf.utils.storage.BlobShadowFileSystemStorage( + kind="", location=None + ), + upload_to=ietf.meeting.models.floorplan_path, + ), + ), + migrations.AlterField( + model_name="meetinghost", + name="logo", + field=ietf.utils.fields.MissingOkImageField( + height_field="logo_height", + storage=ietf.utils.storage.BlobShadowFileSystemStorage( + kind="", location=None + ), + upload_to=ietf.meeting.models._host_upload_path, + validators=[ + ietf.utils.validators.MaxImageSizeValidator(400, 400), + ietf.utils.validators.WrappedValidator( + ietf.utils.validators.validate_file_size, True + ), + ietf.utils.validators.WrappedValidator( + ietf.utils.validators.validate_file_extension, + [".png", ".jpg", ".jpeg"], + ), + ietf.utils.validators.WrappedValidator( + ietf.utils.validators.validate_mime_type, + ["image/jpeg", "image/png"], + True, + ), + ], + width_field="logo_width", + ), + ), + ] diff --git a/ietf/meeting/migrations/0011_alter_slidesubmission_doc.py b/ietf/meeting/migrations/0011_alter_slidesubmission_doc.py new file mode 100644 index 0000000000..b9cbc58e99 --- /dev/null +++ b/ietf/meeting/migrations/0011_alter_slidesubmission_doc.py @@ -0,0 +1,26 @@ +# Generated by Django 4.2.19 on 2025-03-17 09:37 + +from django.db import migrations +import django.db.models.deletion +import ietf.utils.models + + +class Migration(migrations.Migration): + + 
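# With this change SlideSubmission.doc is optional (blank/null) and uses
# on_delete=SET_NULL, so deleting a Document clears the link rather than
# deleting the SlideSubmission row.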
dependencies = [ + ("doc", "0025_storedobject_storedobject_unique_name_per_store"), + ("meeting", "0010_alter_floorplan_image_alter_meetinghost_logo"), + ] + + operations = [ + migrations.AlterField( + model_name="slidesubmission", + name="doc", + field=ietf.utils.models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="doc.document", + ), + ), + ] diff --git a/ietf/meeting/migrations/0012_registration_registrationticket.py b/ietf/meeting/migrations/0012_registration_registrationticket.py new file mode 100644 index 0000000000..c555f52e8b --- /dev/null +++ b/ietf/meeting/migrations/0012_registration_registrationticket.py @@ -0,0 +1,90 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models +import django.db.models.deletion +import ietf.utils.models + + +class Migration(migrations.Migration): + + dependencies = [ + ("name", "0017_populate_new_reg_names"), + ("person", "0004_alter_person_photo_alter_person_photo_thumb"), + ("meeting", "0011_alter_slidesubmission_doc"), + ] + + operations = [ + migrations.CreateModel( + name="Registration", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("first_name", models.CharField(max_length=255)), + ("last_name", models.CharField(max_length=255)), + ("affiliation", models.CharField(blank=True, max_length=255)), + ("country_code", models.CharField(max_length=2)), + ("email", models.EmailField(blank=True, max_length=254, null=True)), + ("attended", models.BooleanField(default=False)), + ("checkedin", models.BooleanField(default=False)), + ( + "meeting", + ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="meeting.meeting", + ), + ), + ( + "person", + ietf.utils.models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + to="person.person", + ), + ), + ], + ), + migrations.CreateModel( + name="RegistrationTicket", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "attendance_type", + ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + to="name.attendancetypename", + ), + ), + ( + "registration", + ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="tickets", + to="meeting.registration", + ), + ), + ( + "ticket_type", + ietf.utils.models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + to="name.registrationtickettypename", + ), + ), + ], + ), + ] diff --git a/ietf/meeting/migrations/0013_correct_reg_checkedin.py b/ietf/meeting/migrations/0013_correct_reg_checkedin.py new file mode 100644 index 0000000000..88b3efceac --- /dev/null +++ b/ietf/meeting/migrations/0013_correct_reg_checkedin.py @@ -0,0 +1,41 @@ +# Generated by Django 4.2.21 on 2025-05-20 22:28 + +''' +The original migration had a flaw. If a participant had both a remote and onsite +registration, which is rare but does occur, which registration the checkedin state +came from was indeterminate. If it came from the remote registration it would be +False which might be wrong. This migration finds all registrations with onsite tickets +and checkedin is False, and checks if it is correct, and fixes if needed. 
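The onsite MeetingRegistration rows in the stats app are treated as the source of
truth: if any of them were checked in, the corresponding Registration is updated to
checkedin=True.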
+''' + +from django.db import migrations +import datetime + + +def forward(apps, schema_editor): + Registration = apps.get_model('meeting', 'Registration') + MeetingRegistration = apps.get_model('stats', 'MeetingRegistration') + today = datetime.date.today() + for reg in Registration.objects.filter(tickets__attendance_type__slug='onsite', checkedin=False, meeting__date__lt=today).order_by('meeting__number'): + # get original MeetingRegistration + mregs = MeetingRegistration.objects.filter(meeting=reg.meeting, email=reg.email, reg_type='onsite') + mregs_checkedin = [mr.checkedin for mr in mregs] + if any(mregs_checkedin): + reg.checkedin = True + reg.save() + print(f'updating {reg.meeting}:{reg.email}:{reg.pk}') + + +def reverse(apps, schema_editor): + pass + + +class Migration(migrations.Migration): + + dependencies = [ + ("meeting", "0012_registration_registrationticket"), + ] + + operations = [ + migrations.RunPython(forward, reverse), + ] diff --git a/ietf/meeting/migrations/0014_alter_floorplan_image.py b/ietf/meeting/migrations/0014_alter_floorplan_image.py new file mode 100644 index 0000000000..e125625edc --- /dev/null +++ b/ietf/meeting/migrations/0014_alter_floorplan_image.py @@ -0,0 +1,25 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models +import ietf.meeting.models +import ietf.utils.storage + + +class Migration(migrations.Migration): + dependencies = [ + ("meeting", "0013_correct_reg_checkedin"), + ] + + operations = [ + migrations.AlterField( + model_name="floorplan", + name="image", + field=models.ImageField( + default=None, + storage=ietf.utils.storage.BlobShadowFileSystemStorage( + kind="", location=None + ), + upload_to=ietf.meeting.models.floorplan_path, + ), + ), + ] diff --git a/ietf/meeting/migrations/0015_alter_meeting_time_zone.py b/ietf/meeting/migrations/0015_alter_meeting_time_zone.py new file mode 100644 index 0000000000..2a4b7859ee --- /dev/null +++ b/ietf/meeting/migrations/0015_alter_meeting_time_zone.py @@ -0,0 +1,451 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +def forward(apps, schema_editor): + """Migrate 'GMT' meeting time zones to 'UTC'""" + Meeting = apps.get_model("meeting", "Meeting") + Meeting.objects.filter(time_zone="GMT").update(time_zone="UTC") + + +def reverse(apps, schema_editor): + pass # nothing to do + + +class Migration(migrations.Migration): + + dependencies = [ + ("meeting", "0014_alter_floorplan_image"), + ] + + operations = [ + migrations.RunPython(forward, reverse), + migrations.AlterField( + model_name="meeting", + name="time_zone", + field=models.CharField( + choices=[ + ("", "---------"), + ("Africa/Abidjan", "Africa/Abidjan"), + ("Africa/Accra", "Africa/Accra"), + ("Africa/Addis_Ababa", "Africa/Addis_Ababa"), + ("Africa/Algiers", "Africa/Algiers"), + ("Africa/Asmara", "Africa/Asmara"), + ("Africa/Bamako", "Africa/Bamako"), + ("Africa/Bangui", "Africa/Bangui"), + ("Africa/Banjul", "Africa/Banjul"), + ("Africa/Bissau", "Africa/Bissau"), + ("Africa/Blantyre", "Africa/Blantyre"), + ("Africa/Brazzaville", "Africa/Brazzaville"), + ("Africa/Bujumbura", "Africa/Bujumbura"), + ("Africa/Cairo", "Africa/Cairo"), + ("Africa/Casablanca", "Africa/Casablanca"), + ("Africa/Ceuta", "Africa/Ceuta"), + ("Africa/Conakry", "Africa/Conakry"), + ("Africa/Dakar", "Africa/Dakar"), + ("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "Africa/Djibouti"), + ("Africa/Douala", "Africa/Douala"), + ("Africa/El_Aaiun", 
"Africa/El_Aaiun"), + ("Africa/Freetown", "Africa/Freetown"), + ("Africa/Gaborone", "Africa/Gaborone"), + ("Africa/Harare", "Africa/Harare"), + ("Africa/Johannesburg", "Africa/Johannesburg"), + ("Africa/Juba", "Africa/Juba"), + ("Africa/Kampala", "Africa/Kampala"), + ("Africa/Khartoum", "Africa/Khartoum"), + ("Africa/Kigali", "Africa/Kigali"), + ("Africa/Kinshasa", "Africa/Kinshasa"), + ("Africa/Lagos", "Africa/Lagos"), + ("Africa/Libreville", "Africa/Libreville"), + ("Africa/Lome", "Africa/Lome"), + ("Africa/Luanda", "Africa/Luanda"), + ("Africa/Lubumbashi", "Africa/Lubumbashi"), + ("Africa/Lusaka", "Africa/Lusaka"), + ("Africa/Malabo", "Africa/Malabo"), + ("Africa/Maputo", "Africa/Maputo"), + ("Africa/Maseru", "Africa/Maseru"), + ("Africa/Mbabane", "Africa/Mbabane"), + ("Africa/Mogadishu", "Africa/Mogadishu"), + ("Africa/Monrovia", "Africa/Monrovia"), + ("Africa/Nairobi", "Africa/Nairobi"), + ("Africa/Ndjamena", "Africa/Ndjamena"), + ("Africa/Niamey", "Africa/Niamey"), + ("Africa/Nouakchott", "Africa/Nouakchott"), + ("Africa/Ouagadougou", "Africa/Ouagadougou"), + ("Africa/Porto-Novo", "Africa/Porto-Novo"), + ("Africa/Sao_Tome", "Africa/Sao_Tome"), + ("Africa/Tripoli", "Africa/Tripoli"), + ("Africa/Tunis", "Africa/Tunis"), + ("Africa/Windhoek", "Africa/Windhoek"), + ("America/Adak", "America/Adak"), + ("America/Anchorage", "America/Anchorage"), + ("America/Anguilla", "America/Anguilla"), + ("America/Antigua", "America/Antigua"), + ("America/Araguaina", "America/Araguaina"), + ( + "America/Argentina/Buenos_Aires", + "America/Argentina/Buenos_Aires", + ), + ("America/Argentina/Catamarca", "America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "America/Argentina/Mendoza"), + ( + "America/Argentina/Rio_Gallegos", + "America/Argentina/Rio_Gallegos", + ), + ("America/Argentina/Salta", "America/Argentina/Salta"), + ("America/Argentina/San_Juan", "America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"), + ("America/Aruba", "America/Aruba"), + ("America/Asuncion", "America/Asuncion"), + ("America/Atikokan", "America/Atikokan"), + ("America/Bahia", "America/Bahia"), + ("America/Bahia_Banderas", "America/Bahia_Banderas"), + ("America/Barbados", "America/Barbados"), + ("America/Belem", "America/Belem"), + ("America/Belize", "America/Belize"), + ("America/Blanc-Sablon", "America/Blanc-Sablon"), + ("America/Boa_Vista", "America/Boa_Vista"), + ("America/Bogota", "America/Bogota"), + ("America/Boise", "America/Boise"), + ("America/Cambridge_Bay", "America/Cambridge_Bay"), + ("America/Campo_Grande", "America/Campo_Grande"), + ("America/Cancun", "America/Cancun"), + ("America/Caracas", "America/Caracas"), + ("America/Cayenne", "America/Cayenne"), + ("America/Cayman", "America/Cayman"), + ("America/Chicago", "America/Chicago"), + ("America/Chihuahua", "America/Chihuahua"), + ("America/Costa_Rica", "America/Costa_Rica"), + ("America/Creston", "America/Creston"), + ("America/Cuiaba", "America/Cuiaba"), + ("America/Curacao", "America/Curacao"), + ("America/Danmarkshavn", "America/Danmarkshavn"), + ("America/Dawson", "America/Dawson"), + ("America/Dawson_Creek", "America/Dawson_Creek"), + ("America/Denver", "America/Denver"), + ("America/Detroit", 
"America/Detroit"), + ("America/Dominica", "America/Dominica"), + ("America/Edmonton", "America/Edmonton"), + ("America/Eirunepe", "America/Eirunepe"), + ("America/El_Salvador", "America/El_Salvador"), + ("America/Fort_Nelson", "America/Fort_Nelson"), + ("America/Fortaleza", "America/Fortaleza"), + ("America/Glace_Bay", "America/Glace_Bay"), + ("America/Goose_Bay", "America/Goose_Bay"), + ("America/Grand_Turk", "America/Grand_Turk"), + ("America/Grenada", "America/Grenada"), + ("America/Guadeloupe", "America/Guadeloupe"), + ("America/Guatemala", "America/Guatemala"), + ("America/Guayaquil", "America/Guayaquil"), + ("America/Guyana", "America/Guyana"), + ("America/Halifax", "America/Halifax"), + ("America/Havana", "America/Havana"), + ("America/Hermosillo", "America/Hermosillo"), + ("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "America/Indiana/Knox"), + ("America/Indiana/Marengo", "America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "America/Indiana/Winamac"), + ("America/Inuvik", "America/Inuvik"), + ("America/Iqaluit", "America/Iqaluit"), + ("America/Jamaica", "America/Jamaica"), + ("America/Juneau", "America/Juneau"), + ("America/Kentucky/Louisville", "America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "America/Kentucky/Monticello"), + ("America/La_Paz", "America/La_Paz"), + ("America/Lima", "America/Lima"), + ("America/Los_Angeles", "America/Los_Angeles"), + ("America/Maceio", "America/Maceio"), + ("America/Managua", "America/Managua"), + ("America/Manaus", "America/Manaus"), + ("America/Martinique", "America/Martinique"), + ("America/Matamoros", "America/Matamoros"), + ("America/Mazatlan", "America/Mazatlan"), + ("America/Menominee", "America/Menominee"), + ("America/Merida", "America/Merida"), + ("America/Metlakatla", "America/Metlakatla"), + ("America/Mexico_City", "America/Mexico_City"), + ("America/Miquelon", "America/Miquelon"), + ("America/Moncton", "America/Moncton"), + ("America/Monterrey", "America/Monterrey"), + ("America/Montevideo", "America/Montevideo"), + ("America/Montserrat", "America/Montserrat"), + ("America/Nassau", "America/Nassau"), + ("America/New_York", "America/New_York"), + ("America/Nipigon", "America/Nipigon"), + ("America/Nome", "America/Nome"), + ("America/Noronha", "America/Noronha"), + ("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "America/North_Dakota/Center"), + ( + "America/North_Dakota/New_Salem", + "America/North_Dakota/New_Salem", + ), + ("America/Nuuk", "America/Nuuk"), + ("America/Ojinaga", "America/Ojinaga"), + ("America/Panama", "America/Panama"), + ("America/Pangnirtung", "America/Pangnirtung"), + ("America/Paramaribo", "America/Paramaribo"), + ("America/Phoenix", "America/Phoenix"), + ("America/Port-au-Prince", "America/Port-au-Prince"), + ("America/Port_of_Spain", "America/Port_of_Spain"), + ("America/Porto_Velho", "America/Porto_Velho"), + ("America/Puerto_Rico", "America/Puerto_Rico"), + ("America/Punta_Arenas", "America/Punta_Arenas"), + ("America/Rainy_River", "America/Rainy_River"), + ("America/Rankin_Inlet", "America/Rankin_Inlet"), + ("America/Recife", "America/Recife"), + ("America/Regina", "America/Regina"), + ("America/Resolute", "America/Resolute"), + ("America/Rio_Branco", 
"America/Rio_Branco"), + ("America/Santarem", "America/Santarem"), + ("America/Santiago", "America/Santiago"), + ("America/Santo_Domingo", "America/Santo_Domingo"), + ("America/Sao_Paulo", "America/Sao_Paulo"), + ("America/Scoresbysund", "America/Scoresbysund"), + ("America/Sitka", "America/Sitka"), + ("America/St_Johns", "America/St_Johns"), + ("America/St_Kitts", "America/St_Kitts"), + ("America/St_Lucia", "America/St_Lucia"), + ("America/St_Thomas", "America/St_Thomas"), + ("America/St_Vincent", "America/St_Vincent"), + ("America/Swift_Current", "America/Swift_Current"), + ("America/Tegucigalpa", "America/Tegucigalpa"), + ("America/Thule", "America/Thule"), + ("America/Thunder_Bay", "America/Thunder_Bay"), + ("America/Tijuana", "America/Tijuana"), + ("America/Toronto", "America/Toronto"), + ("America/Tortola", "America/Tortola"), + ("America/Vancouver", "America/Vancouver"), + ("America/Whitehorse", "America/Whitehorse"), + ("America/Winnipeg", "America/Winnipeg"), + ("America/Yakutat", "America/Yakutat"), + ("America/Yellowknife", "America/Yellowknife"), + ("Antarctica/Casey", "Antarctica/Casey"), + ("Antarctica/Davis", "Antarctica/Davis"), + ("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "Antarctica/Macquarie"), + ("Antarctica/Mawson", "Antarctica/Mawson"), + ("Antarctica/McMurdo", "Antarctica/McMurdo"), + ("Antarctica/Palmer", "Antarctica/Palmer"), + ("Antarctica/Rothera", "Antarctica/Rothera"), + ("Antarctica/Syowa", "Antarctica/Syowa"), + ("Antarctica/Troll", "Antarctica/Troll"), + ("Antarctica/Vostok", "Antarctica/Vostok"), + ("Asia/Aden", "Asia/Aden"), + ("Asia/Almaty", "Asia/Almaty"), + ("Asia/Amman", "Asia/Amman"), + ("Asia/Anadyr", "Asia/Anadyr"), + ("Asia/Aqtau", "Asia/Aqtau"), + ("Asia/Aqtobe", "Asia/Aqtobe"), + ("Asia/Ashgabat", "Asia/Ashgabat"), + ("Asia/Atyrau", "Asia/Atyrau"), + ("Asia/Baghdad", "Asia/Baghdad"), + ("Asia/Bahrain", "Asia/Bahrain"), + ("Asia/Baku", "Asia/Baku"), + ("Asia/Bangkok", "Asia/Bangkok"), + ("Asia/Barnaul", "Asia/Barnaul"), + ("Asia/Beirut", "Asia/Beirut"), + ("Asia/Bishkek", "Asia/Bishkek"), + ("Asia/Brunei", "Asia/Brunei"), + ("Asia/Chita", "Asia/Chita"), + ("Asia/Choibalsan", "Asia/Choibalsan"), + ("Asia/Colombo", "Asia/Colombo"), + ("Asia/Damascus", "Asia/Damascus"), + ("Asia/Dhaka", "Asia/Dhaka"), + ("Asia/Dili", "Asia/Dili"), + ("Asia/Dubai", "Asia/Dubai"), + ("Asia/Dushanbe", "Asia/Dushanbe"), + ("Asia/Famagusta", "Asia/Famagusta"), + ("Asia/Gaza", "Asia/Gaza"), + ("Asia/Hebron", "Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "Asia/Hong_Kong"), + ("Asia/Hovd", "Asia/Hovd"), + ("Asia/Irkutsk", "Asia/Irkutsk"), + ("Asia/Jakarta", "Asia/Jakarta"), + ("Asia/Jayapura", "Asia/Jayapura"), + ("Asia/Jerusalem", "Asia/Jerusalem"), + ("Asia/Kabul", "Asia/Kabul"), + ("Asia/Kamchatka", "Asia/Kamchatka"), + ("Asia/Karachi", "Asia/Karachi"), + ("Asia/Kathmandu", "Asia/Kathmandu"), + ("Asia/Khandyga", "Asia/Khandyga"), + ("Asia/Kolkata", "Asia/Kolkata"), + ("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"), + ("Asia/Kuching", "Asia/Kuching"), + ("Asia/Kuwait", "Asia/Kuwait"), + ("Asia/Macau", "Asia/Macau"), + ("Asia/Magadan", "Asia/Magadan"), + ("Asia/Makassar", "Asia/Makassar"), + ("Asia/Manila", "Asia/Manila"), + ("Asia/Muscat", "Asia/Muscat"), + ("Asia/Nicosia", "Asia/Nicosia"), + ("Asia/Novokuznetsk", "Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "Asia/Novosibirsk"), + ("Asia/Omsk", "Asia/Omsk"), + ("Asia/Oral", "Asia/Oral"), + 
("Asia/Phnom_Penh", "Asia/Phnom_Penh"), + ("Asia/Pontianak", "Asia/Pontianak"), + ("Asia/Pyongyang", "Asia/Pyongyang"), + ("Asia/Qatar", "Asia/Qatar"), + ("Asia/Qostanay", "Asia/Qostanay"), + ("Asia/Qyzylorda", "Asia/Qyzylorda"), + ("Asia/Riyadh", "Asia/Riyadh"), + ("Asia/Sakhalin", "Asia/Sakhalin"), + ("Asia/Samarkand", "Asia/Samarkand"), + ("Asia/Seoul", "Asia/Seoul"), + ("Asia/Shanghai", "Asia/Shanghai"), + ("Asia/Singapore", "Asia/Singapore"), + ("Asia/Srednekolymsk", "Asia/Srednekolymsk"), + ("Asia/Taipei", "Asia/Taipei"), + ("Asia/Tashkent", "Asia/Tashkent"), + ("Asia/Tbilisi", "Asia/Tbilisi"), + ("Asia/Tehran", "Asia/Tehran"), + ("Asia/Thimphu", "Asia/Thimphu"), + ("Asia/Tokyo", "Asia/Tokyo"), + ("Asia/Tomsk", "Asia/Tomsk"), + ("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"), + ("Asia/Urumqi", "Asia/Urumqi"), + ("Asia/Ust-Nera", "Asia/Ust-Nera"), + ("Asia/Vientiane", "Asia/Vientiane"), + ("Asia/Vladivostok", "Asia/Vladivostok"), + ("Asia/Yakutsk", "Asia/Yakutsk"), + ("Asia/Yangon", "Asia/Yangon"), + ("Asia/Yekaterinburg", "Asia/Yekaterinburg"), + ("Asia/Yerevan", "Asia/Yerevan"), + ("Atlantic/Azores", "Atlantic/Azores"), + ("Atlantic/Bermuda", "Atlantic/Bermuda"), + ("Atlantic/Canary", "Atlantic/Canary"), + ("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "Atlantic/Faroe"), + ("Atlantic/Madeira", "Atlantic/Madeira"), + ("Atlantic/Reykjavik", "Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "Atlantic/St_Helena"), + ("Atlantic/Stanley", "Atlantic/Stanley"), + ("Australia/Adelaide", "Australia/Adelaide"), + ("Australia/Brisbane", "Australia/Brisbane"), + ("Australia/Broken_Hill", "Australia/Broken_Hill"), + ("Australia/Darwin", "Australia/Darwin"), + ("Australia/Eucla", "Australia/Eucla"), + ("Australia/Hobart", "Australia/Hobart"), + ("Australia/Lindeman", "Australia/Lindeman"), + ("Australia/Lord_Howe", "Australia/Lord_Howe"), + ("Australia/Melbourne", "Australia/Melbourne"), + ("Australia/Perth", "Australia/Perth"), + ("Australia/Sydney", "Australia/Sydney"), + ("Europe/Amsterdam", "Europe/Amsterdam"), + ("Europe/Andorra", "Europe/Andorra"), + ("Europe/Astrakhan", "Europe/Astrakhan"), + ("Europe/Athens", "Europe/Athens"), + ("Europe/Belgrade", "Europe/Belgrade"), + ("Europe/Berlin", "Europe/Berlin"), + ("Europe/Brussels", "Europe/Brussels"), + ("Europe/Bucharest", "Europe/Bucharest"), + ("Europe/Budapest", "Europe/Budapest"), + ("Europe/Chisinau", "Europe/Chisinau"), + ("Europe/Copenhagen", "Europe/Copenhagen"), + ("Europe/Dublin", "Europe/Dublin"), + ("Europe/Gibraltar", "Europe/Gibraltar"), + ("Europe/Helsinki", "Europe/Helsinki"), + ("Europe/Istanbul", "Europe/Istanbul"), + ("Europe/Kaliningrad", "Europe/Kaliningrad"), + ("Europe/Kirov", "Europe/Kirov"), + ("Europe/Kyiv", "Europe/Kyiv"), + ("Europe/Lisbon", "Europe/Lisbon"), + ("Europe/London", "Europe/London"), + ("Europe/Luxembourg", "Europe/Luxembourg"), + ("Europe/Madrid", "Europe/Madrid"), + ("Europe/Malta", "Europe/Malta"), + ("Europe/Minsk", "Europe/Minsk"), + ("Europe/Monaco", "Europe/Monaco"), + ("Europe/Moscow", "Europe/Moscow"), + ("Europe/Oslo", "Europe/Oslo"), + ("Europe/Paris", "Europe/Paris"), + ("Europe/Prague", "Europe/Prague"), + ("Europe/Riga", "Europe/Riga"), + ("Europe/Rome", "Europe/Rome"), + ("Europe/Samara", "Europe/Samara"), + ("Europe/Saratov", "Europe/Saratov"), + ("Europe/Simferopol", "Europe/Simferopol"), + ("Europe/Sofia", "Europe/Sofia"), + ("Europe/Stockholm", "Europe/Stockholm"), + ("Europe/Tallinn", "Europe/Tallinn"), + 
("Europe/Tirane", "Europe/Tirane"), + ("Europe/Ulyanovsk", "Europe/Ulyanovsk"), + ("Europe/Uzhgorod", "Europe/Uzhgorod"), + ("Europe/Vaduz", "Europe/Vaduz"), + ("Europe/Vienna", "Europe/Vienna"), + ("Europe/Vilnius", "Europe/Vilnius"), + ("Europe/Volgograd", "Europe/Volgograd"), + ("Europe/Warsaw", "Europe/Warsaw"), + ("Europe/Zaporozhye", "Europe/Zaporozhye"), + ("Europe/Zurich", "Europe/Zurich"), + ("Indian/Antananarivo", "Indian/Antananarivo"), + ("Indian/Chagos", "Indian/Chagos"), + ("Indian/Christmas", "Indian/Christmas"), + ("Indian/Cocos", "Indian/Cocos"), + ("Indian/Comoro", "Indian/Comoro"), + ("Indian/Kerguelen", "Indian/Kerguelen"), + ("Indian/Mahe", "Indian/Mahe"), + ("Indian/Maldives", "Indian/Maldives"), + ("Indian/Mauritius", "Indian/Mauritius"), + ("Indian/Mayotte", "Indian/Mayotte"), + ("Indian/Reunion", "Indian/Reunion"), + ("Pacific/Apia", "Pacific/Apia"), + ("Pacific/Auckland", "Pacific/Auckland"), + ("Pacific/Bougainville", "Pacific/Bougainville"), + ("Pacific/Chatham", "Pacific/Chatham"), + ("Pacific/Chuuk", "Pacific/Chuuk"), + ("Pacific/Easter", "Pacific/Easter"), + ("Pacific/Efate", "Pacific/Efate"), + ("Pacific/Fakaofo", "Pacific/Fakaofo"), + ("Pacific/Fiji", "Pacific/Fiji"), + ("Pacific/Funafuti", "Pacific/Funafuti"), + ("Pacific/Galapagos", "Pacific/Galapagos"), + ("Pacific/Gambier", "Pacific/Gambier"), + ("Pacific/Guadalcanal", "Pacific/Guadalcanal"), + ("Pacific/Guam", "Pacific/Guam"), + ("Pacific/Honolulu", "Pacific/Honolulu"), + ("Pacific/Kanton", "Pacific/Kanton"), + ("Pacific/Kiritimati", "Pacific/Kiritimati"), + ("Pacific/Kosrae", "Pacific/Kosrae"), + ("Pacific/Kwajalein", "Pacific/Kwajalein"), + ("Pacific/Majuro", "Pacific/Majuro"), + ("Pacific/Marquesas", "Pacific/Marquesas"), + ("Pacific/Midway", "Pacific/Midway"), + ("Pacific/Nauru", "Pacific/Nauru"), + ("Pacific/Niue", "Pacific/Niue"), + ("Pacific/Norfolk", "Pacific/Norfolk"), + ("Pacific/Noumea", "Pacific/Noumea"), + ("Pacific/Pago_Pago", "Pacific/Pago_Pago"), + ("Pacific/Palau", "Pacific/Palau"), + ("Pacific/Pitcairn", "Pacific/Pitcairn"), + ("Pacific/Pohnpei", "Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "Pacific/Rarotonga"), + ("Pacific/Saipan", "Pacific/Saipan"), + ("Pacific/Tahiti", "Pacific/Tahiti"), + ("Pacific/Tarawa", "Pacific/Tarawa"), + ("Pacific/Tongatapu", "Pacific/Tongatapu"), + ("Pacific/Wake", "Pacific/Wake"), + ("Pacific/Wallis", "Pacific/Wallis"), + ("UTC", "UTC"), + ], + default="UTC", + max_length=255, + ), + ), + ] diff --git a/ietf/meeting/migrations/0016_alter_meeting_country_alter_meeting_time_zone.py b/ietf/meeting/migrations/0016_alter_meeting_country_alter_meeting_time_zone.py new file mode 100644 index 0000000000..8c467ea156 --- /dev/null +++ b/ietf/meeting/migrations/0016_alter_meeting_country_alter_meeting_time_zone.py @@ -0,0 +1,694 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("meeting", "0015_alter_meeting_time_zone"), + ] + + operations = [ + migrations.AlterField( + model_name="meeting", + name="country", + field=models.CharField( + blank=True, + choices=[ + ("", "---------"), + ("AF", "Afghanistan"), + ("AL", "Albania"), + ("DZ", "Algeria"), + ("AD", "Andorra"), + ("AO", "Angola"), + ("AI", "Anguilla"), + ("AQ", "Antarctica"), + ("AG", "Antigua & Barbuda"), + ("AR", "Argentina"), + ("AM", "Armenia"), + ("AW", "Aruba"), + ("AU", "Australia"), + ("AT", "Austria"), + ("AZ", "Azerbaijan"), + ("BS", 
"Bahamas"), + ("BH", "Bahrain"), + ("BD", "Bangladesh"), + ("BB", "Barbados"), + ("BY", "Belarus"), + ("BE", "Belgium"), + ("BZ", "Belize"), + ("BJ", "Benin"), + ("BM", "Bermuda"), + ("BT", "Bhutan"), + ("BO", "Bolivia"), + ("BA", "Bosnia & Herzegovina"), + ("BW", "Botswana"), + ("BV", "Bouvet Island"), + ("BR", "Brazil"), + ("GB", "Britain (UK)"), + ("IO", "British Indian Ocean Territory"), + ("BN", "Brunei"), + ("BG", "Bulgaria"), + ("BF", "Burkina Faso"), + ("BI", "Burundi"), + ("KH", "Cambodia"), + ("CM", "Cameroon"), + ("CA", "Canada"), + ("CV", "Cape Verde"), + ("BQ", "Caribbean NL"), + ("KY", "Cayman Islands"), + ("CF", "Central African Rep."), + ("TD", "Chad"), + ("CL", "Chile"), + ("CN", "China"), + ("CX", "Christmas Island"), + ("CC", "Cocos (Keeling) Islands"), + ("CO", "Colombia"), + ("KM", "Comoros"), + ("CD", "Congo (Dem. Rep.)"), + ("CG", "Congo (Rep.)"), + ("CK", "Cook Islands"), + ("CR", "Costa Rica"), + ("HR", "Croatia"), + ("CU", "Cuba"), + ("CW", "Curaçao"), + ("CY", "Cyprus"), + ("CZ", "Czech Republic"), + ("CI", "Côte d'Ivoire"), + ("DK", "Denmark"), + ("DJ", "Djibouti"), + ("DM", "Dominica"), + ("DO", "Dominican Republic"), + ("TL", "East Timor"), + ("EC", "Ecuador"), + ("EG", "Egypt"), + ("SV", "El Salvador"), + ("GQ", "Equatorial Guinea"), + ("ER", "Eritrea"), + ("EE", "Estonia"), + ("SZ", "Eswatini (Swaziland)"), + ("ET", "Ethiopia"), + ("FK", "Falkland Islands"), + ("FO", "Faroe Islands"), + ("FJ", "Fiji"), + ("FI", "Finland"), + ("FR", "France"), + ("GF", "French Guiana"), + ("PF", "French Polynesia"), + ("TF", "French S. Terr."), + ("GA", "Gabon"), + ("GM", "Gambia"), + ("GE", "Georgia"), + ("DE", "Germany"), + ("GH", "Ghana"), + ("GI", "Gibraltar"), + ("GR", "Greece"), + ("GL", "Greenland"), + ("GD", "Grenada"), + ("GP", "Guadeloupe"), + ("GU", "Guam"), + ("GT", "Guatemala"), + ("GG", "Guernsey"), + ("GN", "Guinea"), + ("GW", "Guinea-Bissau"), + ("GY", "Guyana"), + ("HT", "Haiti"), + ("HM", "Heard Island & McDonald Islands"), + ("HN", "Honduras"), + ("HK", "Hong Kong"), + ("HU", "Hungary"), + ("IS", "Iceland"), + ("IN", "India"), + ("ID", "Indonesia"), + ("IR", "Iran"), + ("IQ", "Iraq"), + ("IE", "Ireland"), + ("IM", "Isle of Man"), + ("IL", "Israel"), + ("IT", "Italy"), + ("JM", "Jamaica"), + ("JP", "Japan"), + ("JE", "Jersey"), + ("JO", "Jordan"), + ("KZ", "Kazakhstan"), + ("KE", "Kenya"), + ("KI", "Kiribati"), + ("KP", "Korea (North)"), + ("KR", "Korea (South)"), + ("KW", "Kuwait"), + ("KG", "Kyrgyzstan"), + ("LA", "Laos"), + ("LV", "Latvia"), + ("LB", "Lebanon"), + ("LS", "Lesotho"), + ("LR", "Liberia"), + ("LY", "Libya"), + ("LI", "Liechtenstein"), + ("LT", "Lithuania"), + ("LU", "Luxembourg"), + ("MO", "Macau"), + ("MG", "Madagascar"), + ("MW", "Malawi"), + ("MY", "Malaysia"), + ("MV", "Maldives"), + ("ML", "Mali"), + ("MT", "Malta"), + ("MH", "Marshall Islands"), + ("MQ", "Martinique"), + ("MR", "Mauritania"), + ("MU", "Mauritius"), + ("YT", "Mayotte"), + ("MX", "Mexico"), + ("FM", "Micronesia"), + ("MD", "Moldova"), + ("MC", "Monaco"), + ("MN", "Mongolia"), + ("ME", "Montenegro"), + ("MS", "Montserrat"), + ("MA", "Morocco"), + ("MZ", "Mozambique"), + ("MM", "Myanmar (Burma)"), + ("NA", "Namibia"), + ("NR", "Nauru"), + ("NP", "Nepal"), + ("NL", "Netherlands"), + ("NC", "New Caledonia"), + ("NZ", "New Zealand"), + ("NI", "Nicaragua"), + ("NE", "Niger"), + ("NG", "Nigeria"), + ("NU", "Niue"), + ("NF", "Norfolk Island"), + ("MK", "North Macedonia"), + ("MP", "Northern Mariana Islands"), + ("NO", "Norway"), + ("OM", "Oman"), + ("PK", "Pakistan"), + ("PW", 
"Palau"), + ("PS", "Palestine"), + ("PA", "Panama"), + ("PG", "Papua New Guinea"), + ("PY", "Paraguay"), + ("PE", "Peru"), + ("PH", "Philippines"), + ("PN", "Pitcairn"), + ("PL", "Poland"), + ("PT", "Portugal"), + ("PR", "Puerto Rico"), + ("QA", "Qatar"), + ("RO", "Romania"), + ("RU", "Russia"), + ("RW", "Rwanda"), + ("RE", "Réunion"), + ("AS", "Samoa (American)"), + ("WS", "Samoa (western)"), + ("SM", "San Marino"), + ("ST", "Sao Tome & Principe"), + ("SA", "Saudi Arabia"), + ("SN", "Senegal"), + ("RS", "Serbia"), + ("SC", "Seychelles"), + ("SL", "Sierra Leone"), + ("SG", "Singapore"), + ("SK", "Slovakia"), + ("SI", "Slovenia"), + ("SB", "Solomon Islands"), + ("SO", "Somalia"), + ("ZA", "South Africa"), + ("GS", "South Georgia & the South Sandwich Islands"), + ("SS", "South Sudan"), + ("ES", "Spain"), + ("LK", "Sri Lanka"), + ("BL", "St Barthelemy"), + ("SH", "St Helena"), + ("KN", "St Kitts & Nevis"), + ("LC", "St Lucia"), + ("SX", "St Maarten (Dutch)"), + ("MF", "St Martin (French)"), + ("PM", "St Pierre & Miquelon"), + ("VC", "St Vincent"), + ("SD", "Sudan"), + ("SR", "Suriname"), + ("SJ", "Svalbard & Jan Mayen"), + ("SE", "Sweden"), + ("CH", "Switzerland"), + ("SY", "Syria"), + ("TW", "Taiwan"), + ("TJ", "Tajikistan"), + ("TZ", "Tanzania"), + ("TH", "Thailand"), + ("TG", "Togo"), + ("TK", "Tokelau"), + ("TO", "Tonga"), + ("TT", "Trinidad & Tobago"), + ("TN", "Tunisia"), + ("TR", "Turkey"), + ("TM", "Turkmenistan"), + ("TC", "Turks & Caicos Is"), + ("TV", "Tuvalu"), + ("UM", "US minor outlying islands"), + ("UG", "Uganda"), + ("UA", "Ukraine"), + ("AE", "United Arab Emirates"), + ("US", "United States"), + ("UY", "Uruguay"), + ("UZ", "Uzbekistan"), + ("VU", "Vanuatu"), + ("VA", "Vatican City"), + ("VE", "Venezuela"), + ("VN", "Vietnam"), + ("VG", "Virgin Islands (UK)"), + ("VI", "Virgin Islands (US)"), + ("WF", "Wallis & Futuna"), + ("EH", "Western Sahara"), + ("YE", "Yemen"), + ("ZM", "Zambia"), + ("ZW", "Zimbabwe"), + ("AX", "Åland Islands"), + ], + max_length=2, + ), + ), + migrations.AlterField( + model_name="meeting", + name="time_zone", + field=models.CharField( + choices=[ + ("", "---------"), + ("Africa/Abidjan", "Africa/Abidjan"), + ("Africa/Accra", "Africa/Accra"), + ("Africa/Addis_Ababa", "Africa/Addis_Ababa"), + ("Africa/Algiers", "Africa/Algiers"), + ("Africa/Asmara", "Africa/Asmara"), + ("Africa/Bamako", "Africa/Bamako"), + ("Africa/Bangui", "Africa/Bangui"), + ("Africa/Banjul", "Africa/Banjul"), + ("Africa/Bissau", "Africa/Bissau"), + ("Africa/Blantyre", "Africa/Blantyre"), + ("Africa/Brazzaville", "Africa/Brazzaville"), + ("Africa/Bujumbura", "Africa/Bujumbura"), + ("Africa/Cairo", "Africa/Cairo"), + ("Africa/Casablanca", "Africa/Casablanca"), + ("Africa/Ceuta", "Africa/Ceuta"), + ("Africa/Conakry", "Africa/Conakry"), + ("Africa/Dakar", "Africa/Dakar"), + ("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "Africa/Djibouti"), + ("Africa/Douala", "Africa/Douala"), + ("Africa/El_Aaiun", "Africa/El_Aaiun"), + ("Africa/Freetown", "Africa/Freetown"), + ("Africa/Gaborone", "Africa/Gaborone"), + ("Africa/Harare", "Africa/Harare"), + ("Africa/Johannesburg", "Africa/Johannesburg"), + ("Africa/Juba", "Africa/Juba"), + ("Africa/Kampala", "Africa/Kampala"), + ("Africa/Khartoum", "Africa/Khartoum"), + ("Africa/Kigali", "Africa/Kigali"), + ("Africa/Kinshasa", "Africa/Kinshasa"), + ("Africa/Lagos", "Africa/Lagos"), + ("Africa/Libreville", "Africa/Libreville"), + ("Africa/Lome", "Africa/Lome"), + ("Africa/Luanda", "Africa/Luanda"), + ("Africa/Lubumbashi", 
"Africa/Lubumbashi"), + ("Africa/Lusaka", "Africa/Lusaka"), + ("Africa/Malabo", "Africa/Malabo"), + ("Africa/Maputo", "Africa/Maputo"), + ("Africa/Maseru", "Africa/Maseru"), + ("Africa/Mbabane", "Africa/Mbabane"), + ("Africa/Mogadishu", "Africa/Mogadishu"), + ("Africa/Monrovia", "Africa/Monrovia"), + ("Africa/Nairobi", "Africa/Nairobi"), + ("Africa/Ndjamena", "Africa/Ndjamena"), + ("Africa/Niamey", "Africa/Niamey"), + ("Africa/Nouakchott", "Africa/Nouakchott"), + ("Africa/Ouagadougou", "Africa/Ouagadougou"), + ("Africa/Porto-Novo", "Africa/Porto-Novo"), + ("Africa/Sao_Tome", "Africa/Sao_Tome"), + ("Africa/Tripoli", "Africa/Tripoli"), + ("Africa/Tunis", "Africa/Tunis"), + ("Africa/Windhoek", "Africa/Windhoek"), + ("America/Adak", "America/Adak"), + ("America/Anchorage", "America/Anchorage"), + ("America/Anguilla", "America/Anguilla"), + ("America/Antigua", "America/Antigua"), + ("America/Araguaina", "America/Araguaina"), + ( + "America/Argentina/Buenos_Aires", + "America/Argentina/Buenos_Aires", + ), + ("America/Argentina/Catamarca", "America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "America/Argentina/Mendoza"), + ( + "America/Argentina/Rio_Gallegos", + "America/Argentina/Rio_Gallegos", + ), + ("America/Argentina/Salta", "America/Argentina/Salta"), + ("America/Argentina/San_Juan", "America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"), + ("America/Aruba", "America/Aruba"), + ("America/Asuncion", "America/Asuncion"), + ("America/Atikokan", "America/Atikokan"), + ("America/Bahia", "America/Bahia"), + ("America/Bahia_Banderas", "America/Bahia_Banderas"), + ("America/Barbados", "America/Barbados"), + ("America/Belem", "America/Belem"), + ("America/Belize", "America/Belize"), + ("America/Blanc-Sablon", "America/Blanc-Sablon"), + ("America/Boa_Vista", "America/Boa_Vista"), + ("America/Bogota", "America/Bogota"), + ("America/Boise", "America/Boise"), + ("America/Cambridge_Bay", "America/Cambridge_Bay"), + ("America/Campo_Grande", "America/Campo_Grande"), + ("America/Cancun", "America/Cancun"), + ("America/Caracas", "America/Caracas"), + ("America/Cayenne", "America/Cayenne"), + ("America/Cayman", "America/Cayman"), + ("America/Chicago", "America/Chicago"), + ("America/Chihuahua", "America/Chihuahua"), + ("America/Ciudad_Juarez", "America/Ciudad_Juarez"), + ("America/Costa_Rica", "America/Costa_Rica"), + ("America/Coyhaique", "America/Coyhaique"), + ("America/Creston", "America/Creston"), + ("America/Cuiaba", "America/Cuiaba"), + ("America/Curacao", "America/Curacao"), + ("America/Danmarkshavn", "America/Danmarkshavn"), + ("America/Dawson", "America/Dawson"), + ("America/Dawson_Creek", "America/Dawson_Creek"), + ("America/Denver", "America/Denver"), + ("America/Detroit", "America/Detroit"), + ("America/Dominica", "America/Dominica"), + ("America/Edmonton", "America/Edmonton"), + ("America/Eirunepe", "America/Eirunepe"), + ("America/El_Salvador", "America/El_Salvador"), + ("America/Fort_Nelson", "America/Fort_Nelson"), + ("America/Fortaleza", "America/Fortaleza"), + ("America/Glace_Bay", "America/Glace_Bay"), + ("America/Goose_Bay", "America/Goose_Bay"), + ("America/Grand_Turk", "America/Grand_Turk"), + ("America/Grenada", "America/Grenada"), + 
("America/Guadeloupe", "America/Guadeloupe"), + ("America/Guatemala", "America/Guatemala"), + ("America/Guayaquil", "America/Guayaquil"), + ("America/Guyana", "America/Guyana"), + ("America/Halifax", "America/Halifax"), + ("America/Havana", "America/Havana"), + ("America/Hermosillo", "America/Hermosillo"), + ("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "America/Indiana/Knox"), + ("America/Indiana/Marengo", "America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "America/Indiana/Winamac"), + ("America/Inuvik", "America/Inuvik"), + ("America/Iqaluit", "America/Iqaluit"), + ("America/Jamaica", "America/Jamaica"), + ("America/Juneau", "America/Juneau"), + ("America/Kentucky/Louisville", "America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "America/Kentucky/Monticello"), + ("America/La_Paz", "America/La_Paz"), + ("America/Lima", "America/Lima"), + ("America/Los_Angeles", "America/Los_Angeles"), + ("America/Maceio", "America/Maceio"), + ("America/Managua", "America/Managua"), + ("America/Manaus", "America/Manaus"), + ("America/Martinique", "America/Martinique"), + ("America/Matamoros", "America/Matamoros"), + ("America/Mazatlan", "America/Mazatlan"), + ("America/Menominee", "America/Menominee"), + ("America/Merida", "America/Merida"), + ("America/Metlakatla", "America/Metlakatla"), + ("America/Mexico_City", "America/Mexico_City"), + ("America/Miquelon", "America/Miquelon"), + ("America/Moncton", "America/Moncton"), + ("America/Monterrey", "America/Monterrey"), + ("America/Montevideo", "America/Montevideo"), + ("America/Montserrat", "America/Montserrat"), + ("America/Nassau", "America/Nassau"), + ("America/New_York", "America/New_York"), + ("America/Nome", "America/Nome"), + ("America/Noronha", "America/Noronha"), + ("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "America/North_Dakota/Center"), + ( + "America/North_Dakota/New_Salem", + "America/North_Dakota/New_Salem", + ), + ("America/Nuuk", "America/Nuuk"), + ("America/Ojinaga", "America/Ojinaga"), + ("America/Panama", "America/Panama"), + ("America/Paramaribo", "America/Paramaribo"), + ("America/Phoenix", "America/Phoenix"), + ("America/Port-au-Prince", "America/Port-au-Prince"), + ("America/Port_of_Spain", "America/Port_of_Spain"), + ("America/Porto_Velho", "America/Porto_Velho"), + ("America/Puerto_Rico", "America/Puerto_Rico"), + ("America/Punta_Arenas", "America/Punta_Arenas"), + ("America/Rankin_Inlet", "America/Rankin_Inlet"), + ("America/Recife", "America/Recife"), + ("America/Regina", "America/Regina"), + ("America/Resolute", "America/Resolute"), + ("America/Rio_Branco", "America/Rio_Branco"), + ("America/Santarem", "America/Santarem"), + ("America/Santiago", "America/Santiago"), + ("America/Santo_Domingo", "America/Santo_Domingo"), + ("America/Sao_Paulo", "America/Sao_Paulo"), + ("America/Scoresbysund", "America/Scoresbysund"), + ("America/Sitka", "America/Sitka"), + ("America/St_Johns", "America/St_Johns"), + ("America/St_Kitts", "America/St_Kitts"), + ("America/St_Lucia", "America/St_Lucia"), + ("America/St_Thomas", "America/St_Thomas"), + ("America/St_Vincent", "America/St_Vincent"), + ("America/Swift_Current", "America/Swift_Current"), + ("America/Tegucigalpa", 
"America/Tegucigalpa"), + ("America/Thule", "America/Thule"), + ("America/Tijuana", "America/Tijuana"), + ("America/Toronto", "America/Toronto"), + ("America/Tortola", "America/Tortola"), + ("America/Vancouver", "America/Vancouver"), + ("America/Whitehorse", "America/Whitehorse"), + ("America/Winnipeg", "America/Winnipeg"), + ("America/Yakutat", "America/Yakutat"), + ("Antarctica/Casey", "Antarctica/Casey"), + ("Antarctica/Davis", "Antarctica/Davis"), + ("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "Antarctica/Macquarie"), + ("Antarctica/Mawson", "Antarctica/Mawson"), + ("Antarctica/McMurdo", "Antarctica/McMurdo"), + ("Antarctica/Palmer", "Antarctica/Palmer"), + ("Antarctica/Rothera", "Antarctica/Rothera"), + ("Antarctica/Syowa", "Antarctica/Syowa"), + ("Antarctica/Troll", "Antarctica/Troll"), + ("Antarctica/Vostok", "Antarctica/Vostok"), + ("Asia/Aden", "Asia/Aden"), + ("Asia/Almaty", "Asia/Almaty"), + ("Asia/Amman", "Asia/Amman"), + ("Asia/Anadyr", "Asia/Anadyr"), + ("Asia/Aqtau", "Asia/Aqtau"), + ("Asia/Aqtobe", "Asia/Aqtobe"), + ("Asia/Ashgabat", "Asia/Ashgabat"), + ("Asia/Atyrau", "Asia/Atyrau"), + ("Asia/Baghdad", "Asia/Baghdad"), + ("Asia/Bahrain", "Asia/Bahrain"), + ("Asia/Baku", "Asia/Baku"), + ("Asia/Bangkok", "Asia/Bangkok"), + ("Asia/Barnaul", "Asia/Barnaul"), + ("Asia/Beirut", "Asia/Beirut"), + ("Asia/Bishkek", "Asia/Bishkek"), + ("Asia/Brunei", "Asia/Brunei"), + ("Asia/Chita", "Asia/Chita"), + ("Asia/Colombo", "Asia/Colombo"), + ("Asia/Damascus", "Asia/Damascus"), + ("Asia/Dhaka", "Asia/Dhaka"), + ("Asia/Dili", "Asia/Dili"), + ("Asia/Dubai", "Asia/Dubai"), + ("Asia/Dushanbe", "Asia/Dushanbe"), + ("Asia/Famagusta", "Asia/Famagusta"), + ("Asia/Gaza", "Asia/Gaza"), + ("Asia/Hebron", "Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "Asia/Hong_Kong"), + ("Asia/Hovd", "Asia/Hovd"), + ("Asia/Irkutsk", "Asia/Irkutsk"), + ("Asia/Jakarta", "Asia/Jakarta"), + ("Asia/Jayapura", "Asia/Jayapura"), + ("Asia/Jerusalem", "Asia/Jerusalem"), + ("Asia/Kabul", "Asia/Kabul"), + ("Asia/Kamchatka", "Asia/Kamchatka"), + ("Asia/Karachi", "Asia/Karachi"), + ("Asia/Kathmandu", "Asia/Kathmandu"), + ("Asia/Khandyga", "Asia/Khandyga"), + ("Asia/Kolkata", "Asia/Kolkata"), + ("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"), + ("Asia/Kuching", "Asia/Kuching"), + ("Asia/Kuwait", "Asia/Kuwait"), + ("Asia/Macau", "Asia/Macau"), + ("Asia/Magadan", "Asia/Magadan"), + ("Asia/Makassar", "Asia/Makassar"), + ("Asia/Manila", "Asia/Manila"), + ("Asia/Muscat", "Asia/Muscat"), + ("Asia/Nicosia", "Asia/Nicosia"), + ("Asia/Novokuznetsk", "Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "Asia/Novosibirsk"), + ("Asia/Omsk", "Asia/Omsk"), + ("Asia/Oral", "Asia/Oral"), + ("Asia/Phnom_Penh", "Asia/Phnom_Penh"), + ("Asia/Pontianak", "Asia/Pontianak"), + ("Asia/Pyongyang", "Asia/Pyongyang"), + ("Asia/Qatar", "Asia/Qatar"), + ("Asia/Qostanay", "Asia/Qostanay"), + ("Asia/Qyzylorda", "Asia/Qyzylorda"), + ("Asia/Riyadh", "Asia/Riyadh"), + ("Asia/Sakhalin", "Asia/Sakhalin"), + ("Asia/Samarkand", "Asia/Samarkand"), + ("Asia/Seoul", "Asia/Seoul"), + ("Asia/Shanghai", "Asia/Shanghai"), + ("Asia/Singapore", "Asia/Singapore"), + ("Asia/Srednekolymsk", "Asia/Srednekolymsk"), + ("Asia/Taipei", "Asia/Taipei"), + ("Asia/Tashkent", "Asia/Tashkent"), + ("Asia/Tbilisi", "Asia/Tbilisi"), + ("Asia/Tehran", "Asia/Tehran"), + ("Asia/Thimphu", "Asia/Thimphu"), + ("Asia/Tokyo", "Asia/Tokyo"), + ("Asia/Tomsk", "Asia/Tomsk"), + ("Asia/Ulaanbaatar", 
"Asia/Ulaanbaatar"), + ("Asia/Urumqi", "Asia/Urumqi"), + ("Asia/Ust-Nera", "Asia/Ust-Nera"), + ("Asia/Vientiane", "Asia/Vientiane"), + ("Asia/Vladivostok", "Asia/Vladivostok"), + ("Asia/Yakutsk", "Asia/Yakutsk"), + ("Asia/Yangon", "Asia/Yangon"), + ("Asia/Yekaterinburg", "Asia/Yekaterinburg"), + ("Asia/Yerevan", "Asia/Yerevan"), + ("Atlantic/Azores", "Atlantic/Azores"), + ("Atlantic/Bermuda", "Atlantic/Bermuda"), + ("Atlantic/Canary", "Atlantic/Canary"), + ("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "Atlantic/Faroe"), + ("Atlantic/Madeira", "Atlantic/Madeira"), + ("Atlantic/Reykjavik", "Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "Atlantic/St_Helena"), + ("Atlantic/Stanley", "Atlantic/Stanley"), + ("Australia/Adelaide", "Australia/Adelaide"), + ("Australia/Brisbane", "Australia/Brisbane"), + ("Australia/Broken_Hill", "Australia/Broken_Hill"), + ("Australia/Darwin", "Australia/Darwin"), + ("Australia/Eucla", "Australia/Eucla"), + ("Australia/Hobart", "Australia/Hobart"), + ("Australia/Lindeman", "Australia/Lindeman"), + ("Australia/Lord_Howe", "Australia/Lord_Howe"), + ("Australia/Melbourne", "Australia/Melbourne"), + ("Australia/Perth", "Australia/Perth"), + ("Australia/Sydney", "Australia/Sydney"), + ("Europe/Amsterdam", "Europe/Amsterdam"), + ("Europe/Andorra", "Europe/Andorra"), + ("Europe/Astrakhan", "Europe/Astrakhan"), + ("Europe/Athens", "Europe/Athens"), + ("Europe/Belgrade", "Europe/Belgrade"), + ("Europe/Berlin", "Europe/Berlin"), + ("Europe/Brussels", "Europe/Brussels"), + ("Europe/Bucharest", "Europe/Bucharest"), + ("Europe/Budapest", "Europe/Budapest"), + ("Europe/Chisinau", "Europe/Chisinau"), + ("Europe/Copenhagen", "Europe/Copenhagen"), + ("Europe/Dublin", "Europe/Dublin"), + ("Europe/Gibraltar", "Europe/Gibraltar"), + ("Europe/Helsinki", "Europe/Helsinki"), + ("Europe/Istanbul", "Europe/Istanbul"), + ("Europe/Kaliningrad", "Europe/Kaliningrad"), + ("Europe/Kirov", "Europe/Kirov"), + ("Europe/Kyiv", "Europe/Kyiv"), + ("Europe/Lisbon", "Europe/Lisbon"), + ("Europe/London", "Europe/London"), + ("Europe/Luxembourg", "Europe/Luxembourg"), + ("Europe/Madrid", "Europe/Madrid"), + ("Europe/Malta", "Europe/Malta"), + ("Europe/Minsk", "Europe/Minsk"), + ("Europe/Monaco", "Europe/Monaco"), + ("Europe/Moscow", "Europe/Moscow"), + ("Europe/Oslo", "Europe/Oslo"), + ("Europe/Paris", "Europe/Paris"), + ("Europe/Prague", "Europe/Prague"), + ("Europe/Riga", "Europe/Riga"), + ("Europe/Rome", "Europe/Rome"), + ("Europe/Samara", "Europe/Samara"), + ("Europe/Saratov", "Europe/Saratov"), + ("Europe/Simferopol", "Europe/Simferopol"), + ("Europe/Sofia", "Europe/Sofia"), + ("Europe/Stockholm", "Europe/Stockholm"), + ("Europe/Tallinn", "Europe/Tallinn"), + ("Europe/Tirane", "Europe/Tirane"), + ("Europe/Ulyanovsk", "Europe/Ulyanovsk"), + ("Europe/Vaduz", "Europe/Vaduz"), + ("Europe/Vienna", "Europe/Vienna"), + ("Europe/Vilnius", "Europe/Vilnius"), + ("Europe/Volgograd", "Europe/Volgograd"), + ("Europe/Warsaw", "Europe/Warsaw"), + ("Europe/Zurich", "Europe/Zurich"), + ("Indian/Antananarivo", "Indian/Antananarivo"), + ("Indian/Chagos", "Indian/Chagos"), + ("Indian/Christmas", "Indian/Christmas"), + ("Indian/Cocos", "Indian/Cocos"), + ("Indian/Comoro", "Indian/Comoro"), + ("Indian/Kerguelen", "Indian/Kerguelen"), + ("Indian/Mahe", "Indian/Mahe"), + ("Indian/Maldives", "Indian/Maldives"), + ("Indian/Mauritius", "Indian/Mauritius"), + ("Indian/Mayotte", "Indian/Mayotte"), + ("Indian/Reunion", "Indian/Reunion"), + 
("Pacific/Apia", "Pacific/Apia"), + ("Pacific/Auckland", "Pacific/Auckland"), + ("Pacific/Bougainville", "Pacific/Bougainville"), + ("Pacific/Chatham", "Pacific/Chatham"), + ("Pacific/Chuuk", "Pacific/Chuuk"), + ("Pacific/Easter", "Pacific/Easter"), + ("Pacific/Efate", "Pacific/Efate"), + ("Pacific/Fakaofo", "Pacific/Fakaofo"), + ("Pacific/Fiji", "Pacific/Fiji"), + ("Pacific/Funafuti", "Pacific/Funafuti"), + ("Pacific/Galapagos", "Pacific/Galapagos"), + ("Pacific/Gambier", "Pacific/Gambier"), + ("Pacific/Guadalcanal", "Pacific/Guadalcanal"), + ("Pacific/Guam", "Pacific/Guam"), + ("Pacific/Honolulu", "Pacific/Honolulu"), + ("Pacific/Kanton", "Pacific/Kanton"), + ("Pacific/Kiritimati", "Pacific/Kiritimati"), + ("Pacific/Kosrae", "Pacific/Kosrae"), + ("Pacific/Kwajalein", "Pacific/Kwajalein"), + ("Pacific/Majuro", "Pacific/Majuro"), + ("Pacific/Marquesas", "Pacific/Marquesas"), + ("Pacific/Midway", "Pacific/Midway"), + ("Pacific/Nauru", "Pacific/Nauru"), + ("Pacific/Niue", "Pacific/Niue"), + ("Pacific/Norfolk", "Pacific/Norfolk"), + ("Pacific/Noumea", "Pacific/Noumea"), + ("Pacific/Pago_Pago", "Pacific/Pago_Pago"), + ("Pacific/Palau", "Pacific/Palau"), + ("Pacific/Pitcairn", "Pacific/Pitcairn"), + ("Pacific/Pohnpei", "Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "Pacific/Rarotonga"), + ("Pacific/Saipan", "Pacific/Saipan"), + ("Pacific/Tahiti", "Pacific/Tahiti"), + ("Pacific/Tarawa", "Pacific/Tarawa"), + ("Pacific/Tongatapu", "Pacific/Tongatapu"), + ("Pacific/Wake", "Pacific/Wake"), + ("Pacific/Wallis", "Pacific/Wallis"), + ("UTC", "UTC"), + ], + default="UTC", + max_length=255, + ), + ), + ] diff --git a/ietf/meeting/models.py b/ietf/meeting/models.py index 8fadf124d9..7d9e318aab 100644 --- a/ietf/meeting/models.py +++ b/ietf/meeting/models.py @@ -1,5 +1,4 @@ -# Copyright The IETF Trust 2007-2022, All Rights Reserved -# -*- coding: utf-8 -*- +# Copyright The IETF Trust 2007-2025, All Rights Reserved # old meeting models can be found in ../proceedings/models.py @@ -26,7 +25,6 @@ from django.urls import reverse as urlreverse from django.utils import timezone from django.utils.text import slugify -from django.utils.safestring import mark_safe from ietf.dbtemplate.models import DBTemplate from ietf.doc.models import Document @@ -35,12 +33,12 @@ from ietf.name.models import ( MeetingTypeName, TimeSlotTypeName, SessionStatusName, ConstraintName, RoomResourceName, ImportantDateName, TimerangeName, SlideSubmissionStatusName, ProceedingsMaterialTypeName, - SessionPurposeName, + SessionPurposeName, AttendanceTypeName, RegistrationTicketTypeName ) from ietf.person.models import Person from ietf.utils.decorators import memoize from ietf.utils.history import find_history_replacements_active_at, find_history_active_at -from ietf.utils.storage import NoLocationMigrationFileSystemStorage +from ietf.utils.storage import BlobShadowFileSystemStorage from ietf.utils.text import xslugify from ietf.utils.timezone import datetime_from_date, date_today from ietf.utils.models import ForeignKey @@ -50,15 +48,20 @@ ) from ietf.utils.fields import MissingOkImageField -countries = list(pytz.country_names.items()) -countries.sort(key=lambda x: x[1]) +# Set up countries / timezones, including an empty choice for fields +EMPTY_CHOICE = ("", "-" * 9) +COUNTRIES = (EMPTY_CHOICE,) + tuple( + sorted(pytz.country_names.items(), key=lambda x: x[1]) +) -timezones = [] -for name in pytz.common_timezones: - tzfn = os.path.join(settings.TZDATA_ICS_PATH, name + ".ics") - if not 
os.path.islink(tzfn): - timezones.append((name, name)) -timezones.sort() +_tzdata_ics_path = Path(settings.TZDATA_ICS_PATH) +TIMEZONES = (EMPTY_CHOICE,) + tuple( + sorted( + (name, name) + for name in pytz.common_timezones + if name != "GMT" and not (_tzdata_ics_path / f"{name}.ics").is_symlink() + ) +) class Meeting(models.Model): @@ -73,11 +76,11 @@ class Meeting(models.Model): days = models.IntegerField(default=7, null=False, validators=[MinValueValidator(1)], help_text="The number of days the meeting lasts") city = models.CharField(blank=True, max_length=255) - country = models.CharField(blank=True, max_length=2, choices=countries) + country = models.CharField(blank=True, max_length=2, choices=COUNTRIES) # We can't derive time-zone from country, as there are some that have # more than one timezone, and the pytz module doesn't provide timezone # lookup information for all relevant city/country combinations. - time_zone = models.CharField(max_length=255, choices=timezones, default='UTC') + time_zone = models.CharField(max_length=255, choices=TIMEZONES, default='UTC') idsubmit_cutoff_day_offset_00 = models.IntegerField(blank=True, default=settings.IDSUBMIT_DEFAULT_CUTOFF_DAY_OFFSET_00, help_text = "The number of days before the meeting start date when the submission of -00 drafts will be closed.") @@ -146,7 +149,7 @@ def get_00_cutoff(self): cutoff_date = importantdate.date else: cutoff_date = self.date + datetime.timedelta(days=ImportantDateName.objects.get(slug='idcutoff').default_offset_days) - cutoff_time = datetime_from_date(cutoff_date, datetime.timezone.utc) + self.idsubmit_cutoff_time_utc + cutoff_time = datetime_from_date(cutoff_date, datetime.UTC) + self.idsubmit_cutoff_time_utc return cutoff_time def get_01_cutoff(self): @@ -158,7 +161,7 @@ def get_01_cutoff(self): cutoff_date = importantdate.date else: cutoff_date = self.date + datetime.timedelta(days=ImportantDateName.objects.get(slug='idcutoff').default_offset_days) - cutoff_time = datetime_from_date(cutoff_date, datetime.timezone.utc) + self.idsubmit_cutoff_time_utc + cutoff_time = datetime_from_date(cutoff_date, datetime.UTC) + self.idsubmit_cutoff_time_utc return cutoff_time def get_reopen_time(self): @@ -234,9 +237,9 @@ def get_proceedings_materials(self): ).order_by('type__order') def get_attendance(self): - """Get the meeting attendance from the MeetingRegistrations + """Get the meeting attendance from the Registrations - Returns a NamedTuple with onsite and online attributes. Returns None if the record is unavailable + Returns a NamedTuple with onsite and remote attributes. Returns None if the record is unavailable for this meeting. """ number = self.get_number() @@ -247,25 +250,39 @@ def get_attendance(self): # MeetingRegistration.attended started conflating badge-pickup and session attendance before IETF 114. # We've separated session attendance off to ietf.meeting.Attended, but need to report attendance at older # meetings correctly. - - attended_per_meetingregistration = ( - Q(meetingregistration__meeting=self) & ( - Q(meetingregistration__attended=True) | - Q(meetingregistration__checkedin=True) + # + # Looking up by registration and attendance records separately and joining in + # python is far faster than combining the Q objects in the query (~100x). + # Further optimization may be possible, but the queries are tricky... 
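Before the implementation that follows, a minimal usage sketch of the interface the docstring above describes; `meeting` is a hypothetical Meeting instance and only the documented attribute names are assumed:

    attendance = meeting.get_attendance()
    if attendance is not None:  # None when no attendance record exists for this meeting
        handle_attendance(onsite=attendance.onsite, remote=attendance.remote)  # hypothetical consumer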
+ attended_per_meeting_registration = ( + Q(registration__meeting=self) & ( + Q(registration__attended=True) | + Q(registration__checkedin=True) ) ) + attendees_by_reg = set( + Person.objects.filter(attended_per_meeting_registration).values_list( + "pk", flat=True + ) + ) + attended_per_meeting_attended = ( Q(attended__session__meeting=self) # Note that we are not filtering to plenary, wg, or rg sessions # as we do for nomcom eligibility - if picking up a badge (see above) # is good enough, just attending e.g. a training session is also good enough ) - attended = Person.objects.filter( - attended_per_meetingregistration | attended_per_meeting_attended - ).distinct() - - onsite=set(attended.filter(meetingregistration__meeting=self, meetingregistration__reg_type='onsite')) - remote=set(attended.filter(meetingregistration__meeting=self, meetingregistration__reg_type='remote')) + attendees_by_att = set( + Person.objects.filter(attended_per_meeting_attended).values_list( + "pk", flat=True + ) + ) + + attendees = Person.objects.filter( + pk__in=attendees_by_att | attendees_by_reg + ) + onsite = set(attendees.filter(registration__meeting=self, registration__tickets__attendance_type__slug='onsite')) + remote = set(attendees.filter(registration__meeting=self, registration__tickets__attendance_type__slug='remote')) remote.difference_update(onsite) return Attendance( @@ -367,27 +384,39 @@ def vtimezone(self): pass return None - def set_official_schedule(self, schedule): - if self.schedule != schedule: - self.schedule = schedule - self.save() def updated(self): # should be Meeting.modified, but we don't have that - min_time = pytz.utc.localize(datetime.datetime(1970, 1, 1, 0, 0, 0)) - timeslots_updated = self.timeslot_set.aggregate(Max('modified'))["modified__max"] or min_time - sessions_updated = self.session_set.aggregate(Max('modified'))["modified__max"] or min_time - assignments_updated = min_time + timeslots_updated = self.timeslot_set.aggregate(Max('modified'))["modified__max"] + sessions_updated = self.session_set.aggregate(Max('modified'))["modified__max"] + assignments_updated = None if self.schedule: - assignments_updated = SchedTimeSessAssignment.objects.filter(schedule__in=[self.schedule, self.schedule.base if self.schedule else None]).aggregate(Max('modified'))["modified__max"] or min_time - return max(timeslots_updated, sessions_updated, assignments_updated) + assignments_updated = SchedTimeSessAssignment.objects.filter(schedule__in=[self.schedule, self.schedule.base if self.schedule else None]).aggregate(Max('modified'))["modified__max"] + dts = [timeslots_updated, sessions_updated, assignments_updated] + valid_only = [dt for dt in dts if dt is not None] + return max(valid_only) if valid_only else None @memoize def previous_meeting(self): return Meeting.objects.filter(type_id=self.type_id,date__lt=self.date).order_by('-date').first() def uses_notes(self): - return self.date>=datetime.date(2020,7,6) + if self.type_id != 'ietf': + return True + num = self.get_number() + return num is not None and num >= 108 + + def has_recordings(self): + if self.type_id != 'ietf': + return True + num = self.get_number() + return num is not None and num >= 80 + + def has_chat_logs(self): + if self.type_id != 'ietf': + return True; + num = self.get_number() + return num is not None and num >= 60 def meeting_start(self): """Meeting-local midnight at the start of the meeting date""" @@ -456,24 +485,9 @@ class Room(models.Model): # end floorplan-related stuff def __str__(self): - return u"%s size: %s" % 
(self.name, self.capacity) - - def delete_timeslots(self): - for ts in self.timeslot_set.all(): - ts.sessionassignments.all().delete() - ts.delete() - - def create_timeslots(self): - days, time_slices, slots = self.meeting.build_timeslices() - for day in days: - for ts in slots[day]: - TimeSlot.objects.create(type_id=ts.type_id, - meeting=self.meeting, - name=ts.name, - time=ts.time, - location=self, - duration=ts.duration) - #self.meeting.create_all_timeslots() + if len(self.functional_name) > 0 and self.functional_name != self.name: + return f"{self.name} [{self.functional_name}] (size: {self.capacity})" + return f"{self.name} (size: {self.capacity})" def dom_id(self): return "room%u" % (self.pk) @@ -497,14 +511,6 @@ def right(self): return max(self.x1, self.x2) if (self.x1 and self.x2) else 0 def bottom(self): return max(self.y1, self.y2) if (self.y1 and self.y2) else 0 - def functional_display_name(self): - if not self.functional_name: - return "" - if 'breakout' in self.functional_name.lower(): - return "" - if self.functional_name[0].isdigit(): - return "" - return self.functional_name # audio stream support def audio_stream_url(self): urlresources = [ur for ur in self.urlresource_set.all() if ur.name_id == 'audiostream'] @@ -539,7 +545,12 @@ class FloorPlan(models.Model): modified= models.DateTimeField(auto_now=True) meeting = ForeignKey(Meeting) order = models.SmallIntegerField() - image = models.ImageField(storage=NoLocationMigrationFileSystemStorage(), upload_to=floorplan_path, blank=True, default=None) + image = models.ImageField( + storage=BlobShadowFileSystemStorage(kind="floorplan"), + upload_to=floorplan_path, + blank=False, + default=None, + ) # class Meta: ordering = ['-id',] @@ -570,7 +581,7 @@ class TimeSlot(models.Model): duration = models.DurationField(default=datetime.timedelta(0)) location = ForeignKey(Room, blank=True, null=True) show_location = models.BooleanField(default=True, help_text="Show location in agenda.") - sessions = models.ManyToManyField('Session', related_name='slots', through='SchedTimeSessAssignment', blank=True, help_text="Scheduled session, if any.") + sessions = models.ManyToManyField('meeting.Session', related_name='slots', through='meeting.SchedTimeSessAssignment', blank=True, help_text="Scheduled session, if any.") modified = models.DateTimeField(auto_now=True) # @@ -582,19 +593,23 @@ def session(self): self._session_cache = self.sessions.filter(timeslotassignments__schedule__in=[self.meeting.schedule, self.meeting.schedule.base if self.meeting else None]).first() return self._session_cache - def meeting_date(self): - return self.time.date() + # Unused + # + # def meeting_date(self): + # return self.time.date() - def registration(self): - # below implements a object local cache - # it tries to find a timeslot of type registration which starts at the same time as this slot - # so that it can be shown at the top of the agenda. - if not hasattr(self, '_reg_info'): - try: - self._reg_info = TimeSlot.objects.get(meeting=self.meeting, time__month=self.time.month, time__day=self.time.day, type="reg") - except TimeSlot.DoesNotExist: - self._reg_info = None - return self._reg_info + # Unused + # + # def registration(self): + # # below implements a object local cache + # # it tries to find a timeslot of type registration which starts at the same time as this slot + # # so that it can be shown at the top of the agenda. 
+ # if not hasattr(self, '_reg_info'): + # try: + # self._reg_info = TimeSlot.objects.get(meeting=self.meeting, time__month=self.time.month, time__day=self.time.day, type="reg") + # except TimeSlot.DoesNotExist: + # self._reg_info = None + # return self._reg_info def __str__(self): location = self.get_location() @@ -621,30 +636,33 @@ def get_hidden_location(self): def get_location(self): return self.get_hidden_location() if self.show_location else "" - def get_functional_location(self): - name_parts = [] - room = self.location - if room and room.functional_name: - name_parts.append(room.functional_name) - location = self.get_hidden_location() - if location: - name_parts.append(location) - return ' - '.join(name_parts) - - def get_html_location(self): - if not hasattr(self, '_cached_html_location'): - self._cached_html_location = self.get_location() - if len(self._cached_html_location) > 8: - self._cached_html_location = mark_safe(self._cached_html_location.replace('/', '/')) - else: - self._cached_html_location = mark_safe(self._cached_html_location.replace(' ', ' ')) - return self._cached_html_location + # Unused + # + # def get_functional_location(self): + # name_parts = [] + # room = self.location + # if room and room.functional_name: + # name_parts.append(room.functional_name) + # location = self.get_hidden_location() + # if location: + # name_parts.append(location) + # return ' - '.join(name_parts) + + # def get_html_location(self): + # if not hasattr(self, '_cached_html_location'): + # self._cached_html_location = self.get_location() + # if len(self._cached_html_location) > 8: + # self._cached_html_location = mark_safe(self._cached_html_location.replace('/', '/')) + # else: + # self._cached_html_location = mark_safe(self._cached_html_location.replace(' ', ' ')) + # return self._cached_html_location def tz(self): return self.meeting.tz() - def tzname(self): - return self.tz().tzname(self.time) + # Unused + # def tzname(self): + # return self.tz().tzname(self.time) def utc_start_time(self): return self.time.astimezone(pytz.utc) # USE_TZ is True, so time is aware @@ -658,30 +676,32 @@ def local_start_time(self): def local_end_time(self): return (self.time.astimezone(pytz.utc) + self.duration).astimezone(self.tz()) - @property - def js_identifier(self): - # this returns a unique identifier that is js happy. - # {{s.timeslot.time|date:'Y-m-d'}}_{{ s.timeslot.time|date:'Hi' }}" - # also must match: - # {{r|slugify}}_{{day}}_{{slot.0|date:'Hi'}} - dom_id="ts%u" % (self.pk) - if self.location is not None: - dom_id = self.location.dom_id() - return "%s_%s_%s" % (dom_id, self.time.strftime('%Y-%m-%d'), self.time.strftime('%H%M')) - - def delete_concurrent_timeslots(self): - """Delete all timeslots which are in the same time as this slot""" - # can not include duration in filter, because there is no support - # for having it a WHERE clause. - # below will delete self as well. - for ts in self.meeting.timeslot_set.filter(time=self.time).all(): - if ts.duration!=self.duration: - continue - - # now remove any schedule that might have been made to this - # timeslot. - ts.sessionassignments.all().delete() - ts.delete() + # Unused + # + # @property + # def js_identifier(self): + # # this returns a unique identifier that is js happy. 
+ # # {{s.timeslot.time|date:'Y-m-d'}}_{{ s.timeslot.time|date:'Hi' }}" + # # also must match: + # # {{r|slugify}}_{{day}}_{{slot.0|date:'Hi'}} + # dom_id="ts%u" % (self.pk) + # if self.location is not None: + # dom_id = self.location.dom_id() + # return "%s_%s_%s" % (dom_id, self.time.strftime('%Y-%m-%d'), self.time.strftime('%H%M')) + + # def delete_concurrent_timeslots(self): + # """Delete all timeslots which are in the same time as this slot""" + # # can not include duration in filter, because there is no support + # # for having it a WHERE clause. + # # below will delete self as well. + # for ts in self.meeting.timeslot_set.filter(time=self.time).all(): + # if ts.duration!=self.duration: + # continue + + # # now remove any schedule that might have been made to this + # # timeslot. + # ts.sessionassignments.all().delete() + # ts.delete() """ Find a timeslot that comes next, in the same room. It must be on the same day, @@ -767,9 +787,6 @@ def official_token(self): else: return "unofficial" - def delete_assignments(self): - self.assignments.all().delete() - @property def qs_assignments_with_sessions(self): return self.assignments.filter(session__isnull=False) @@ -782,10 +799,6 @@ def qs_sessions_scheduled(self): """Get QuerySet containing sessions assigned to timeslots by this schedule""" return Session.objects.filter(timeslotassignments__schedule=self) - def delete_schedule(self): - self.assignments.all().delete() - self.delete() - # to be renamed SchedTimeSessAssignments (stsa) class SchedTimeSessAssignment(models.Model): """ @@ -798,7 +811,6 @@ class SchedTimeSessAssignment(models.Model): schedule = ForeignKey('Schedule', null=False, blank=False, related_name='assignments') extendedfrom = ForeignKey('self', null=True, default=None, help_text="Timeslot this session is an extension of.") modified = models.DateTimeField(auto_now=True) - notes = models.TextField(blank=True) badness = models.IntegerField(default=0, blank=True, null=True) pinned = models.BooleanField(default=False, help_text="Do not move session during automatic placement.") @@ -931,8 +943,8 @@ def brief_display(self): class SessionPresentation(models.Model): - session = ForeignKey('Session') - document = ForeignKey(Document) + session = ForeignKey('Session', related_name="presentations") + document = ForeignKey(Document, related_name="presentations") rev = models.CharField(verbose_name="revision", max_length=16, null=True, blank=True) order = models.PositiveSmallIntegerField(default=0) @@ -944,8 +956,6 @@ class Meta: def __str__(self): return u"%s -> %s-%s" % (self.session, self.document.name, self.rev) -constraint_cache_uses = 0 -constraint_cache_initials = 0 class SessionQuerySet(models.QuerySet): def with_current_status(self): @@ -1044,7 +1054,7 @@ class Session(models.Model): group = ForeignKey(Group) # The group type historically determined the session type. BOFs also need to be added as a group. Note that not all meeting requests have a natural group to associate with. 
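As an aside before the remaining Session fields: the related_name="presentations" arguments added to SessionPresentation above are what enable the accessor rename applied throughout the rest of this patch (template tags, test data, selenium tests). Roughly, with hypothetical session and document instances:

    # before: Django's implicit reverse accessors
    session.sessionpresentation_set.filter(document__type__slug="slides")
    # after: the explicit related_name on both foreign keys
    session.presentations.filter(document__type__slug="slides")
    document.presentations.first().session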
joint_with_groups = models.ManyToManyField(Group, related_name='sessions_joint_in',blank=True) attendees = models.IntegerField(null=True, blank=True) - agenda_note = models.CharField(blank=True, max_length=255) + agenda_note = models.CharField(blank=True, max_length=512) requested_duration = models.DurationField(default=datetime.timedelta(0)) comments = models.TextField(blank=True) scheduled = models.DateTimeField(null=True, blank=True) @@ -1053,6 +1063,7 @@ class Session(models.Model): on_agenda = models.BooleanField(default=True, help_text='Is this session visible on the meeting agenda?') has_onsite_tool = models.BooleanField(default=False, help_text="Does this session use the officially supported onsite and remote tooling?") chat_room = models.CharField(blank=True, max_length=32, help_text='Name of Zulip stream, if different from group acronym') + meetecho_recording_name = models.CharField(blank=True, max_length=64, help_text="Name of the meetecho recording") tombstone_for = models.ForeignKey('Session', blank=True, null=True, help_text="This session is the tombstone for a session that was rescheduled", on_delete=models.CASCADE) @@ -1071,7 +1082,7 @@ def get_material(self, material_type, only_one): for d in l: d.meeting_related = lambda: True else: - l = self.materials.filter(type=material_type).exclude(states__type=material_type, states__slug='deleted').order_by('sessionpresentation__order') + l = self.materials.filter(type=material_type).exclude(states__type=material_type, states__slug='deleted').order_by('presentations__order') if only_one: if l: @@ -1091,16 +1102,25 @@ def minutes(self): self._cached_minutes = self.get_material("minutes", only_one=True) return self._cached_minutes + def narrative_minutes(self): + if not hasattr(self, '_cached_narrative_minutes'): + self._cached_minutes = self.get_material("narrativeminutes", only_one=True) + return self._cached_minutes + def recordings(self): return list(self.get_material("recording", only_one=False)) def bluesheets(self): return list(self.get_material("bluesheets", only_one=False)) + def chatlogs(self): + return list(self.get_material("chatlog", only_one=False)) + def slides(self): if not hasattr(self, "_slides_cache"): self._slides_cache = list(self.get_material("slides", only_one=False)) return self._slides_cache + def drafts(self): return list(self.materials.filter(type='draft')) @@ -1135,30 +1155,6 @@ def order_in_meeting(self): self._order_in_meeting = session_list.index(self) + 1 if self in session_list else 0 return self._order_in_meeting - def all_meeting_sessions_cancelled(self): - return set(s.current_status for s in self.all_meeting_sessions_for_group()) == {'canceled'} - - def all_meeting_recordings(self): - recordings = [] # These are not sets because we need to preserve relative ordering or redo the ordering work later - sessions = self.all_meeting_sessions_for_group() - for session in sessions: - recordings.extend([r for r in session.recordings() if r not in recordings]) - return recordings - - def all_meeting_bluesheets(self): - bluesheets = [] - sessions = self.all_meeting_sessions_for_group() - for session in sessions: - bluesheets.extend([b for b in session.bluesheets() if b not in bluesheets]) - return bluesheets - - def all_meeting_drafts(self): - drafts = [] - sessions = self.all_meeting_sessions_for_group() - for session in sessions: - drafts.extend([d for d in session.drafts() if d not in drafts]) - return drafts - def all_meeting_agendas(self): agendas = [] sessions = self.all_meeting_sessions_for_group() @@ 
-1188,7 +1184,7 @@ def can_manage_materials(self, user): return can_manage_materials(user,self.group) def is_material_submission_cutoff(self): - return date_today(datetime.timezone.utc) > self.meeting.get_submission_correction_date() + return date_today(datetime.UTC) > self.meeting.get_submission_correction_date() def joint_with_groups_acronyms(self): return [group.acronym for group in self.joint_with_groups.all()] @@ -1231,19 +1227,30 @@ def special_request_token(self): else: return "" + @staticmethod + def _alpha_str(n: int): + """Convert integer to string of a-z characters (a, b, c, ..., aa, ab, ...)""" + chars = [] + while True: + chars.append(string.ascii_lowercase[n % 26]) + n //= 26 + # for 2nd letter and beyond, 0 means end the string + if n == 0: + break + # beyond the first letter, no need to represent a 0, so decrement + n -= 1 + return "".join(chars[::-1]) + def docname_token(self): sess_mtg = Session.objects.filter(meeting=self.meeting, group=self.group).order_by('pk') index = list(sess_mtg).index(self) - return 'sess%s' % (string.ascii_lowercase[index]) + return f"sess{self._alpha_str(index)}" def docname_token_only_for_multiple(self): sess_mtg = Session.objects.filter(meeting=self.meeting, group=self.group).order_by('pk') if len(list(sess_mtg)) > 1: index = list(sess_mtg).index(self) - if index < 26: - token = 'sess%s' % (string.ascii_lowercase[index]) - else: - token = 'sess%s%s' % (string.ascii_lowercase[index//26],string.ascii_lowercase[index%26]) + token = f"sess{self._alpha_str(index)}" return token return None @@ -1275,19 +1282,6 @@ def agenda_text(self): else: return "The agenda has not been uploaded yet." - def agenda_file(self): - if not hasattr(self, '_agenda_file'): - self._agenda_file = "" - - agenda = self.agenda() - if not agenda: - return "" - - # FIXME: uploaded_filename should be replaced with a function that computes filenames when they are of a fixed schema and not uploaded names - self._agenda_file = "%s/agenda/%s" % (self.meeting.number, agenda.uploaded_filename) - - return self._agenda_file - def chat_room_name(self): if self.chat_room: return self.chat_room @@ -1301,10 +1295,21 @@ def chat_room_url(self): return settings.CHAT_URL_PATTERN.format(chat_room_name=self.chat_room_name()) def chat_archive_url(self): - chatlog = self.sessionpresentation_set.filter(document__type__slug='chatlog').first() - if chatlog is not None: - return chatlog.document.get_href() - elif self.meeting.date <= datetime.date(2022, 7, 15): + + if hasattr(self,"prefetched_active_materials"): + chatlog_doc = None + for doc in self.prefetched_active_materials: + if doc.type_id=="chatlog": + chatlog_doc = doc + break + if chatlog_doc is not None: + return chatlog_doc.get_href() + else: + chatlog = self.presentations.filter(document__type__slug='chatlog').first() + if chatlog is not None: + return chatlog.document.get_href() + + if self.meeting.date <= datetime.date(2022, 7, 15): # datatracker 8.8.0 released on 2022 July 15; before that, fall back to old log URL return f'https://www.ietf.org/jabber/logs/{ self.chat_room_name() }?C=M;O=D' elif hasattr(settings,'CHAT_ARCHIVE_URL_PATTERN'): @@ -1331,32 +1336,43 @@ def group_parent_at_the_time(self): return self.meeting.group_at_the_time(self.group_at_the_time().parent) def audio_stream_url(self): - if ( - self.meeting.type.slug == "ietf" - and self.has_onsite_tool - and (url := getattr(settings, "MEETECHO_AUDIO_STREAM_URL", "")) - ): + url = getattr(settings, "MEETECHO_AUDIO_STREAM_URL", "") + if self.meeting.type.slug == "ietf" and 
self.has_onsite_tool and url: return url.format(session=self) return None def video_stream_url(self): - if ( - self.meeting.type.slug == "ietf" - and self.has_onsite_tool - and (url := getattr(settings, "MEETECHO_VIDEO_STREAM_URL", "")) - ): + url = getattr(settings, "MEETECHO_VIDEO_STREAM_URL", "") + if self.meeting.type.slug == "ietf" and self.has_onsite_tool and url: return url.format(session=self) return None def onsite_tool_url(self): - if ( - self.meeting.type.slug == "ietf" - and self.has_onsite_tool - and (url := getattr(settings, "MEETECHO_ONSITE_TOOL_URL", "")) - ): + url = getattr(settings, "MEETECHO_ONSITE_TOOL_URL", "") + if self.meeting.type.slug == "ietf" and self.has_onsite_tool and url: return url.format(session=self) return None + def _session_recording_url_label(self): + otsa = self.official_timeslotassignment() + if otsa is None: + return None + if self.meeting.type.slug == "ietf" and self.has_onsite_tool: + session_label = f"IETF{self.meeting.number}-{self.group.acronym.upper()}-{otsa.timeslot.time.strftime('%Y%m%d-%H%M')}" + else: + session_label = f"IETF-{self.group.acronym.upper()}-{otsa.timeslot.time.strftime('%Y%m%d-%H%M')}" + return session_label + + def session_recording_url(self): + url_formatter = getattr(settings, "MEETECHO_SESSION_RECORDING_URL", "") + url = None + name = self.meetecho_recording_name + if name is None or name.strip() == "": + name = self._session_recording_url_label() + if url_formatter.strip() != "" and name is not None: + url = url_formatter.format(session_label=name) + return url + class SchedulingEvent(models.Model): session = ForeignKey(Session) @@ -1385,7 +1401,7 @@ class SlideSubmission(models.Model): apply_to_all = models.BooleanField(default=False) submitter = ForeignKey(Person) status = ForeignKey(SlideSubmissionStatusName, null=True, default='pending', on_delete=models.SET_NULL) - doc = ForeignKey(Document, null=True, on_delete=models.SET_NULL) + doc = ForeignKey(Document, blank=True, null=True, on_delete=models.SET_NULL) def staged_filepath(self): return os.path.join(settings.SLIDE_STAGING_PATH , self.filename) @@ -1436,8 +1452,12 @@ class MeetingHost(models.Model): """Meeting sponsor""" meeting = ForeignKey(Meeting, related_name='meetinghosts') name = models.CharField(max_length=255, blank=False) + # TODO-BLOBSTORE - capture these logos and look for other ImageField like model fields. 
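Stepping back to the session_recording_url() helpers added above, an illustrative sketch before the MeetingHost changes continue below; the URL pattern and the meeting/group values are hypothetical, since MEETECHO_SESSION_RECORDING_URL is deployment configuration:

    # hypothetical setting:
    # MEETECHO_SESSION_RECORDING_URL = "https://recordings.example.org/playout/?session={session_label}"
    # For an IETF session with has_onsite_tool set and no meetecho_recording_name override,
    # _session_recording_url_label() yields a label like "IETF123-MARS-20250721-1300", so
    # session_recording_url() would return
    # "https://recordings.example.org/playout/?session=IETF123-MARS-20250721-1300"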
logo = MissingOkImageField( - storage=NoLocationMigrationFileSystemStorage(location=settings.MEETINGHOST_LOGO_PATH), + storage=BlobShadowFileSystemStorage( + kind="meetinghostlogo", + location=settings.MEETINGHOST_LOGO_PATH, + ), upload_to=_host_upload_path, width_field='logo_width', height_field='logo_height', @@ -1452,7 +1472,7 @@ class MeetingHost(models.Model): validate_file_extension, settings.MEETING_VALID_UPLOAD_EXTENSIONS['meetinghostlogo'], ), - WrappedValidator( + WrappedValidator( validate_mime_type, settings.MEETING_VALID_UPLOAD_MIME_TYPES['meetinghostlogo'], True, @@ -1471,9 +1491,56 @@ class Meta: class Attended(models.Model): person = ForeignKey(Person) session = ForeignKey(Session) + time = models.DateTimeField(default=timezone.now, null=True, blank=True) + origin = models.CharField(max_length=32, default='datatracker') class Meta: unique_together = (('person', 'session'),) def __str__(self): return f'{self.person} at {self.session}' + + +class RegistrationManager(models.Manager): + def onsite(self): + return self.get_queryset().filter(tickets__attendance_type__slug='onsite') + + def remote(self): + return self.get_queryset().filter(tickets__attendance_type__slug='remote').exclude(tickets__attendance_type__slug='onsite') + +class Registration(models.Model): + """Registration attendee records from the IETF registration system""" + meeting = ForeignKey(Meeting) + first_name = models.CharField(max_length=255) + last_name = models.CharField(max_length=255) + affiliation = models.CharField(blank=True, max_length=255) + country_code = models.CharField(max_length=2) # ISO 3166 + person = ForeignKey(Person, blank=True, null=True, on_delete=models.PROTECT) + email = models.EmailField(blank=True, null=True) + # attended was used prior to the introduction of the ietf.meeting.Attended model and is still used by + # Meeting.get_attendance() for older meetings. It should not be used except for dealing with legacy data. 
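Returning briefly to the RegistrationManager defined above (the Registration model definition continues below): once the manager is attached as Registration.objects, its helpers can be used as in this sketch, where `meeting` is a hypothetical Meeting instance:

    onsite_regs = Registration.objects.onsite().filter(meeting=meeting)
    remote_regs = Registration.objects.remote().filter(meeting=meeting)
    # remote() excludes registrations that also hold an onsite ticket, so the two
    # querysets do not overlap.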
+ attended = models.BooleanField(default=False) + # checkedin indicates that the badge was picked up + checkedin = models.BooleanField(default=False) + + # custom manager + objects = RegistrationManager() + + def __str__(self): + return "{} {}".format(self.first_name, self.last_name) + + @property + def attendance_type(self): + if self.tickets.filter(attendance_type__slug='onsite').exists(): + return 'onsite' + elif self.tickets.filter(attendance_type__slug='remote').exists(): + return 'remote' + return None + +class RegistrationTicket(models.Model): + registration = ForeignKey(Registration, related_name='tickets') + attendance_type = ForeignKey(AttendanceTypeName, on_delete=models.PROTECT) + ticket_type = ForeignKey(RegistrationTicketTypeName, on_delete=models.PROTECT) + + def __str__(self): + return "{}:{}".format(self.attendance_type, self.ticket_type) diff --git a/ietf/meeting/resources.py b/ietf/meeting/resources.py index dc273c04cf..490b75f925 100644 --- a/ietf/meeting/resources.py +++ b/ietf/meeting/resources.py @@ -11,12 +11,23 @@ from ietf import api -from ietf.meeting.models import ( Meeting, ResourceAssociation, Constraint, Room, Schedule, Session, - TimeSlot, SchedTimeSessAssignment, SessionPresentation, FloorPlan, - UrlResource, ImportantDate, SlideSubmission, SchedulingEvent, - BusinessConstraint, ProceedingsMaterial, MeetingHost, Attended) +from ietf.meeting.models import (Meeting, ResourceAssociation, Constraint, Room, + Schedule, Session, + TimeSlot, SchedTimeSessAssignment, SessionPresentation, + FloorPlan, + UrlResource, ImportantDate, SlideSubmission, + SchedulingEvent, + BusinessConstraint, ProceedingsMaterial, MeetingHost, + Attended, + Registration, RegistrationTicket) + +from ietf.name.resources import ( + AttendanceTypeNameResource, + MeetingTypeNameResource, + RegistrationTicketTypeNameResource, +) + -from ietf.name.resources import MeetingTypeNameResource class MeetingResource(ModelResource): type = ToOneField(MeetingTypeNameResource, 'type') schedule = ToOneField('ietf.meeting.resources.ScheduleResource', 'schedule', null=True) @@ -269,7 +280,6 @@ class Meta: filtering = { "id": ALL, "modified": ALL, - "notes": ALL, "badness": ALL, "pinned": ALL, "timeslot": ALL_WITH_RELATIONS, @@ -432,3 +442,52 @@ class Meta: "session": ALL_WITH_RELATIONS, } api.meeting.register(AttendedResource()) + +from ietf.person.resources import PersonResource +class RegistrationResource(ModelResource): + meeting = ToOneField(MeetingResource, 'meeting') + person = ToOneField(PersonResource, 'person', null=True) + tickets = ToManyField( + 'ietf.meeting.resources.RegistrationTicketResource', + 'tickets', + full=True, + ) + + class Meta: + queryset = Registration.objects.all() + serializer = api.Serializer() + cache = SimpleCache() + #resource_name = 'registration' + ordering = ['id', ] + filtering = { + "id": ALL, + "first_name": ALL, + "last_name": ALL, + "affiliation": ALL, + "country_code": ALL, + "email": ALL, + "attended": ALL, + "checkedin": ALL, + "meeting": ALL_WITH_RELATIONS, + "person": ALL_WITH_RELATIONS, + "tickets": ALL_WITH_RELATIONS, + } +api.meeting.register(RegistrationResource()) + +class RegistrationTicketResource(ModelResource): + registration = ToOneField(RegistrationResource, 'registration') + attendance_type = ToOneField(AttendanceTypeNameResource, 'attendance_type') + ticket_type = ToOneField(RegistrationTicketTypeNameResource, 'ticket_type') + class Meta: + queryset = RegistrationTicket.objects.all() + serializer = api.Serializer() + cache = SimpleCache() + 
#resource_name = 'registrationticket' + ordering = ['id', ] + filtering = { + "id": ALL, + "ticket_type": ALL_WITH_RELATIONS, + "attendance_type": ALL_WITH_RELATIONS, + "registration": ALL_WITH_RELATIONS, + } +api.meeting.register(RegistrationTicketResource()) diff --git a/ietf/meeting/tasks.py b/ietf/meeting/tasks.py new file mode 100644 index 0000000000..a73763560b --- /dev/null +++ b/ietf/meeting/tasks.py @@ -0,0 +1,248 @@ +# Copyright The IETF Trust 2024-2026, All Rights Reserved +# +# Celery task definitions +# +import datetime + +from itertools import batched + +from celery import shared_task, chain +from django.db.models import IntegerField +from django.db.models.functions import Cast +from django.utils import timezone + +from ietf.utils import log +from .models import Meeting +from .utils import ( + generate_proceedings_content, + resolve_materials_for_one_meeting, + store_blobs_for_one_meeting, +) +from .views import generate_agenda_data +from .utils import fetch_attendance_from_meetings + + +@shared_task +def agenda_data_refresh_task(num=None): + """Refresh agenda data for one plenary meeting + + If `num` is `None`, refreshes data for the current meeting. + """ + log.log( + f"Refreshing agenda data for {f"IETF-{num}" if num else "current IETF meeting"}" + ) + try: + generate_agenda_data(num, force_refresh=True) + except Exception as err: + # Log and swallow exceptions so failure on one meeting won't break a chain of + # tasks. This is used by agenda_data_refresh_all_task(). + log.log(f"ERROR: Refreshing agenda data failed for num={num}: {err}") + + +@shared_task +def agenda_data_refresh(): + """Deprecated. Use agenda_data_refresh_task() instead. + + TODO remove this after switching the periodic task to the new name + """ + log.log("Deprecated agenda_data_refresh task called!") + agenda_data_refresh_task() + + +@shared_task +def agenda_data_refresh_all_task(*, batch_size=10): + """Refresh agenda data for all plenary meetings + + Executes as a chain of tasks, each computing up to `batch_size` meetings + in a single task. + """ + meeting_numbers = sorted( + Meeting.objects.annotate( + number_as_int=Cast("number", output_field=IntegerField()) + ) + .filter(type_id="ietf", number_as_int__gt=64) + .values_list("number_as_int", flat=True) + ) + # Batch using chained maps rather than celery.chunk so we only use one worker + # at a time. + batched_task_chain = chain( + *( + agenda_data_refresh_task.map(nums) + for nums in batched(meeting_numbers, batch_size) + ) + ) + batched_task_chain.delay() + + +@shared_task +def proceedings_content_refresh_task(*, all=False): + """Refresh meeting proceedings cache + + If `all` is `False`, then refreshes the cache for meetings whose numbers modulo + 24 equal the current hour number (0-23). Scheduling the task once per hour will + then result in all proceedings being recomputed daily, with no more than two per + hour (now) or a few per hour in the next decade. That keeps the computation time + to under a couple minutes on our current production system. 
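A quick worked instance of the once-a-day arithmetic just described (meeting numbers are illustrative):

    # IETF 120 -> 120 % 24 == 0 -> refreshed during the 00:xx hourly run
    # IETF 121 -> 121 % 24 == 1 -> refreshed during the 01:xx hourly run
    # IETF 96 also maps to hour 0, which is why at most two meetings currently share a run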
+
+    If `all` is True, refreshes all meetings
+    """
+    now = timezone.now()
+
+    for meeting in Meeting.objects.filter(type_id="ietf").order_by("number"):
+        if meeting.proceedings_format_version == 1:
+            continue  # skip v1 proceedings, they're stored externally
+        num = meeting.get_number()  # convert str -> int
+        if num is None:
+            log.log(
+                f"Not refreshing proceedings for meeting {meeting.number}: "
+                f"type is 'ietf' but get_number() returned None"
+            )
+        elif all or (num % 24 == now.hour):
+            log.log(f"Refreshing proceedings for meeting {meeting.number}...")
+            generate_proceedings_content(meeting, force_refresh=True)
+
+
+@shared_task
+def fetch_meeting_attendance_task():
+    # fetch most recent two meetings
+    meetings = Meeting.objects.filter(type="ietf", date__lte=timezone.now()).order_by(
+        "-date"
+    )[:2]
+    try:
+        stats = fetch_attendance_from_meetings(meetings)
+    except RuntimeError as err:
+        log.log(f"Error in fetch_meeting_attendance_task: {err}")
+    else:
+        for meeting, meeting_stats in zip(meetings, stats):
+            log.log(
+                "Fetched data for meeting {:>3}: {:4d} created, {:4d} updated, {:4d} deleted, {:4d} processed".format(
+                    meeting.number,
+                    meeting_stats["created"],
+                    meeting_stats["updated"],
+                    meeting_stats["deleted"],
+                    meeting_stats["processed"],
+                )
+            )
+
+
+def _select_meetings(
+    meetings: list[str] | None = None,
+    meetings_since: str | None = None,
+    meetings_until: str | None = None,
+):
+    """Select meetings by number or date range"""
+    # IETF-1 = 1986-01-16
+    EARLIEST_MEETING_DATE = datetime.datetime(1986, 1, 1)
+    meetings_since_dt: datetime.datetime | None = None
+    meetings_until_dt: datetime.datetime | None = None
+
+    if meetings_since == "zero":
+        meetings_since_dt = EARLIEST_MEETING_DATE
+    elif meetings_since is not None:
+        try:
+            meetings_since_dt = datetime.datetime.fromisoformat(meetings_since)
+        except ValueError:
+            log.log(
+                f"Failed to parse meetings_since='{meetings_since}' with fromisoformat"
+            )
+            raise
+
+    if meetings_until is not None:
+        try:
+            meetings_until_dt = datetime.datetime.fromisoformat(meetings_until)
+        except ValueError:
+            log.log(
+                f"Failed to parse meetings_until='{meetings_until}' with fromisoformat"
+            )
+            raise
+        if meetings_since_dt is None:
+            # if we only got meetings_until, start from the first meeting
+            meetings_since_dt = EARLIEST_MEETING_DATE
+
+    if meetings is None:
+        if meetings_since_dt is None:
+            log.log("No meetings requested, doing nothing.")
+            return Meeting.objects.none()
+        meetings_qs = Meeting.objects.filter(date__gte=meetings_since_dt)
+        if meetings_until_dt is not None:
+            meetings_qs = meetings_qs.filter(date__lte=meetings_until_dt)
+            log.log(
+                "Selecting meetings between "
+                f"{meetings_since_dt} and {meetings_until_dt}"
+            )
+        else:
+            log.log(f"Selecting meetings since {meetings_since_dt}")
+    else:
+        if meetings_since_dt is not None:
+            log.log(
+                "Ignoring meetings_since and meetings_until "
+                "because specific meetings were requested."
+            )
+        meetings_qs = Meeting.objects.filter(number__in=meetings)
+    return meetings_qs
+
+
+@shared_task
+def resolve_meeting_materials_task(
+    *,  # only allow kw arguments
+    meetings: list[str] | None = None,
+    meetings_since: str | None = None,
+    meetings_until: str | None = None,
+):
+    """Run materials resolver on meetings
+
+    Can request a set of meetings by number by passing a list in the meetings arg, or
+    by range by passing ISO-format timestamps in meetings_since / meetings_until.
+    To select all meetings, set meetings_since="zero" and omit other parameters.
+ """ + meetings_qs = _select_meetings(meetings, meetings_since, meetings_until) + for meeting in meetings_qs.order_by("date"): + log.log( + f"Resolving materials for {meeting.type_id} " + f"meeting {meeting.number} ({meeting.date})..." + ) + mark = timezone.now() + try: + resolve_materials_for_one_meeting(meeting) + except Exception as err: + log.log( + "Exception raised while resolving materials for " + f"meeting {meeting.number}: {err}" + ) + else: + log.log( + f"Resolved in {(timezone.now() - mark).total_seconds():0.3f} seconds." + ) + + +@shared_task +def store_meeting_materials_as_blobs_task( + *, # only allow kw arguments + meetings: list[str] | None = None, + meetings_since: str | None = None, + meetings_until: str | None = None, +): + """Push meeting materials into the blob store + + Can request a set of meetings by number by passing a list in the meetings arg, or + by range by passing an iso-format timestamps in meetings_since / meetings_until. + To select all meetings, set meetings_since="zero" and omit other parameters. + """ + meetings_qs = _select_meetings(meetings, meetings_since, meetings_until) + for meeting in meetings_qs.order_by("date"): + log.log( + f"Creating blobs for materials for {meeting.type_id} " + f"meeting {meeting.number} ({meeting.date})..." + ) + mark = timezone.now() + try: + store_blobs_for_one_meeting(meeting) + except Exception as err: + log.log( + "Exception raised while creating blobs for " + f"meeting {meeting.number}: {err}" + ) + else: + log.log( + f"Blobs created in {(timezone.now() - mark).total_seconds():0.3f} seconds." + ) diff --git a/ietf/secr/sreq/templatetags/ams_filters.py b/ietf/meeting/templatetags/ams_filters.py similarity index 96% rename from ietf/secr/sreq/templatetags/ams_filters.py rename to ietf/meeting/templatetags/ams_filters.py index 3ef872232a..a8175a81d6 100644 --- a/ietf/secr/sreq/templatetags/ams_filters.py +++ b/ietf/meeting/templatetags/ams_filters.py @@ -1,3 +1,5 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + from django import template from ietf.person.models import Person diff --git a/ietf/meeting/templatetags/proceedings_filters.py b/ietf/meeting/templatetags/proceedings_filters.py index f5fe0e1f14..a2a4932e7c 100644 --- a/ietf/meeting/templatetags/proceedings_filters.py +++ b/ietf/meeting/templatetags/proceedings_filters.py @@ -11,7 +11,7 @@ def hack_recording_title(recording,add_timestamp=False): if recording.title.startswith('Audio recording for') or recording.title.startswith('Video recording for'): hacked_title = recording.title[:15] if add_timestamp: - hacked_title += ' '+recording.sessionpresentation_set.first().session.official_timeslotassignment().timeslot.time.strftime("%a %H:%M") + hacked_title += ' '+recording.presentations.first().session.official_timeslotassignment().timeslot.time.strftime("%a %H:%M") return hacked_title else: return recording.title diff --git a/ietf/meeting/templatetags/session_filters.py b/ietf/meeting/templatetags/session_filters.py index 4fe377a813..3846dab49e 100644 --- a/ietf/meeting/templatetags/session_filters.py +++ b/ietf/meeting/templatetags/session_filters.py @@ -8,7 +8,7 @@ @register.filter def presented_versions(session, doc): - sp = session.sessionpresentation_set.filter(document=doc) + sp = session.presentations.filter(document=doc) if not sp: return "Document not in session" else: diff --git a/ietf/meeting/test_data.py b/ietf/meeting/test_data.py index 5ecb494df2..8be55b47a2 100644 --- a/ietf/meeting/test_data.py +++ b/ietf/meeting/test_data.py @@ -51,7 +51,7 
@@ def make_interim_meeting(group,date,status='sched',tz='UTC'): doc = DocumentFactory.create(name=name, type_id='agenda', title="Agenda", uploaded_filename=file, group=group, rev=rev, states=[('draft','active')]) pres = SessionPresentation.objects.create(session=session, document=doc, rev=doc.rev) - session.sessionpresentation_set.add(pres) + session.presentations.add(pres) # minutes name = "minutes-%s-%s" % (meeting.number, time.strftime("%Y%m%d%H%M")) rev = '00' @@ -59,7 +59,7 @@ def make_interim_meeting(group,date,status='sched',tz='UTC'): doc = DocumentFactory.create(name=name, type_id='minutes', title="Minutes", uploaded_filename=file, group=group, rev=rev, states=[('draft','active')]) pres = SessionPresentation.objects.create(session=session, document=doc, rev=doc.rev) - session.sessionpresentation_set.add(pres) + session.presentations.add(pres) # slides title = "Slideshow" @@ -70,7 +70,7 @@ def make_interim_meeting(group,date,status='sched',tz='UTC'): uploaded_filename=file, group=group, rev=rev, states=[('slides','active'), ('reuse_policy', 'single')]) pres = SessionPresentation.objects.create(session=session, document=doc, rev=doc.rev) - session.sessionpresentation_set.add(pres) + session.presentations.add(pres) # return meeting @@ -198,24 +198,24 @@ def make_meeting_test_data(meeting=None, create_interims=False): doc = DocumentFactory.create(name='agenda-72-mars', type_id='agenda', title="Agenda", uploaded_filename="agenda-72-mars.txt", group=mars, rev='00', states=[('agenda','active')]) pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev) - mars_session.sessionpresentation_set.add(pres) # + mars_session.presentations.add(pres) # doc = DocumentFactory.create(name='minutes-72-mars', type_id='minutes', title="Minutes", uploaded_filename="minutes-72-mars.md", group=mars, rev='00', states=[('minutes','active')]) pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev) - mars_session.sessionpresentation_set.add(pres) + mars_session.presentations.add(pres) doc = DocumentFactory.create(name='slides-72-mars-1-active', type_id='slides', title="Slideshow", uploaded_filename="slides-72-mars.txt", group=mars, rev='00', states=[('slides','active'), ('reuse_policy', 'single')]) pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev) - mars_session.sessionpresentation_set.add(pres) + mars_session.presentations.add(pres) doc = DocumentFactory.create(name='slides-72-mars-2-deleted', type_id='slides', title="Bad Slideshow", uploaded_filename="slides-72-mars-2-deleted.txt", group=mars, rev='00', states=[('slides','deleted'), ('reuse_policy', 'single')]) pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev) - mars_session.sessionpresentation_set.add(pres) + mars_session.presentations.add(pres) # Future Interim Meetings date = date_today() + datetime.timedelta(days=365) diff --git a/ietf/meeting/tests_helpers.py b/ietf/meeting/tests_helpers.py index 9ce3c21cbc..b118b9f041 100644 --- a/ietf/meeting/tests_helpers.py +++ b/ietf/meeting/tests_helpers.py @@ -487,7 +487,7 @@ def test_create_interim_session_conferences(self, mock): mock.reset_mock() mock_conf_mgr.create.return_value = [ Conference( - manager=mock_conf_mgr, id=1, public_id='some-uuid', description='desc', + manager=mock_conf_mgr, id=int(sessions[0].pk), public_id='some-uuid', description='desc', start_time=timeslots[0].utc_start_time(), duration=timeslots[0].duration, url='fake-meetecho-url', 
deletion_token='please-delete-me', ), @@ -498,6 +498,7 @@ def test_create_interim_session_conferences(self, mock): mock_conf_mgr.create.call_args[1], { 'group': sessions[0].group, + 'session_id': sessions[0].id, 'description': str(sessions[0]), 'start_time': timeslots[0].utc_start_time(), 'duration': timeslots[0].duration, @@ -512,12 +513,12 @@ def test_create_interim_session_conferences(self, mock): mock.reset_mock() mock_conf_mgr.create.side_effect = [ [Conference( - manager=mock_conf_mgr, id=1, public_id='some-uuid', description='desc', + manager=mock_conf_mgr, id=int(sessions[0].pk), public_id='some-uuid', description='desc', start_time=timeslots[0].utc_start_time(), duration=timeslots[0].duration, url='different-fake-meetecho-url', deletion_token='please-delete-me', )], [Conference( - manager=mock_conf_mgr, id=2, public_id='another-uuid', description='desc', + manager=mock_conf_mgr, id=int(sessions[1].pk), public_id='another-uuid', description='desc', start_time=timeslots[1].utc_start_time(), duration=timeslots[1].duration, url='another-fake-meetecho-url', deletion_token='please-delete-me-too', )], @@ -528,16 +529,18 @@ def test_create_interim_session_conferences(self, mock): mock_conf_mgr.create.call_args_list, [ ({ - 'group': sessions[0].group, - 'description': str(sessions[0]), - 'start_time': timeslots[0].utc_start_time(), - 'duration': timeslots[0].duration, + 'group': sessions[0].group, + 'session_id': sessions[0].id, + 'description': str(sessions[0]), + 'start_time': timeslots[0].utc_start_time(), + 'duration': timeslots[0].duration, },), ({ - 'group': sessions[1].group, - 'description': str(sessions[1]), - 'start_time': timeslots[1].utc_start_time(), - 'duration': timeslots[1].duration, + 'group': sessions[1].group, + 'session_id': sessions[1].id, + 'description': str(sessions[1]), + 'start_time': timeslots[1].utc_start_time(), + 'duration': timeslots[1].duration, },), ] ) diff --git a/ietf/meeting/tests_js.py b/ietf/meeting/tests_js.py index e69afe5ca4..3269342924 100644 --- a/ietf/meeting/tests_js.py +++ b/ietf/meeting/tests_js.py @@ -5,7 +5,7 @@ import time import datetime import shutil -import os +import tempfile import re from django.utils import timezone @@ -249,7 +249,9 @@ def test_edit_meeting_schedule(self): self.assertTrue(s1_element.is_displayed()) # should still be displayed self.assertIn('hidden-parent', s1_element.get_attribute('class'), 'Session should be hidden when parent disabled') - s1_element.click() # try to select + + self.scroll_and_click((By.CSS_SELECTOR, '#session{}'.format(s1.pk))) + self.assertNotIn('selected', s1_element.get_attribute('class'), 'Session should not be selectable when parent disabled') @@ -299,9 +301,9 @@ def test_edit_meeting_schedule(self): 'Session s1 should have moved to second meeting day') # swap timeslot column - put session in a differently-timed timeslot - self.driver.find_element(By.CSS_SELECTOR, + self.scroll_and_click((By.CSS_SELECTOR, '.day .swap-timeslot-col[data-timeslot-pk="{}"]'.format(slot1b.pk) - ).click() # open modal on the second timeslot for room1 + )) # open modal on the second timeslot for room1 self.assertTrue(self.driver.find_element(By.CSS_SELECTOR, "#swap-timeslot-col-modal").is_displayed()) self.driver.find_element(By.CSS_SELECTOR, '#swap-timeslot-col-modal input[name="target_timeslot"][value="{}"]'.format(slot4.pk) @@ -499,7 +501,7 @@ def test_past_swap_days_buttons(self): clicked_index = 1 # scroll so the button we want to click is just below the navbar, otherwise it may # fall beneath the sessions 
panel - navbar = self.driver.find_element_by_class_name('navbar') + navbar = self.driver.find_element(By.CSS_SELECTOR, '.navbar') self.driver.execute_script( 'window.scrollBy({top: %s, behavior: "instant"})' % ( future_swap_days_buttons[1].location['y'] - navbar.size['height'] @@ -884,9 +886,9 @@ class SlideReorderTests(IetfSeleniumTestCase): def setUp(self): super(SlideReorderTests, self).setUp() self.session = SessionFactory(meeting__type_id='ietf', status_id='sched') - self.session.sessionpresentation_set.create(document=DocumentFactory(type_id='slides',name='one'),order=1) - self.session.sessionpresentation_set.create(document=DocumentFactory(type_id='slides',name='two'),order=2) - self.session.sessionpresentation_set.create(document=DocumentFactory(type_id='slides',name='three'),order=3) + self.session.presentations.create(document=DocumentFactory(type_id='slides',name='one'),order=1) + self.session.presentations.create(document=DocumentFactory(type_id='slides',name='two'),order=2) + self.session.presentations.create(document=DocumentFactory(type_id='slides',name='three'),order=3) def secr_login(self): self.login('secretary') @@ -906,7 +908,7 @@ def testReorderSlides(self): ActionChains(self.driver).drag_and_drop(second,third).perform() time.sleep(0.1) # The API that modifies the database runs async - names=self.session.sessionpresentation_set.values_list('document__name',flat=True) + names=self.session.presentations.values_list('document__name',flat=True) self.assertEqual(list(names),['one','three','two']) @ifSeleniumEnabled @@ -939,13 +941,8 @@ def tearDown(self): def tempdir(self, label): # Borrowed from test_utils.TestCase slug = slugify(self.__class__.__name__.replace('.','-')) - dirname = "tmp-{label}-{slug}-dir".format(**locals()) - if 'VIRTUAL_ENV' in os.environ: - dirname = os.path.join(os.environ['VIRTUAL_ENV'], dirname) - path = os.path.abspath(dirname) - if not os.path.exists(path): - os.mkdir(path) - return path + suffix = "-{label}-{slug}-dir".format(**locals()) + return tempfile.mkdtemp(suffix=suffix) def displayed_interims(self, groups=None): sessions = add_event_info_to_session_qs( @@ -1045,6 +1042,7 @@ def advance_month(): def do_upcoming_view_filter_test(self, querystring, visible_meetings=()): self.login() self.driver.get(self.absreverse('ietf.meeting.views.upcoming') + querystring) + time.sleep(0.2) # gross, but give the filter JS time to do its thing self.assert_upcoming_meeting_visibility(visible_meetings) self.assert_upcoming_meeting_calendar(visible_meetings) self.assert_upcoming_view_filter_matches_ics_filter(querystring) @@ -1232,10 +1230,13 @@ def _assert_ietf_tz_correct(meetings, tz): self.driver.get(self.absreverse('ietf.meeting.views.upcoming')) tz_select_input = self.driver.find_element(By.ID, 'timezone-select') tz_select_bottom_input = self.driver.find_element(By.ID, 'timezone-select-bottom') - local_tz_link = self.driver.find_element(By.ID, 'local-timezone') - utc_tz_link = self.driver.find_element(By.ID, 'utc-timezone') - local_tz_bottom_link = self.driver.find_element(By.ID, 'local-timezone-bottom') - utc_tz_bottom_link = self.driver.find_element(By.ID, 'utc-timezone-bottom') + + # For things we click, need to click the labels / actually visible items. The actual inputs are hidden + # and managed by the JS. 
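# --- Illustrative sketch (editor's note, not part of the patch) -------------
# The comment above describes the Selenium pattern used in the lines that
# follow: when a radio/checkbox input is visually hidden and styled through
# its <label>, the test has to click the label rather than the input itself.
# A minimal sketch, with a hypothetical element id:
#
# label = self.driver.find_element(By.CSS_SELECTOR, 'label[for="some-hidden-input"]')
# label.click()  # toggles the hidden input that the page JS is listening to
# -----------------------------------------------------------------------------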
+ local_tz_link = self.driver.find_element(By.CSS_SELECTOR, 'label[for="local-timezone"]') + utc_tz_link = self.driver.find_element(By.CSS_SELECTOR, 'label[for="utc-timezone"]') + local_tz_bottom_link = self.driver.find_element(By.CSS_SELECTOR, 'label[for="local-timezone-bottom"]') + utc_tz_bottom_link = self.driver.find_element(By.CSS_SELECTOR, 'label[for="utc-timezone-bottom"]') # wait for the select box to be updated - look for an arbitrary time zone to be in # its options list to detect this @@ -1245,7 +1246,10 @@ def _assert_ietf_tz_correct(meetings, tz): (By.CSS_SELECTOR, '#timezone-select > option[value="%s"]' % arbitrary_tz) ) ) - + tz_selector_clickables = self.driver.find_elements(By.CSS_SELECTOR, ".tz-display .select2") + self.assertEqual(len(tz_selector_clickables), 2) + (tz_selector_top, tz_selector_bottom) = tz_selector_clickables + arbitrary_tz_bottom_opt = tz_select_bottom_input.find_element(By.CSS_SELECTOR, '#timezone-select-bottom > option[value="%s"]' % arbitrary_tz) @@ -1256,7 +1260,7 @@ def _assert_ietf_tz_correct(meetings, tz): # to inherit Django's settings.TIME_ZONE but I don't know whether that's guaranteed to be consistent. # To avoid test fragility, ask Moment what it considers local and expect that. local_tz = self.driver.execute_script('return moment.tz.guess();') - local_tz_opt = tz_select_input.find_element(By.CSS_SELECTOR, 'option[value=%s]' % local_tz) + local_tz_opt = tz_select_input.find_element(By.CSS_SELECTOR, 'option[value="%s"]' % local_tz) local_tz_bottom_opt = tz_select_bottom_input.find_element(By.CSS_SELECTOR, 'option[value="%s"]' % local_tz) # Should start off in local time zone @@ -1266,8 +1270,7 @@ def _assert_ietf_tz_correct(meetings, tz): _assert_ietf_tz_correct(ietf_meetings, local_tz) # click 'utc' button - self.driver.execute_script("arguments[0].click();", utc_tz_link) # FIXME-LARS: not working: - # utc_tz_link.click() + utc_tz_link.click() self.wait.until(expected_conditions.element_to_be_selected(utc_tz_opt)) self.assertFalse(local_tz_opt.is_selected()) self.assertFalse(local_tz_bottom_opt.is_selected()) @@ -1279,8 +1282,7 @@ def _assert_ietf_tz_correct(meetings, tz): _assert_ietf_tz_correct(ietf_meetings, 'UTC') # click back to 'local' - self.driver.execute_script("arguments[0].click();", local_tz_link) # FIXME-LARS: not working: - # local_tz_link.click() + local_tz_link.click() self.wait.until(expected_conditions.element_to_be_selected(local_tz_opt)) self.assertTrue(local_tz_opt.is_selected()) self.assertTrue(local_tz_bottom_opt.is_selected()) @@ -1292,7 +1294,12 @@ def _assert_ietf_tz_correct(meetings, tz): _assert_ietf_tz_correct(ietf_meetings, local_tz) # Now select a different item from the select input - arbitrary_tz_opt.click() + tz_selector_top.click() + self.wait.until( + expected_conditions.presence_of_element_located( + (By.CSS_SELECTOR, 'span.select2-container .select2-results li[id$="America/Halifax"]') + ) + ).click() self.wait.until(expected_conditions.element_to_be_selected(arbitrary_tz_opt)) self.assertFalse(local_tz_opt.is_selected()) self.assertFalse(local_tz_bottom_opt.is_selected()) @@ -1305,8 +1312,8 @@ def _assert_ietf_tz_correct(meetings, tz): # Now repeat those tests using the widgets at the bottom of the page # click 'utc' button - self.driver.execute_script("arguments[0].click();", utc_tz_bottom_link) # FIXME-LARS: not working: - # utc_tz_bottom_link.click() + self.scroll_to_element(utc_tz_bottom_link) + utc_tz_bottom_link.click() self.wait.until(expected_conditions.element_to_be_selected(utc_tz_opt)) 
self.assertFalse(local_tz_opt.is_selected()) self.assertFalse(local_tz_bottom_opt.is_selected()) @@ -1318,8 +1325,8 @@ def _assert_ietf_tz_correct(meetings, tz): _assert_ietf_tz_correct(ietf_meetings, 'UTC') # click back to 'local' - self.driver.execute_script("arguments[0].click();", local_tz_bottom_link) # FIXME-LARS: not working: - # local_tz_bottom_link.click() + self.scroll_to_element(local_tz_bottom_link) + local_tz_bottom_link.click() self.wait.until(expected_conditions.element_to_be_selected(local_tz_opt)) self.assertTrue(local_tz_opt.is_selected()) self.assertTrue(local_tz_bottom_opt.is_selected()) @@ -1331,7 +1338,13 @@ def _assert_ietf_tz_correct(meetings, tz): _assert_ietf_tz_correct(ietf_meetings, local_tz) # Now select a different item from the select input - arbitrary_tz_bottom_opt.click() + self.scroll_to_element(tz_selector_bottom) + tz_selector_bottom.click() + self.wait.until( + expected_conditions.presence_of_element_located( + (By.CSS_SELECTOR, 'span.select2-container .select2-results li[id$="America/Halifax"]') + ) + ).click() self.wait.until(expected_conditions.element_to_be_selected(arbitrary_tz_opt)) self.assertFalse(local_tz_opt.is_selected()) self.assertFalse(local_tz_bottom_opt.is_selected()) @@ -1362,13 +1375,8 @@ def test_upcoming_materials_modal(self): self.assertFalse(modal_div.is_displayed()) # Click the 'materials' button - open_modal_button = self.wait.until( - expected_conditions.element_to_be_clickable( - (By.CSS_SELECTOR, '[data-bs-target="#modal-%s"]' % slug) - ), - 'Modal open button not found or not clickable', - ) - open_modal_button.click() + open_modal_button_locator = (By.CSS_SELECTOR, '[data-bs-target="#modal-%s"]' % slug) + self.scroll_and_click(open_modal_button_locator) self.wait.until( expected_conditions.visibility_of(modal_div), 'Modal did not become visible after clicking open button', @@ -1382,6 +1390,7 @@ def test_upcoming_materials_modal(self): ), 'Modal close button not found or not clickable', ) + time.sleep(0.3) # gross, but the button is clickable while still fading in close_modal_button.click() self.wait.until( expected_conditions.invisibility_of_element(modal_div), @@ -1567,7 +1576,7 @@ def test_delete_timeslot_cancel(self): def do_delete_time_interval_test(self, cancel=False): delete_time_local = datetime_from_date(self.meeting.date, self.meeting.tz()).replace(hour=10) - delete_time = delete_time_local.astimezone(datetime.timezone.utc) + delete_time = delete_time_local.astimezone(datetime.UTC) duration = datetime.timedelta(minutes=60) delete: [TimeSlot] = TimeSlotFactory.create_batch( # type: ignore[annotation-unchecked] diff --git a/ietf/meeting/tests_models.py b/ietf/meeting/tests_models.py index 0ccd462715..869d9ec814 100644 --- a/ietf/meeting/tests_models.py +++ b/ietf/meeting/tests_models.py @@ -1,16 +1,18 @@ -# Copyright The IETF Trust 2021, All Rights Reserved +# Copyright The IETF Trust 2021-2024, All Rights Reserved # -*- coding: utf-8 -*- """Tests of models in the Meeting application""" import datetime -from mock import patch +from unittest.mock import patch from django.conf import settings from django.test import override_settings +import ietf.meeting.models from ietf.group.factories import GroupFactory, GroupHistoryFactory from ietf.meeting.factories import MeetingFactory, SessionFactory, AttendedFactory, SessionPresentationFactory -from ietf.stats.factories import MeetingRegistrationFactory +from ietf.meeting.factories import RegistrationFactory +from ietf.meeting.models import Session from ietf.utils.test_utils 
import TestCase from ietf.utils.timezone import date_today, datetime_today @@ -19,9 +21,9 @@ class MeetingTests(TestCase): def test_get_attendance_pre110(self): """Pre-110 meetings do not calculate attendance""" meeting = MeetingFactory(type_id='ietf', number='109') - MeetingRegistrationFactory.create_batch(3, meeting=meeting, reg_type='') - MeetingRegistrationFactory.create_batch(4, meeting=meeting, reg_type='remote') - MeetingRegistrationFactory.create_batch(5, meeting=meeting, reg_type='in_person') + RegistrationFactory.create_batch(3, meeting=meeting, with_ticket={'attendance_type_id': 'unknown'}) + RegistrationFactory.create_batch(4, meeting=meeting, with_ticket={'attendance_type_id': 'remote'}) + RegistrationFactory.create_batch(5, meeting=meeting, with_ticket={'attendance_type_id': 'onsite'}) self.assertIsNone(meeting.get_attendance()) def test_get_attendance_110(self): @@ -29,31 +31,31 @@ def test_get_attendance_110(self): meeting = MeetingFactory(type_id='ietf', number='110') # start with attendees that should be ignored - MeetingRegistrationFactory.create_batch(3, meeting=meeting, reg_type='', attended=True) - MeetingRegistrationFactory(meeting=meeting, reg_type='', attended=False) + RegistrationFactory.create_batch(3, meeting=meeting, with_ticket={'attendance_type_id': 'unknown'}, attended=True) + RegistrationFactory(meeting=meeting, with_ticket={'attendance_type_id': 'unknown'}, attended=False) attendance = meeting.get_attendance() self.assertIsNotNone(attendance) self.assertEqual(attendance.remote, 0) self.assertEqual(attendance.onsite, 0) # add online attendees with at least one who registered but did not attend - MeetingRegistrationFactory.create_batch(4, meeting=meeting, reg_type='remote', attended=True) - MeetingRegistrationFactory(meeting=meeting, reg_type='remote', attended=False) + RegistrationFactory.create_batch(4, meeting=meeting, with_ticket={'attendance_type_id': 'remote'}, attended=True) + RegistrationFactory(meeting=meeting, with_ticket={'attendance_type_id': 'remote'}, attended=False) attendance = meeting.get_attendance() self.assertIsNotNone(attendance) self.assertEqual(attendance.remote, 4) self.assertEqual(attendance.onsite, 0) # and the same for onsite attendees - MeetingRegistrationFactory.create_batch(5, meeting=meeting, reg_type='onsite', attended=True) - MeetingRegistrationFactory(meeting=meeting, reg_type='in_person', attended=False) + RegistrationFactory.create_batch(5, meeting=meeting, with_ticket={'attendance_type_id': 'onsite'}, attended=True) + RegistrationFactory(meeting=meeting, with_ticket={'attendance_type_id': 'onsite'}, attended=False) attendance = meeting.get_attendance() self.assertIsNotNone(attendance) self.assertEqual(attendance.remote, 4) self.assertEqual(attendance.onsite, 5) # and once more after removing all the online attendees - meeting.meetingregistration_set.filter(reg_type='remote').delete() + meeting.registration_set.remote().delete() attendance = meeting.get_attendance() self.assertIsNotNone(attendance) self.assertEqual(attendance.remote, 0) @@ -62,11 +64,11 @@ def test_get_attendance_110(self): def test_get_attendance_113(self): """Simulate IETF 113 attendance gathering data""" meeting = MeetingFactory(type_id='ietf', number='113') - MeetingRegistrationFactory(meeting=meeting, reg_type='onsite', attended=True, checkedin=False) - MeetingRegistrationFactory(meeting=meeting, reg_type='onsite', attended=False, checkedin=True) - p1 = MeetingRegistrationFactory(meeting=meeting, reg_type='onsite', attended=False, 
checkedin=False).person + RegistrationFactory(meeting=meeting, with_ticket={'attendance_type_id': 'onsite'}, attended=True, checkedin=False) + RegistrationFactory(meeting=meeting, with_ticket={'attendance_type_id': 'onsite'}, attended=False, checkedin=True) + p1 = RegistrationFactory(meeting=meeting, with_ticket={'attendance_type_id': 'onsite'}, attended=False, checkedin=False).person AttendedFactory(session__meeting=meeting, person=p1) - p2 = MeetingRegistrationFactory(meeting=meeting, reg_type='remote', attended=False, checkedin=False).person + p2 = RegistrationFactory(meeting=meeting, with_ticket={'attendance_type_id': 'remote'}, attended=False, checkedin=False).person AttendedFactory(session__meeting=meeting, person=p2) attendance = meeting.get_attendance() self.assertEqual(attendance.onsite, 3) @@ -80,9 +82,9 @@ def test_get_attendance_keeps_meetings_distinct(self): # Create a person who attended a remote session for first_mtg and onsite for second_mtg without # checking in for either. - p = MeetingRegistrationFactory(meeting=second_mtg, reg_type='onsite', attended=False, checkedin=False).person + p = RegistrationFactory(meeting=second_mtg, with_ticket={'attendance_type_id': 'onsite'}, attended=False, checkedin=False).person AttendedFactory(session__meeting=first_mtg, person=p) - MeetingRegistrationFactory(meeting=first_mtg, person=p, reg_type='remote', attended=False, checkedin=False) + RegistrationFactory(meeting=first_mtg, person=p, with_ticket={'attendance_type_id': 'remote'}, attended=False, checkedin=False) AttendedFactory(session__meeting=second_mtg, person=p) att = first_mtg.get_attendance() @@ -146,3 +148,53 @@ def test_chat_room_name(self): self.assertEqual(session.chat_room_name(), 'plenary') session.chat_room = 'fnord' self.assertEqual(session.chat_room_name(), 'fnord') + + def test_alpha_str(self): + self.assertEqual(Session._alpha_str(0), "a") + self.assertEqual(Session._alpha_str(1), "b") + self.assertEqual(Session._alpha_str(25), "z") + self.assertEqual(Session._alpha_str(26), "aa") + self.assertEqual(Session._alpha_str(27 * 26 - 1), "zz") + self.assertEqual(Session._alpha_str(27 * 26), "aaa") + + @patch.object(ietf.meeting.models.Session, "_session_recording_url_label", return_value="LABEL") + def test_session_recording_url(self, mock): + for session_type in ["ietf", "interim"]: + session = SessionFactory(meeting__type_id=session_type) + with override_settings(): + if hasattr(settings, "MEETECHO_SESSION_RECORDING_URL"): + del settings.MEETECHO_SESSION_RECORDING_URL + self.assertIsNone(session.session_recording_url()) + + settings.MEETECHO_SESSION_RECORDING_URL = "http://player.example.com" + self.assertEqual(session.session_recording_url(), "http://player.example.com") + + settings.MEETECHO_SESSION_RECORDING_URL = "http://player.example.com?{session_label}" + self.assertEqual(session.session_recording_url(), "http://player.example.com?LABEL") + + session.meetecho_recording_name="actualname" + session.save() + self.assertEqual(session.session_recording_url(), "http://player.example.com?actualname") + + def test_session_recording_url_label_ietf(self): + session = SessionFactory( + meeting__type_id='ietf', + meeting__date=date_today(), + meeting__number="123", + group__acronym="acro", + ) + session_time = session.official_timeslotassignment().timeslot.time + self.assertEqual( + f"IETF123-ACRO-{session_time:%Y%m%d-%H%M}", # n.b., time in label is UTC + session._session_recording_url_label()) + + def test_session_recording_url_label_interim(self): + session = 
SessionFactory( + meeting__type_id='interim', + meeting__date=date_today(), + group__acronym="acro", + ) + session_time = session.official_timeslotassignment().timeslot.time + self.assertEqual( + f"IETF-ACRO-{session_time:%Y%m%d-%H%M}", # n.b., time in label is UTC + session._session_recording_url_label()) diff --git a/ietf/meeting/tests_schedule_forms.py b/ietf/meeting/tests_schedule_forms.py index 58c1332bd5..426d26dc2d 100644 --- a/ietf/meeting/tests_schedule_forms.py +++ b/ietf/meeting/tests_schedule_forms.py @@ -140,13 +140,13 @@ def test_location_options(self): rendered = str(TimeSlotEditForm(instance=ts)['location']) # noinspection PyTypeChecker self.assertInHTML( - f'', + f'', rendered, ) for room in rooms: # noinspection PyTypeChecker self.assertInHTML( - f'', + f'', rendered, ) diff --git a/ietf/secr/sreq/tests.py b/ietf/meeting/tests_session_requests.py similarity index 81% rename from ietf/secr/sreq/tests.py rename to ietf/meeting/tests_session_requests.py index 7fb13f1796..42dbee5f23 100644 --- a/ietf/secr/sreq/tests.py +++ b/ietf/meeting/tests_session_requests.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2013-2022, All Rights Reserved +# Copyright The IETF Trust 2013-2025, All Rights Reserved # -*- coding: utf-8 -*- @@ -13,31 +13,17 @@ from ietf.meeting.models import Session, ResourceAssociation, SchedulingEvent, Constraint from ietf.meeting.factories import MeetingFactory, SessionFactory from ietf.name.models import ConstraintName, TimerangeName +from ietf.person.factories import PersonFactory from ietf.person.models import Person -from ietf.secr.sreq.forms import SessionForm -from ietf.utils.mail import outbox, empty_outbox, get_payload_text +from ietf.meeting.forms import SessionRequestForm +from ietf.utils.mail import outbox, empty_outbox, get_payload_text, send_mail from ietf.utils.timezone import date_today from pyquery import PyQuery -SECR_USER='secretary' +SECR_USER = 'secretary' -class SreqUrlTests(TestCase): - def test_urls(self): - MeetingFactory(type_id='ietf',date=date_today()) - - self.client.login(username="secretary", password="secretary+password") - - r = self.client.get("/secr/") - self.assertEqual(r.status_code, 200) - - r = self.client.get("/secr/sreq/") - self.assertEqual(r.status_code, 200) - - testgroup=GroupFactory() - r = self.client.get("/secr/sreq/%s/new/" % testgroup.acronym) - self.assertEqual(r.status_code, 200) class SessionRequestTestCase(TestCase): def test_main(self): @@ -45,7 +31,7 @@ def test_main(self): SessionFactory.create_batch(2, meeting=meeting, status_id='sched') SessionFactory.create_batch(2, meeting=meeting, status_id='disappr') # Several unscheduled groups come from make_immutable_base_data - url = reverse('ietf.secr.sreq.views.main') + url = reverse('ietf.meeting.views_session_request.list_view') self.client.login(username="secretary", password="secretary+password") r = self.client.get(url) self.assertEqual(r.status_code, 200) @@ -61,23 +47,49 @@ def test_approve(self): mars = GroupFactory(parent=area, acronym='mars') # create session waiting for approval session = SessionFactory(meeting=meeting, group=mars, status_id='apprw') - url = reverse('ietf.secr.sreq.views.approve', kwargs={'acronym':'mars'}) + url = reverse('ietf.meeting.views_session_request.approve_request', kwargs={'acronym': 'mars'}) self.client.login(username="ad", password="ad+password") r = self.client.get(url) - self.assertRedirects(r,reverse('ietf.secr.sreq.views.view', kwargs={'acronym':'mars'})) + self.assertRedirects(r, 
reverse('ietf.meeting.views_session_request.view_request', kwargs={'acronym': 'mars'})) self.assertEqual(SchedulingEvent.objects.filter(session=session).order_by('-id')[0].status_id, 'appr') - + def test_cancel(self): meeting = MeetingFactory(type_id='ietf', date=date_today()) ad = Person.objects.get(user__username='ad') area = RoleFactory(name_id='ad', person=ad, group__type_id='area').group session = SessionFactory(meeting=meeting, group__parent=area, group__acronym='mars', status_id='sched') - url = reverse('ietf.secr.sreq.views.cancel', kwargs={'acronym':'mars'}) + url = reverse('ietf.meeting.views_session_request.cancel_request', kwargs={'acronym': 'mars'}) self.client.login(username="ad", password="ad+password") r = self.client.get(url) - self.assertRedirects(r,reverse('ietf.secr.sreq.views.main')) + self.assertRedirects(r, reverse('ietf.meeting.views_session_request.list_view')) self.assertEqual(SchedulingEvent.objects.filter(session=session).order_by('-id')[0].status_id, 'deleted') + def test_cancel_notification_msg(self): + to = "" + subject = "Dummy subject" + template = "meeting/session_cancel_notification.txt" + meeting = MeetingFactory(type_id="ietf", date=date_today()) + requester = PersonFactory(name="James O'Rourke", user__username="jimorourke") + context = {"meeting": meeting, "requester": requester} + cc = "cc.a@example.com, cc.b@example.com" + bcc = "bcc@example.com" + + msg = send_mail( + None, + to, + None, + subject, + template, + context, + cc=cc, + bcc=bcc, + ) + self.assertEqual(requester.name, "James O'Rourke") # note ' (single quote) in the name + self.assertIn( + f"A request to cancel a meeting session has just been submitted by {requester.name}.", + get_payload_text(msg), + ) + def test_edit(self): meeting = MeetingFactory(type_id='ietf', date=date_today()) mars = RoleFactory(name_id='chair', person__user__username='marschairman', group__acronym='mars').group @@ -86,9 +98,9 @@ def test_edit(self): group4 = GroupFactory() iabprog = GroupFactory(type_id='program') - SessionFactory(meeting=meeting,group=mars,status_id='sched') + SessionFactory(meeting=meeting, group=mars, status_id='sched') - url = reverse('ietf.secr.sreq.views.edit', kwargs={'acronym':'mars'}) + url = reverse('ietf.meeting.views_session_request.edit_request', kwargs={'acronym': 'mars'}) self.client.login(username="marschairman", password="marschairman+password") r = self.client.get(url) self.assertEqual(r.status_code, 200) @@ -96,9 +108,9 @@ def test_edit(self): comments = 'need lights' mars_sessions = meeting.session_set.filter(group__acronym='mars') empty_outbox() - post_data = {'num_session':'2', + post_data = {'num_session': '2', 'attendees': attendees, - 'constraint_chair_conflict':iabprog.acronym, + 'constraint_chair_conflict': iabprog.acronym, 'session_time_relation': 'subsequent-days', 'adjacent_with_wg': group2.acronym, 'joint_with_groups': group3.acronym + ' ' + group4.acronym, @@ -108,7 +120,7 @@ def test_edit(self): 'session_set-INITIAL_FORMS': '1', 'session_set-MIN_NUM_FORMS': '1', 'session_set-MAX_NUM_FORMS': '3', - 'session_set-0-id':mars_sessions[0].pk, + 'session_set-0-id': mars_sessions[0].pk, 'session_set-0-name': mars_sessions[0].name, 'session_set-0-short': mars_sessions[0].short, 'session_set-0-purpose': mars_sessions[0].purpose_id, @@ -142,7 +154,7 @@ def test_edit(self): 'session_set-2-DELETE': 'on', 'submit': 'Continue'} r = self.client.post(url, post_data, HTTP_HOST='example.com') - redirect_url = reverse('ietf.secr.sreq.views.view', kwargs={'acronym': 'mars'}) + 
redirect_url = reverse('ietf.meeting.views_session_request.view_request', kwargs={'acronym': 'mars'}) self.assertRedirects(r, redirect_url) # Check whether updates were stored in the database @@ -177,17 +189,17 @@ def test_edit(self): # Edit again, changing the joint sessions and clearing some fields. The behaviour of # edit is different depending on whether previous joint sessions were recorded. empty_outbox() - post_data = {'num_session':'2', - 'attendees':attendees, - 'constraint_chair_conflict':'', - 'comments':'need lights', + post_data = {'num_session': '2', + 'attendees': attendees, + 'constraint_chair_conflict': '', + 'comments': 'need lights', 'joint_with_groups': group2.acronym, 'joint_for_session': '1', 'session_set-TOTAL_FORMS': '3', # matches what view actually sends, even with only 2 filled in 'session_set-INITIAL_FORMS': '2', 'session_set-MIN_NUM_FORMS': '1', 'session_set-MAX_NUM_FORMS': '3', - 'session_set-0-id':sessions[0].pk, + 'session_set-0-id': sessions[0].pk, 'session_set-0-name': sessions[0].name, 'session_set-0-short': sessions[0].short, 'session_set-0-purpose': sessions[0].purpose_id, @@ -224,7 +236,7 @@ def test_edit(self): self.assertRedirects(r, redirect_url) # Check whether updates were stored in the database - sessions = Session.objects.filter(meeting=meeting, group=mars) + sessions = Session.objects.filter(meeting=meeting, group=mars).order_by("id") self.assertEqual(len(sessions), 2) session = sessions[0] self.assertFalse(session.constraints().filter(name='time_relation')) @@ -243,7 +255,6 @@ def test_edit(self): r = self.client.get(redirect_url) self.assertContains(r, 'First session with: {}'.format(group2.acronym)) - def test_edit_constraint_bethere(self): meeting = MeetingFactory(type_id='ietf', date=date_today()) mars = RoleFactory(name_id='chair', person__user__username='marschairman', group__acronym='mars').group @@ -255,7 +266,7 @@ def test_edit_constraint_bethere(self): name_id='bethere', ) self.assertEqual(session.people_constraints.count(), 1) - url = reverse('ietf.secr.sreq.views.edit', kwargs=dict(acronym='mars')) + url = reverse('ietf.meeting.views_session_request.edit_request', kwargs=dict(acronym='mars')) self.client.login(username='marschairman', password='marschairman+password') attendees = '10' ad = Person.objects.get(user__username='ad') @@ -263,8 +274,8 @@ def test_edit_constraint_bethere(self): 'num_session': '1', 'attendees': attendees, 'bethere': str(ad.pk), - 'constraint_chair_conflict':'', - 'comments':'', + 'constraint_chair_conflict': '', + 'comments': '', 'joint_with_groups': '', 'joint_for_session': '', 'delete_conflict': 'on', @@ -272,7 +283,7 @@ def test_edit_constraint_bethere(self): 'session_set-INITIAL_FORMS': '1', 'session_set-MIN_NUM_FORMS': '1', 'session_set-MAX_NUM_FORMS': '3', - 'session_set-0-id':session.pk, + 'session_set-0-id': session.pk, 'session_set-0-name': session.name, 'session_set-0-short': session.short, 'session_set-0-purpose': session.purpose_id, @@ -286,8 +297,8 @@ def test_edit_constraint_bethere(self): 'session_set-1-id': '', 'session_set-1-name': '', 'session_set-1-short': '', - 'session_set-1-purpose':'regular', - 'session_set-1-type':'regular', + 'session_set-1-purpose': 'regular', + 'session_set-1-type': 'regular', 'session_set-1-requested_duration': '', 'session_set-1-on_agenda': 'True', 'session_set-1-attendees': attendees, @@ -306,7 +317,7 @@ def test_edit_constraint_bethere(self): 'submit': 'Save', } r = self.client.post(url, post_data, HTTP_HOST='example.com') - redirect_url = 
reverse('ietf.secr.sreq.views.view', kwargs={'acronym': 'mars'}) + redirect_url = reverse('ietf.meeting.views_session_request.view_request', kwargs={'acronym': 'mars'}) self.assertRedirects(r, redirect_url) self.assertEqual([pc.person for pc in session.people_constraints.all()], [ad]) @@ -323,7 +334,7 @@ def test_edit_inactive_conflicts(self): target=other_group, ) - url = reverse('ietf.secr.sreq.views.edit', kwargs=dict(acronym='mars')) + url = reverse('ietf.meeting.views_session_request.edit_request', kwargs=dict(acronym='mars')) self.client.login(username='marschairman', password='marschairman+password') r = self.client.get(url) self.assertEqual(r.status_code, 200) @@ -333,17 +344,17 @@ def test_edit_inactive_conflicts(self): found = q('input#id_delete_conflict[type="checkbox"]') self.assertEqual(len(found), 1) delete_checkbox = found[0] - # check that the label on the checkbox is correct - self.assertIn('Delete this conflict', delete_checkbox.tail) + self.assertIn('Delete this conflict', delete_checkbox.label.text) # check that the target is displayed correctly in the UI - self.assertIn(other_group.acronym, delete_checkbox.find('../input[@type="text"]').value) + row = found.parent().parent() + self.assertIn(other_group.acronym, row.find('input[@type="text"]').val()) attendees = '10' post_data = { 'num_session': '1', 'attendees': attendees, - 'constraint_chair_conflict':'', - 'comments':'', + 'constraint_chair_conflict': '', + 'comments': '', 'joint_with_groups': '', 'joint_for_session': '', 'delete_conflict': 'on', @@ -351,7 +362,7 @@ def test_edit_inactive_conflicts(self): 'session_set-INITIAL_FORMS': '1', 'session_set-MIN_NUM_FORMS': '1', 'session_set-MAX_NUM_FORMS': '3', - 'session_set-0-id':session.pk, + 'session_set-0-id': session.pk, 'session_set-0-name': session.name, 'session_set-0-short': session.short, 'session_set-0-purpose': session.purpose_id, @@ -365,28 +376,28 @@ def test_edit_inactive_conflicts(self): 'submit': 'Save', } r = self.client.post(url, post_data, HTTP_HOST='example.com') - redirect_url = reverse('ietf.secr.sreq.views.view', kwargs={'acronym': 'mars'}) + redirect_url = reverse('ietf.meeting.views_session_request.view_request', kwargs={'acronym': 'mars'}) self.assertRedirects(r, redirect_url) self.assertEqual(len(mars.constraint_source_set.filter(name_id='conflict')), 0) def test_tool_status(self): MeetingFactory(type_id='ietf', date=date_today()) - url = reverse('ietf.secr.sreq.views.tool_status') + url = reverse('ietf.meeting.views_session_request.status') self.client.login(username="secretary", password="secretary+password") r = self.client.get(url) self.assertEqual(r.status_code, 200) - r = self.client.post(url, {'message':'locked', 'submit':'Lock'}) - self.assertRedirects(r,reverse('ietf.secr.sreq.views.main')) + r = self.client.post(url, {'message': 'locked', 'submit': 'Lock'}) + self.assertRedirects(r, reverse('ietf.meeting.views_session_request.list_view')) def test_new_req_constraint_types(self): """Configurable constraint types should be handled correctly in a new request - Relies on SessionForm representing constraint values with element IDs + Relies on SessionRequestForm representing constraint values with element IDs like id_constraint_ """ meeting = MeetingFactory(type_id='ietf', date=date_today()) RoleFactory(name_id='chair', person__user__username='marschairman', group__acronym='mars') - url = reverse('ietf.secr.sreq.views.new', kwargs=dict(acronym='mars')) + url = reverse('ietf.meeting.views_session_request.new_request', 
kwargs=dict(acronym='mars')) self.client.login(username="marschairman", password="marschairman+password") for expected in [ @@ -414,7 +425,7 @@ def test_edit_req_constraint_types(self): add_to_schedule=False) RoleFactory(name_id='chair', person__user__username='marschairman', group__acronym='mars') - url = reverse('ietf.secr.sreq.views.edit', kwargs=dict(acronym='mars')) + url = reverse('ietf.meeting.views_session_request.edit_request', kwargs=dict(acronym='mars')) self.client.login(username='marschairman', password='marschairman+password') for expected in [ @@ -433,6 +444,7 @@ def test_edit_req_constraint_types(self): ['id_constraint_{}'.format(conf_name) for conf_name in expected], ) + class SubmitRequestCase(TestCase): def setUp(self): super(SubmitRequestCase, self).setUp() @@ -449,15 +461,15 @@ def test_submit_request(self): group3 = GroupFactory(parent=area) group4 = GroupFactory(parent=area) session_count_before = Session.objects.filter(meeting=meeting, group=group).count() - url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':group.acronym}) - confirm_url = reverse('ietf.secr.sreq.views.confirm',kwargs={'acronym':group.acronym}) - main_url = reverse('ietf.secr.sreq.views.main') + url = reverse('ietf.meeting.views_session_request.new_request', kwargs={'acronym': group.acronym}) + confirm_url = reverse('ietf.meeting.views_session_request.confirm', kwargs={'acronym': group.acronym}) + main_url = reverse('ietf.meeting.views_session_request.list_view') attendees = '10' comments = 'need projector' - post_data = {'num_session':'1', - 'attendees':attendees, - 'constraint_chair_conflict':'', - 'comments':comments, + post_data = {'num_session': '1', + 'attendees': attendees, + 'constraint_chair_conflict': '', + 'comments': comments, 'adjacent_with_wg': group2.acronym, 'timeranges': ['thursday-afternoon-early', 'thursday-afternoon-late'], 'joint_with_groups': group3.acronym + ' ' + group4.acronym, @@ -479,7 +491,7 @@ def test_submit_request(self): 'session_set-0-DELETE': '', 'submit': 'Continue'} self.client.login(username="secretary", password="secretary+password") - r = self.client.post(url,post_data) + r = self.client.post(url, post_data) self.assertEqual(r.status_code, 200) # Verify the contents of the confirm view @@ -488,13 +500,13 @@ def test_submit_request(self): self.assertContains(r, 'First session with: {} {}'.format(group3.acronym, group4.acronym)) post_data['submit'] = 'Submit' - r = self.client.post(confirm_url,post_data) + r = self.client.post(confirm_url, post_data) self.assertRedirects(r, main_url) session_count_after = Session.objects.filter(meeting=meeting, group=group, type='regular').count() self.assertEqual(session_count_after, session_count_before + 1) # test that second confirm does not add sessions - r = self.client.post(confirm_url,post_data) + r = self.client.post(confirm_url, post_data) self.assertRedirects(r, main_url) session_count_after = Session.objects.filter(meeting=meeting, group=group, type='regular').count() self.assertEqual(session_count_after, session_count_before + 1) @@ -508,42 +520,6 @@ def test_submit_request(self): ) self.assertEqual(set(list(session.joint_with_groups.all())), set([group3, group4])) - def test_submit_request_invalid(self): - MeetingFactory(type_id='ietf', date=date_today()) - ad = Person.objects.get(user__username='ad') - area = RoleFactory(name_id='ad', person=ad, group__type_id='area').group - group = GroupFactory(parent=area) - url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':group.acronym}) - attendees = '10' - 
comments = 'need projector' - post_data = { - 'num_session':'2', - 'attendees':attendees, - 'constraint_chair_conflict':'', - 'comments':comments, - 'session_set-TOTAL_FORMS': '1', - 'session_set-INITIAL_FORMS': '1', - 'session_set-MIN_NUM_FORMS': '1', - 'session_set-MAX_NUM_FORMS': '3', - # no 'session_set-0-id' to create a new session - 'session_set-0-name': '', - 'session_set-0-short': '', - 'session_set-0-purpose': 'regular', - 'session_set-0-type': 'regular', - 'session_set-0-requested_duration': '3600', - 'session_set-0-on_agenda': True, - 'session_set-0-remote_instructions': '', - 'session_set-0-attendees': attendees, - 'session_set-0-comments': comments, - 'session_set-0-DELETE': '', - } - self.client.login(username="secretary", password="secretary+password") - r = self.client.post(url,post_data) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertEqual(len(q('#session-request-form')),1) - self.assertContains(r, 'Must provide data for all sessions') - def test_submit_request_check_constraints(self): m1 = MeetingFactory(type_id='ietf', date=date_today() - datetime.timedelta(days=100)) MeetingFactory(type_id='ietf', date=date_today(), @@ -570,7 +546,7 @@ def test_submit_request_check_constraints(self): self.client.login(username="secretary", password="secretary+password") - url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':group.acronym}) + url = reverse('ietf.meeting.views_session_request.new_request', kwargs={'acronym': group.acronym}) r = self.client.get(url + '?previous') self.assertEqual(r.status_code, 200) q = PyQuery(r.content) @@ -580,11 +556,11 @@ def test_submit_request_check_constraints(self): attendees = '10' comments = 'need projector' - post_data = {'num_session':'1', - 'attendees':attendees, + post_data = {'num_session': '1', + 'attendees': attendees, 'constraint_chair_conflict': group.acronym, - 'comments':comments, - 'session_set-TOTAL_FORMS': '1', + 'comments': comments, + 'session_set-TOTAL_FORMS': '3', 'session_set-INITIAL_FORMS': '1', 'session_set-MIN_NUM_FORMS': '1', 'session_set-MAX_NUM_FORMS': '3', @@ -599,11 +575,31 @@ def test_submit_request_check_constraints(self): 'session_set-0-attendees': attendees, 'session_set-0-comments': comments, 'session_set-0-DELETE': '', + 'session_set-1-name': '', + 'session_set-1-short': '', + 'session_set-1-purpose': session.purpose_id, + 'session_set-1-type': session.type_id, + 'session_set-1-requested_duration': '', + 'session_set-1-on_agenda': session.on_agenda, + 'session_set-1-remote_instructions': '', + 'session_set-1-attendees': attendees, + 'session_set-1-comments': '', + 'session_set-1-DELETE': 'on', + 'session_set-2-name': '', + 'session_set-2-short': '', + 'session_set-2-purpose': session.purpose_id, + 'session_set-2-type': session.type_id, + 'session_set-2-requested_duration': '', + 'session_set-2-on_agenda': session.on_agenda, + 'session_set-2-remote_instructions': '', + 'session_set-2-attendees': attendees, + 'session_set-2-comments': '', + 'session_set-2-DELETE': 'on', 'submit': 'Continue'} - r = self.client.post(url,post_data) + r = self.client.post(url, post_data) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) - self.assertEqual(len(q('#session-request-form')),1) + self.assertEqual(len(q('#session-request-form')), 1) self.assertContains(r, "Cannot declare a conflict with the same group") def test_request_notification(self): @@ -618,18 +614,18 @@ def test_request_notification(self): RoleFactory(name_id='chair', group=group, person__user__username='ameschairman') 
resource = ResourceAssociation.objects.create(name_id='project') # Bit of a test data hack - the fixture now has no used resources to pick from - resource.name.used=True + resource.name.used = True resource.name.save() - url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':group.acronym}) - confirm_url = reverse('ietf.secr.sreq.views.confirm',kwargs={'acronym':group.acronym}) + url = reverse('ietf.meeting.views_session_request.new_request', kwargs={'acronym': group.acronym}) + confirm_url = reverse('ietf.meeting.views_session_request.confirm', kwargs={'acronym': group.acronym}) len_before = len(outbox) attendees = '10' - post_data = {'num_session':'2', - 'attendees':attendees, - 'bethere':str(ad.pk), - 'constraint_chair_conflict':group4.acronym, - 'comments':'', + post_data = {'num_session': '2', + 'attendees': attendees, + 'bethere': str(ad.pk), + 'constraint_chair_conflict': group4.acronym, + 'comments': '', 'resources': resource.pk, 'session_time_relation': 'subsequent-days', 'adjacent_with_wg': group2.acronym, @@ -665,23 +661,23 @@ def test_request_notification(self): 'submit': 'Continue'} self.client.login(username="ameschairman", password="ameschairman+password") # submit - r = self.client.post(url,post_data) + r = self.client.post(url, post_data) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertTrue('Confirm' in str(q("title")), r.context['form'].errors) # confirm post_data['submit'] = 'Submit' - r = self.client.post(confirm_url,post_data) - self.assertRedirects(r, reverse('ietf.secr.sreq.views.main')) - self.assertEqual(len(outbox),len_before+1) + r = self.client.post(confirm_url, post_data) + self.assertRedirects(r, reverse('ietf.meeting.views_session_request.list_view')) + self.assertEqual(len(outbox), len_before + 1) notification = outbox[-1] notification_payload = get_payload_text(notification) - sessions = Session.objects.filter(meeting=meeting,group=group) + sessions = Session.objects.filter(meeting=meeting, group=group) self.assertEqual(len(sessions), 2) session = sessions[0] - self.assertEqual(session.resources.count(),1) - self.assertEqual(session.people_constraints.count(),1) + self.assertEqual(session.resources.count(), 1) + self.assertEqual(session.people_constraints.count(), 1) self.assertEqual(session.constraints().get(name='time_relation').time_relation, 'subsequent-days') self.assertEqual(session.constraints().get(name='wg_adjacent').target.acronym, group2.acronym) self.assertEqual( @@ -701,6 +697,33 @@ def test_request_notification(self): self.assertNotIn('1 Hour, 1 Hour, 1 Hour', notification_payload) self.assertNotIn('The third session requires your approval', notification_payload) + def test_request_notification_msg(self): + to = "" + subject = "Dummy subject" + template = "meeting/session_request_notification.txt" + header = "A new" + meeting = MeetingFactory(type_id="ietf", date=date_today()) + requester = PersonFactory(name="James O'Rourke", user__username="jimorourke") + context = {"header": header, "meeting": meeting, "requester": requester} + cc = "cc.a@example.com, cc.b@example.com" + bcc = "bcc@example.com" + + msg = send_mail( + None, + to, + None, + subject, + template, + context, + cc=cc, + bcc=bcc, + ) + self.assertEqual(requester.name, "James O'Rourke") # note ' (single quote) in the name + self.assertIn( + f"{header} meeting session request has just been submitted by {requester.name}.", + get_payload_text(msg), + ) + def test_request_notification_third_session(self): meeting = MeetingFactory(type_id='ietf', 
date=date_today()) ad = Person.objects.get(user__username='ad') @@ -713,19 +736,19 @@ def test_request_notification_third_session(self): RoleFactory(name_id='chair', group=group, person__user__username='ameschairman') resource = ResourceAssociation.objects.create(name_id='project') # Bit of a test data hack - the fixture now has no used resources to pick from - resource.name.used=True + resource.name.used = True resource.name.save() - url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':group.acronym}) - confirm_url = reverse('ietf.secr.sreq.views.confirm',kwargs={'acronym':group.acronym}) + url = reverse('ietf.meeting.views_session_request.new_request', kwargs={'acronym': group.acronym}) + confirm_url = reverse('ietf.meeting.views_session_request.confirm', kwargs={'acronym': group.acronym}) len_before = len(outbox) attendees = '10' - post_data = {'num_session':'2', + post_data = {'num_session': '2', 'third_session': 'true', - 'attendees':attendees, - 'bethere':str(ad.pk), - 'constraint_chair_conflict':group4.acronym, - 'comments':'', + 'attendees': attendees, + 'bethere': str(ad.pk), + 'constraint_chair_conflict': group4.acronym, + 'comments': '', 'resources': resource.pk, 'session_time_relation': 'subsequent-days', 'adjacent_with_wg': group2.acronym, @@ -772,23 +795,23 @@ def test_request_notification_third_session(self): 'submit': 'Continue'} self.client.login(username="ameschairman", password="ameschairman+password") # submit - r = self.client.post(url,post_data) + r = self.client.post(url, post_data) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertTrue('Confirm' in str(q("title")), r.context['form'].errors) # confirm post_data['submit'] = 'Submit' - r = self.client.post(confirm_url,post_data) - self.assertRedirects(r, reverse('ietf.secr.sreq.views.main')) - self.assertEqual(len(outbox),len_before+1) + r = self.client.post(confirm_url, post_data) + self.assertRedirects(r, reverse('ietf.meeting.views_session_request.list_view')) + self.assertEqual(len(outbox), len_before + 1) notification = outbox[-1] notification_payload = get_payload_text(notification) - sessions = Session.objects.filter(meeting=meeting,group=group) + sessions = Session.objects.filter(meeting=meeting, group=group) self.assertEqual(len(sessions), 3) session = sessions[0] - self.assertEqual(session.resources.count(),1) - self.assertEqual(session.people_constraints.count(),1) + self.assertEqual(session.resources.count(), 1) + self.assertEqual(session.people_constraints.count(), 1) self.assertEqual(session.constraints().get(name='time_relation').time_relation, 'subsequent-days') self.assertEqual(session.constraints().get(name='wg_adjacent').target.acronym, group2.acronym) self.assertEqual( @@ -807,16 +830,17 @@ def test_request_notification_third_session(self): self.assertIn('1 Hour, 1 Hour, 1 Hour', notification_payload) self.assertIn('The third session requires your approval', notification_payload) + class LockAppTestCase(TestCase): def setUp(self): super().setUp() - self.meeting = MeetingFactory(type_id='ietf', date=date_today(),session_request_lock_message='locked') + self.meeting = MeetingFactory(type_id='ietf', date=date_today(), session_request_lock_message='locked') self.group = GroupFactory(acronym='mars') RoleFactory(name_id='chair', group=self.group, person__user__username='marschairman') - SessionFactory(group=self.group,meeting=self.meeting) + SessionFactory(group=self.group, meeting=self.meeting) def test_edit_request(self): - url = 
reverse('ietf.secr.sreq.views.edit',kwargs={'acronym':self.group.acronym}) + url = reverse('ietf.meeting.views_session_request.edit_request', kwargs={'acronym': self.group.acronym}) self.client.login(username="secretary", password="secretary+password") r = self.client.get(url) self.assertEqual(r.status_code, 200) @@ -828,48 +852,49 @@ def test_edit_request(self): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertEqual(len(q(':disabled[name="submit"]')), 1) - + def test_view_request(self): - url = reverse('ietf.secr.sreq.views.view',kwargs={'acronym':self.group.acronym}) + url = reverse('ietf.meeting.views_session_request.view_request', kwargs={'acronym': self.group.acronym}) self.client.login(username="secretary", password="secretary+password") - r = self.client.get(url,follow=True) + r = self.client.get(url, follow=True) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertEqual(len(q(':enabled[name="edit"]')), 1) # secretary can edit chair = self.group.role_set.filter(name_id='chair').first().person.user.username self.client.login(username=chair, password=f'{chair}+password') - r = self.client.get(url,follow=True) + r = self.client.get(url, follow=True) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertEqual(len(q(':disabled[name="edit"]')), 1) # chair cannot edit def test_new_request(self): - url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':self.group.acronym}) - + url = reverse('ietf.meeting.views_session_request.new_request', kwargs={'acronym': self.group.acronym}) + # try as WG Chair self.client.login(username="marschairman", password="marschairman+password") r = self.client.get(url, follow=True) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) - self.assertEqual(len(q('#session-request-form')),0) - + self.assertEqual(len(q('#session-request-form')), 0) + # try as Secretariat self.client.login(username="secretary", password="secretary+password") - r = self.client.get(url,follow=True) + r = self.client.get(url, follow=True) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) - self.assertEqual(len(q('#session-request-form')),1) - + self.assertEqual(len(q('#session-request-form')), 1) + + class NotMeetingCase(TestCase): def test_not_meeting(self): - MeetingFactory(type_id='ietf',date=date_today()) + MeetingFactory(type_id='ietf', date=date_today()) group = GroupFactory(acronym='mars') - url = reverse('ietf.secr.sreq.views.no_session',kwargs={'acronym':group.acronym}) + url = reverse('ietf.meeting.views_session_request.no_session', kwargs={'acronym': group.acronym}) self.client.login(username="secretary", password="secretary+password") empty_outbox() - r = self.client.get(url,follow=True) + r = self.client.get(url, follow=True) # If the view invoked by that get throws an exception (such as an integrity error), # the traceback from this test will talk about a TransactionManagementError and # yell about executing queries before the end of an 'atomic' block @@ -878,14 +903,15 @@ def test_not_meeting(self): self.assertEqual(r.status_code, 200) self.assertContains(r, 'A message was sent to notify not having a session') - r = self.client.get(url,follow=True) + r = self.client.get(url, follow=True) self.assertEqual(r.status_code, 200) self.assertContains(r, 'is already marked as not meeting') - self.assertEqual(len(outbox),1) + self.assertEqual(len(outbox), 1) self.assertTrue('Not having a session' in outbox[0]['Subject']) self.assertTrue('session-request@' in outbox[0]['To']) + class 
RetrievePreviousCase(TestCase): pass @@ -895,7 +921,7 @@ class RetrievePreviousCase(TestCase): # test access by unauthorized -class SessionFormTest(TestCase): +class SessionRequestFormTest(TestCase): def setUp(self): super().setUp() self.meeting = MeetingFactory(type_id='ietf') @@ -960,19 +986,19 @@ def setUp(self): 'session_set-2-comments': '', 'session_set-2-DELETE': '', } - + def test_valid(self): # Test with three sessions - form = SessionForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) + form = SessionRequestForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) self.assertTrue(form.is_valid()) - + # Test with two sessions self.valid_form_data.update({ 'third_session': '', 'session_set-TOTAL_FORMS': '2', 'joint_for_session': '2' }) - form = SessionForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) + form = SessionRequestForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) self.assertTrue(form.is_valid()) # Test with one session @@ -982,9 +1008,9 @@ def test_valid(self): 'joint_for_session': '1', 'session_time_relation': '', }) - form = SessionForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) + form = SessionRequestForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) self.assertTrue(form.is_valid()) - + def test_invalid_groups(self): new_form_data = { 'constraint_chair_conflict': 'doesnotexist', @@ -1003,7 +1029,7 @@ def test_valid_group_appears_in_multiple_conflicts(self): 'constraint_tech_overlap': self.group2.acronym, } self.valid_form_data.update(new_form_data) - form = SessionForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) + form = SessionRequestForm(data=self.valid_form_data, group=self.group1, meeting=self.meeting) self.assertTrue(form.is_valid()) def test_invalid_group_appears_in_multiple_conflicts(self): @@ -1062,7 +1088,7 @@ def test_invalid_joint_for_session(self): 'joint_for_session': [ 'Session 2 can not be the joint session, the session has not been requested.'] }) - + def test_invalid_missing_session_length(self): form = self._invalid_test_helper({ 'session_set-TOTAL_FORMS': '2', @@ -1102,6 +1128,6 @@ def test_invalid_missing_session_length(self): def _invalid_test_helper(self, new_form_data): form_data = dict(self.valid_form_data, **new_form_data) - form = SessionForm(data=form_data, group=self.group1, meeting=self.meeting) + form = SessionRequestForm(data=form_data, group=self.group1, meeting=self.meeting) self.assertFalse(form.is_valid()) return form diff --git a/ietf/meeting/tests_tasks.py b/ietf/meeting/tests_tasks.py new file mode 100644 index 0000000000..2c5120a39d --- /dev/null +++ b/ietf/meeting/tests_tasks.py @@ -0,0 +1,121 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +import datetime +from unittest.mock import patch, call +from ietf.utils.test_utils import TestCase +from ietf.utils.timezone import date_today +from .factories import MeetingFactory +from .tasks import ( + proceedings_content_refresh_task, + agenda_data_refresh_task, + agenda_data_refresh_all_task, +) +from .tasks import fetch_meeting_attendance_task + + +class TaskTests(TestCase): + @patch("ietf.meeting.tasks.generate_agenda_data") + def test_agenda_data_refresh_task(self, mock_generate): + agenda_data_refresh_task() + self.assertTrue(mock_generate.called) + self.assertEqual(mock_generate.call_args, call(None, force_refresh=True)) + + mock_generate.reset_mock() + mock_generate.side_effect = RuntimeError + try: + 
agenda_data_refresh_task() + except Exception as err: + self.fail( + f"agenda_data_refresh_task should not raise exceptions (got {repr(err)})" + ) + + @patch("ietf.meeting.tasks.agenda_data_refresh_task") + @patch("ietf.meeting.tasks.chain") + def test_agenda_data_refresh_all_task(self, mock_chain, mock_agenda_data_refresh): + # Patch the agenda_data_refresh_task task with a mock whose `.map` attribute + # converts its argument, which is expected to be an iterator, to a list + # and returns it. We'll use this to check that the expected task chain + # was set up, but we don't actually run any celery tasks. + mock_agenda_data_refresh.map.side_effect = lambda x: list(x) + + meetings = MeetingFactory.create_batch(5, type_id="ietf") + numbers = sorted(int(m.number) for m in meetings) + agenda_data_refresh_all_task(batch_size=2) + self.assertTrue(mock_chain.called) + # The lists in the call() below are the output of the lambda we patched in + # via mock_agenda_data_refresh.map.side_effect above. I.e., this tests that + # map() was called with the correct batched data. + self.assertEqual( + mock_chain.call_args, + call( + [numbers[0], numbers[1]], + [numbers[2], numbers[3]], + [numbers[4]], + ), + ) + self.assertEqual(mock_agenda_data_refresh.call_count, 0) + self.assertEqual(mock_agenda_data_refresh.map.call_count, 3) + + @patch("ietf.meeting.tasks.generate_proceedings_content") + def test_proceedings_content_refresh_task(self, mock_generate): + # Generate a couple of meetings + meeting120 = MeetingFactory(type_id="ietf", number="120") # 24 * 5 + meeting127 = MeetingFactory(type_id="ietf", number="127") # 24 * 5 + 7 + + # Times to be returned + now_utc = datetime.datetime.now(tz=datetime.UTC) + hour_00_utc = now_utc.replace(hour=0) + hour_01_utc = now_utc.replace(hour=1) + hour_07_utc = now_utc.replace(hour=7) + + # hour 00 - should call meeting with number % 24 == 0 + with patch("ietf.meeting.tasks.timezone.now", return_value=hour_00_utc): + proceedings_content_refresh_task() + self.assertEqual(mock_generate.call_count, 1) + self.assertEqual(mock_generate.call_args, call(meeting120, force_refresh=True)) + mock_generate.reset_mock() + + # hour 01 - should call no meetings + with patch("ietf.meeting.tasks.timezone.now", return_value=hour_01_utc): + proceedings_content_refresh_task() + self.assertEqual(mock_generate.call_count, 0) + + # hour 07 - should call meeting with number % 24 == 7 + with patch("ietf.meeting.tasks.timezone.now", return_value=hour_07_utc): + proceedings_content_refresh_task() + self.assertEqual(mock_generate.call_count, 1) + self.assertEqual(mock_generate.call_args, call(meeting127, force_refresh=True)) + mock_generate.reset_mock() + + # With all=True, all should be called regardless of time. 
Reuse hour_01_utc which called none before + with patch("ietf.meeting.tasks.timezone.now", return_value=hour_01_utc): + proceedings_content_refresh_task(all=True) + self.assertEqual(mock_generate.call_count, 2) + + @patch("ietf.meeting.tasks.fetch_attendance_from_meetings") + def test_fetch_meeting_attendance_task(self, mock_fetch_attendance): + today = date_today() + meetings = [ + MeetingFactory(type_id="ietf", date=today - datetime.timedelta(days=1)), + MeetingFactory(type_id="ietf", date=today - datetime.timedelta(days=2)), + MeetingFactory(type_id="ietf", date=today - datetime.timedelta(days=3)), + ] + data = { + "created": 1, + "updated": 2, + "deleted": 0, + "processed": 3, + } + + mock_fetch_attendance.return_value = [data, data] + + fetch_meeting_attendance_task() + self.assertEqual(mock_fetch_attendance.call_count, 1) + self.assertCountEqual(mock_fetch_attendance.call_args[0][0], meetings[0:2]) + + # test handling of RuntimeError + mock_fetch_attendance.reset_mock() + mock_fetch_attendance.side_effect = RuntimeError + fetch_meeting_attendance_task() + self.assertTrue(mock_fetch_attendance.called) + # Good enough that we got here without raising an exception diff --git a/ietf/meeting/tests_utils.py b/ietf/meeting/tests_utils.py new file mode 100644 index 0000000000..7dd8f435e1 --- /dev/null +++ b/ietf/meeting/tests_utils.py @@ -0,0 +1,309 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +# -*- coding: utf-8 -*- + +import copy +import datetime +import debug # pyflakes: ignore +import json +import jsonschema +from json import JSONDecodeError +from unittest.mock import patch, Mock + +from django.http import HttpResponse, JsonResponse +from ietf.meeting.factories import MeetingFactory, RegistrationFactory, RegistrationTicketFactory +from ietf.meeting.models import Registration +from ietf.meeting.utils import ( + process_single_registration, + get_registration_data, + sync_registration_data, + fetch_attendance_from_meetings, + get_activity_stats +) +from ietf.nomcom.models import Volunteer +from ietf.nomcom.factories import NomComFactory, nomcom_kwargs_for_year +from ietf.person.factories import PersonFactory +from ietf.utils.test_utils import TestCase +from ietf.meeting.test_data import make_meeting_test_data +from ietf.doc.factories import NewRevisionDocEventFactory, DocEventFactory + + +class JsonResponseWithJson(JsonResponse): + def json(self): + return json.loads(self.content) + + +class ActivityStatsTests(TestCase): + + def test_activity_stats(self): + utc = datetime.timezone.utc + make_meeting_test_data() + sdate = datetime.date(2016,4,3) + edate = datetime.date(2016,7,14) + MeetingFactory(type_id='ietf', date=sdate, number="96") + MeetingFactory(type_id='ietf', date=edate, number="97") + + NewRevisionDocEventFactory(time=datetime.datetime(2016,4,5,12,0,0,0,tzinfo=utc)) + NewRevisionDocEventFactory(time=datetime.datetime(2016,4,6,12,0,0,0,tzinfo=utc)) + NewRevisionDocEventFactory(time=datetime.datetime(2016,4,7,12,0,0,0,tzinfo=utc)) + + NewRevisionDocEventFactory(time=datetime.datetime(2016,6,30,12,0,0,0,tzinfo=utc)) + NewRevisionDocEventFactory(time=datetime.datetime(2016,6,30,13,0,0,0,tzinfo=utc)) + + DocEventFactory(doc__std_level_id="ps", doc__type_id="rfc", type="published_rfc", time=datetime.datetime(2016,4,5,12,0,0,0,tzinfo=utc)) + DocEventFactory(doc__std_level_id="bcp", doc__type_id="rfc", type="published_rfc", time=datetime.datetime(2016,4,6,12,0,0,0,tzinfo=utc)) + DocEventFactory(doc__std_level_id="inf", doc__type_id="rfc", type="published_rfc", 
time=datetime.datetime(2016,4,7,12,0,0,0,tzinfo=utc)) + DocEventFactory(doc__std_level_id="exp", doc__type_id="rfc", type="published_rfc", time=datetime.datetime(2016,4,8,12,0,0,0,tzinfo=utc)) + + data = get_activity_stats(sdate, edate) + self.assertEqual(data['new_drafts_count'], len(data['new_docs'])) + self.assertEqual(data['ffw_new_count'], 2) + self.assertEqual(data['ffw_new_percent'], '40%') + rfc_count = 0 + for c in data['counts']: + rfc_count += data['counts'].get(c) + self.assertEqual(rfc_count, len(data['rfcs'])) + + +class GetRegistrationsTests(TestCase): + + @patch('ietf.meeting.utils.requests.get') + def test_get_registration_data(self, mock_get): + meeting = MeetingFactory(type_id='ietf', number='122') + person = PersonFactory() + reg_details = dict( + first_name=person.first_name(), + last_name=person.last_name(), + email=person.email().address, + affiliation='Microsoft', + country_code='US', + meeting=meeting.number, + checkedin=True, + is_nomcom_volunteer=False, + cancelled=False, + tickets=[{'attendance_type': 'onsite', 'ticket_type': 'week_pass'}], + ) + reg_data = {'objects': {person.email().address: reg_details}} + reg_data_bad = copy.deepcopy(reg_data) + del reg_data_bad['objects'][person.email().address]['email'] + response1 = HttpResponse('Invalid apikey', status=403) + response2 = JsonResponseWithJson(reg_data) + response3 = Mock() + response3.status_code = 200 + response3.json.side_effect = JSONDecodeError("Expecting value", doc="", pos=0) + response4 = JsonResponseWithJson(reg_data_bad) + mock_get.side_effect = [response1, response2, response3, response4] + # test status 403 + with self.assertRaises(Exception): + get_registration_data(meeting) + # test status 200 good + returned_data = get_registration_data(meeting) + self.assertEqual(returned_data, reg_data) + # test decode error + with self.assertRaises(ValueError): + get_registration_data(meeting) + # test validation error + with self.assertRaises(jsonschema.exceptions.ValidationError): + get_registration_data(meeting) + + @patch('ietf.meeting.utils.get_registration_data') + def test_sync_registration_data(self, mock_get): + meeting = MeetingFactory(type_id='ietf', number='122') + person1 = PersonFactory() + person2 = PersonFactory() + items = [] + for person in [person1, person2]: + items.append(dict( + first_name=person.first_name(), + last_name=person.last_name(), + email=person.email().address, + affiliation='Microsoft', + country_code='US', + meeting=meeting.number, + checkedin=True, + is_nomcom_volunteer=False, + cancelled=False, + tickets=[{'attendance_type': 'onsite', 'ticket_type': 'week_pass'}], + )) + reg_data = {'objects': {items[0]['email']: items[0], items[1]['email']: items[1]}} + mock_get.return_value = reg_data + self.assertEqual(Registration.objects.filter(meeting=meeting).count(), 0) + stats = sync_registration_data(meeting) + self.assertEqual(Registration.objects.filter(meeting=meeting).count(), 2) + self.assertEqual(stats['created'], 2) + # test idempotent + stats = sync_registration_data(meeting) + self.assertEqual(Registration.objects.filter(meeting=meeting).count(), 2) + self.assertEqual(stats['created'], 0) + # test delete cancelled registration + del reg_data['objects'][items[1]['email']] + stats = sync_registration_data(meeting) + self.assertEqual(Registration.objects.filter(meeting=meeting).count(), 1) + self.assertEqual(stats['deleted'], 1) + + def test_process_single_registration(self): + # test new registration + meeting = MeetingFactory(type_id='ietf', number='122') + person =
PersonFactory() + reg_data = dict( + first_name=person.first_name(), + last_name=person.last_name(), + email=person.email().address, + affiliation='Microsoft', + country_code='US', + meeting=meeting.number, + checkedin=True, + is_nomcom_volunteer=False, + cancelled=False, + tickets=[{'attendance_type': 'onsite', 'ticket_type': 'week_pass'}], + ) + self.assertEqual(meeting.registration_set.count(), 0) + new_reg, action = process_single_registration(reg_data, meeting) + self.assertEqual(meeting.registration_set.count(), 1) + reg = meeting.registration_set.first() + self.assertEqual(new_reg, reg) + self.assertEqual(action, 'created') + self.assertEqual(reg.first_name, person.first_name()) + self.assertEqual(reg.last_name, person.last_name()) + self.assertEqual(reg.email, person.email().address) + self.assertEqual(reg.affiliation, 'Microsoft') + self.assertEqual(reg.meeting, meeting) + self.assertEqual(reg.checkedin, True) + self.assertEqual(reg.tickets.count(), 1) + ticket = reg.tickets.first() + self.assertEqual(ticket.attendance_type.slug, 'onsite') + self.assertEqual(ticket.ticket_type.slug, 'week_pass') + + # test no change + new_reg, action = process_single_registration(reg_data, meeting) + self.assertEqual(meeting.registration_set.count(), 1) + reg = meeting.registration_set.first() + self.assertEqual(new_reg, reg) + self.assertEqual(action, None) + + # test update fields + reg_data['affiliation'] = 'Cisco' + new_reg, action = process_single_registration(reg_data, meeting) + self.assertEqual(meeting.registration_set.count(), 1) + reg = meeting.registration_set.first() + self.assertEqual(new_reg, reg) + self.assertEqual(action, 'updated') + self.assertEqual(reg.affiliation, 'Cisco') + + # test update tickets + reg_data['tickets'] = [{'attendance_type': 'remote', 'ticket_type': 'week_pass'}] + new_reg, action = process_single_registration(reg_data, meeting) + self.assertEqual(meeting.registration_set.count(), 1) + reg = meeting.registration_set.first() + self.assertEqual(new_reg, reg) + self.assertEqual(action, 'updated') + self.assertEqual(reg.tickets.count(), 1) + ticket = reg.tickets.first() + self.assertEqual(ticket.attendance_type.slug, 'remote') + + # test tickets, two of same + reg_data['tickets'] = [ + {'attendance_type': 'onsite', 'ticket_type': 'one_day'}, + {'attendance_type': 'onsite', 'ticket_type': 'one_day'}, + {'attendance_type': 'remote', 'ticket_type': 'week_pass'}, + ] + new_reg, action = process_single_registration(reg_data, meeting) + self.assertEqual(meeting.registration_set.count(), 1) + reg = meeting.registration_set.first() + self.assertEqual(new_reg, reg) + self.assertEqual(action, 'updated') + self.assertEqual(reg.tickets.count(), 3) + self.assertEqual(reg.tickets.filter(attendance_type__slug='onsite', ticket_type__slug='one_day').count(), 2) + self.assertEqual(reg.tickets.filter(attendance_type__slug='remote', ticket_type__slug='week_pass').count(), 1) + + # test tickets, two of same, delete one + reg_data['tickets'] = [ + {'attendance_type': 'onsite', 'ticket_type': 'one_day'}, + {'attendance_type': 'remote', 'ticket_type': 'week_pass'}, + ] + new_reg, action = process_single_registration(reg_data, meeting) + self.assertEqual(meeting.registration_set.count(), 1) + reg = meeting.registration_set.first() + self.assertEqual(new_reg, reg) + self.assertEqual(action, 'updated') + self.assertEqual(reg.tickets.count(), 2) + self.assertEqual(reg.tickets.filter(attendance_type__slug='onsite', ticket_type__slug='one_day').count(), 1) + 
self.assertEqual(reg.tickets.filter(attendance_type__slug='remote', ticket_type__slug='week_pass').count(), 1) + + def test_process_single_registration_nomcom(self): + '''Test that Volunteer is created if is_nomcom_volunteer=True''' + meeting = MeetingFactory(type_id='ietf', number='122') + person = PersonFactory() + reg_data = dict( + first_name=person.first_name(), + last_name=person.last_name(), + email=person.email().address, + affiliation='Microsoft', + country_code='US', + meeting=meeting.number, + checkedin=True, + is_nomcom_volunteer=True, + cancelled=False, + tickets=[{'attendance_type': 'onsite', 'ticket_type': 'week_pass'}], + ) + now = datetime.datetime.now() + if now.month > 10: + year = now.year + 1 + else: + year = now.year + # create appropriate group and nomcom objects + nomcom = NomComFactory.create(is_accepting_volunteers=True, **nomcom_kwargs_for_year(year)) + # assert no Volunteers exists + self.assertEqual(Volunteer.objects.count(), 0) + new_reg, action = process_single_registration(reg_data, meeting) + self.assertEqual(action, 'created') + # assert Volunteer exists + self.assertEqual(Volunteer.objects.count(), 1) + volunteer = Volunteer.objects.last() + self.assertEqual(volunteer.person, person) + self.assertEqual(volunteer.nomcom, nomcom) + self.assertEqual(volunteer.origin, 'registration') + + def test_process_single_registration_cancelled(self): + # test cancelled registration, one of two tickets + meeting = MeetingFactory(type_id='ietf', number='122') + person = PersonFactory() + reg = RegistrationFactory(meeting=meeting, person=person, checkedin=False, with_ticket={'attendance_type_id': 'onsite'}) + RegistrationTicketFactory(registration=reg, attendance_type_id='remote', ticket_type_id='week_pass') + reg_data = dict( + first_name=person.first_name(), + last_name=person.last_name(), + email=person.email().address, + affiliation='Microsoft', + country_code='US', + meeting=meeting.number, + checkedin=False, + is_nomcom_volunteer=False, + cancelled=True, + tickets=[{'attendance_type': 'onsite', 'ticket_type': 'week_pass'}], + ) + self.assertEqual(meeting.registration_set.count(), 1) + self.assertEqual(reg.tickets.count(), 2) + new_reg, action = process_single_registration(reg_data, meeting) + self.assertEqual((new_reg, action), (None, 'deleted')) + self.assertEqual(meeting.registration_set.count(), 1) + self.assertEqual(reg.tickets.count(), 1) + self.assertTrue(reg.tickets.filter(attendance_type__slug='remote').exists()) + # test cancelled registration, last ticket + reg_data['tickets'][0]['attendance_type'] = 'remote' + new_reg, action = process_single_registration(reg_data, meeting) + self.assertEqual((new_reg, action), (None, 'deleted')) + self.assertEqual(meeting.registration_set.count(), 0) + + @patch("ietf.meeting.utils.sync_registration_data") + def test_fetch_attendance_from_meetings(self, mock_sync_reg_data): + mock_meetings = [object(), object(), object()] + d1 = dict(created=1, updated=2, deleted=0, processed=3) + d2 = dict(created=2, updated=2, deleted=0, processed=4) + d3 = dict(created=1, updated=4, deleted=1, processed=5) + mock_sync_reg_data.side_effect = (d1, d2, d3) + stats = fetch_attendance_from_meetings(mock_meetings) + self.assertEqual( + [mock_sync_reg_data.call_args_list[n][0][0] for n in range(3)], + mock_meetings, + ) + self.assertEqual(stats, [d1, d2, d3]) diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index d6bbf291fd..17988e50be 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -1,4 
+1,4 @@ -# Copyright The IETF Trust 2009-2020, All Rights Reserved +# Copyright The IETF Trust 2009-2025, All Rights Reserved # -*- coding: utf-8 -*- import datetime import io @@ -12,9 +12,10 @@ import requests_mock from unittest import skipIf -from mock import patch, PropertyMock +from unittest.mock import call, patch, PropertyMock from pyquery import PyQuery from lxml.etree import tostring +from icalendar import Calendar from io import StringIO, BytesIO from bs4 import BeautifulSoup from urllib.parse import urlparse, urlsplit @@ -26,44 +27,51 @@ from django.urls import reverse as urlreverse from django.conf import settings from django.contrib.auth.models import User +from django.core.serializers.json import DjangoJSONEncoder from django.test import Client, override_settings from django.db.models import F, Max from django.http import QueryDict, FileResponse from django.template import Context, Template from django.utils import timezone +from django.utils.html import escape +from django.utils.safestring import mark_safe from django.utils.text import slugify import debug # pyflakes:ignore from ietf.doc.models import Document, NewRevisionDocEvent +from ietf.doc.storage_utils import exists_in_storage, remove_from_storage, retrieve_bytes, retrieve_str from ietf.group.models import Group, Role, GroupFeatures from ietf.group.utils import can_manage_group from ietf.person.models import Person -from ietf.meeting.helpers import can_approve_interim_request, can_view_interim_request, preprocess_assignments_for_agenda +from ietf.meeting.helpers import can_approve_interim_request, can_request_interim_meeting, can_view_interim_request, preprocess_assignments_for_agenda from ietf.meeting.helpers import send_interim_approval_request, AgendaKeywordTagger from ietf.meeting.helpers import send_interim_meeting_cancellation_notice, send_interim_session_cancellation_notice from ietf.meeting.helpers import send_interim_minutes_reminder, populate_important_dates, update_important_dates from ietf.meeting.models import Session, TimeSlot, Meeting, SchedTimeSessAssignment, Schedule, SessionPresentation, SlideSubmission, SchedulingEvent, Room, Constraint, ConstraintName from ietf.meeting.test_data import make_meeting_test_data, make_interim_meeting, make_interim_test_data -from ietf.meeting.utils import finalize, condition_slide_order +from ietf.meeting.utils import ( + condition_slide_order, + generate_proceedings_content, + diff_meeting_schedules, +) from ietf.meeting.utils import add_event_info_to_session_qs, participants_for_meeting -from ietf.meeting.utils import create_recording, get_next_sequence +from ietf.meeting.utils import create_recording, delete_recording, get_next_sequence, bluesheet_data from ietf.meeting.views import session_draft_list, parse_agenda_filter_params, sessions_post_save, agenda_extract_schedule -from ietf.meeting.views import get_summary_by_area, get_summary_by_type, get_summary_by_purpose +from ietf.meeting.views import get_summary_by_area, get_summary_by_type, get_summary_by_purpose, generate_agenda_data from ietf.name.models import SessionStatusName, ImportantDateName, RoleName, ProceedingsMaterialTypeName -from ietf.utils.decorators import skip_coverage from ietf.utils.mail import outbox, empty_outbox, get_payload_text +from ietf.utils.test_runner import TestBlobstoreManager, disable_coverage from ietf.utils.test_utils import TestCase, login_testing_unauthorized, unicontent from ietf.utils.timezone import date_today, time_now -from ietf.person.factories import PersonFactory +from 
ietf.person.factories import PersonFactory, PersonalApiKeyFactory from ietf.group.factories import GroupFactory, GroupEventFactory, RoleFactory from ietf.meeting.factories import (SessionFactory, ScheduleFactory, SessionPresentationFactory, MeetingFactory, FloorPlanFactory, TimeSlotFactory, SlideSubmissionFactory, RoomFactory, ConstraintFactory, MeetingHostFactory, ProceedingsMaterialFactory, - AttendedFactory) -from ietf.stats.factories import MeetingRegistrationFactory + AttendedFactory, RegistrationFactory) from ietf.doc.factories import DocumentFactory, WgDraftFactory from ietf.submit.tests import submission_file from ietf.utils.test_utils import assert_ical_response_is_valid @@ -110,7 +118,7 @@ def setUp(self): # files will upload to the locations specified in settings.py. # Note that this will affect any use of the storage class in # meeting.models - i.e., FloorPlan.image and MeetingHost.logo - self.patcher = patch('ietf.meeting.models.NoLocationMigrationFileSystemStorage.base_location', + self.patcher = patch('ietf.meeting.models.BlobShadowFileSystemStorage.base_location', new_callable=PropertyMock) mocked = self.patcher.start() mocked.return_value = self.storage_dir @@ -125,8 +133,12 @@ def tearDown(self): settings.MEETINGHOST_LOGO_PATH = self.saved_meetinghost_logo_path super().tearDown() - def write_materials_file(self, meeting, doc, content, charset="utf-8"): - path = os.path.join(self.materials_dir, "%s/%s/%s" % (meeting.number, doc.type_id, doc.uploaded_filename)) + def write_materials_file(self, meeting, doc, content, charset="utf-8", with_ext=None): + if with_ext is None: + filename = doc.uploaded_filename + else: + filename = Path(doc.uploaded_filename).with_suffix(with_ext) + path = os.path.join(self.materials_dir, "%s/%s/%s" % (meeting.number, doc.type_id, filename)) dirname = os.path.dirname(path) if not os.path.exists(dirname): @@ -225,6 +237,7 @@ def test_meeting_agenda(self): session.save() slot = TimeSlot.objects.get(sessionassignments__session=session,sessionassignments__schedule=meeting.schedule) meeting.timeslot_set.filter(type_id="break").update(show_location=False) + meeting.importantdate_set.create(name_id='prelimagenda',date=date_today() + datetime.timedelta(days=20)) # self.write_materials_files(meeting, session) # @@ -241,30 +254,35 @@ def test_meeting_agenda(self): # Agenda API tests # -> Meeting data - r = self.client.get(urlreverse("ietf.meeting.views.api_get_agenda_data", kwargs=dict(num=meeting.number))) - self.assertEqual(r.status_code, 200) - rjson = json.loads(r.content.decode("utf8")) - self.assertJSONEqual( - r.content.decode("utf8"), + # First, check that the generation function does the right thing + generated_data = generate_agenda_data(meeting.number) + self.assertEqual( + generated_data, { "meeting": { "number": meeting.number, "city": meeting.city, "startDate": meeting.date.isoformat(), "endDate": meeting.end_date().isoformat(), - "updated": rjson.get("meeting").get("updated"), # Just expect the value to exist + "updated": generated_data.get("meeting").get("updated"), # Just expect the value to exist "timezone": meeting.time_zone, "infoNote": meeting.agenda_info_note, - "warningNote": meeting.agenda_warning_note + "warningNote": meeting.agenda_warning_note, + "prelimAgendaDate": (date_today() + datetime.timedelta(days=20)).isoformat() }, - "categories": rjson.get("categories"), # Just expect the value to exist + "categories": generated_data.get("categories"), # Just expect the value to exist "isCurrentMeeting": True, - "useNotes": True, - 
"schedule": rjson.get("schedule"), # Just expect the value to exist + "usesNotes": False, # make_meeting_test_data sets number=72 + "schedule": generated_data.get("schedule"), # Just expect the value to exist "floors": [] } ) - # -> Session Materials + with patch("ietf.meeting.views.generate_agenda_data", return_value=generated_data): + r = self.client.get(urlreverse("ietf.meeting.views.api_get_agenda_data", kwargs=dict(num=meeting.number))) + self.assertEqual(r.status_code, 200) + # json.dumps using the DjangoJSONEncoder to handle timestamps consistently + self.assertJSONEqual(r.content.decode("utf8"), json.dumps(generated_data, cls=DjangoJSONEncoder)) + # -> Session MaterialM r = self.client.get(urlreverse("ietf.meeting.views.api_get_session_materials", kwargs=dict(session_id=session.id))) self.assertEqual(r.status_code, 200) rjson = json.loads(r.content.decode("utf8")) @@ -294,6 +312,8 @@ def test_meeting_agenda(self): (slot.time + slot.duration).astimezone(meeting.tz()).strftime("%H%M"), )) self.assertContains(r, f"shown in the {meeting.tz()} time zone") + updated = meeting.updated().astimezone(meeting.tz()).strftime("%Y-%m-%d %H:%M:%S %Z") + self.assertContains(r, f"Updated {updated}") # text, UTC r = self.client.get(urlreverse( @@ -305,10 +325,20 @@ def test_meeting_agenda(self): self.assertContains(r, session.group.parent.acronym.upper()) self.assertContains(r, slot.location.name) self.assertContains(r, "{}-{}".format( - slot.time.astimezone(datetime.timezone.utc).strftime("%H%M"), - (slot.time + slot.duration).astimezone(datetime.timezone.utc).strftime("%H%M"), + slot.time.astimezone(datetime.UTC).strftime("%H%M"), + (slot.time + slot.duration).astimezone(datetime.UTC).strftime("%H%M"), )) self.assertContains(r, "shown in UTC") + updated = meeting.updated().astimezone(datetime.UTC).strftime("%Y-%m-%d %H:%M:%S %Z") + self.assertContains(r, f"Updated {updated}") + + # text, invalid updated (none) + with patch("ietf.meeting.models.Meeting.updated", return_value=None): + r = self.client.get(urlreverse( + "ietf.meeting.views.agenda_plain", + kwargs=dict(num=meeting.number, ext=".txt", utc="-utc"), + )) + self.assertNotContains(r, "Updated ") # future meeting, no agenda r = self.client.get(urlreverse("ietf.meeting.views.agenda_plain", kwargs=dict(num=future_meeting.number, ext=".txt"))) @@ -343,8 +373,8 @@ def test_meeting_agenda(self): self.assertContains(r, session.group.parent.acronym.upper()) self.assertContains(r, slot.location.name) self.assertContains(r, registration_text) - start_time = slot.time.astimezone(datetime.timezone.utc) - end_time = slot.end_time().astimezone(datetime.timezone.utc) + start_time = slot.time.astimezone(datetime.UTC) + end_time = slot.end_time().astimezone(datetime.UTC) self.assertContains(r, '"{}","{}","{}"'.format( start_time.strftime("%Y-%m-%d"), start_time.strftime("%H%M"), @@ -359,33 +389,134 @@ def test_meeting_agenda(self): r = self.client.get(ical_url) assert_ical_response_is_valid(self, r) - self.assertContains(r, "BEGIN:VTIMEZONE") - self.assertContains(r, "END:VTIMEZONE") - + self.assertNotEqual( + meeting.time_zone, + meeting.time_zone.lower(), + "meeting needs a mixed-case tz for this test", + ) + self.assertNotContains(r, meeting.time_zone.lower(), msg_prefix="time_zone should not be lower-cased") + self.assertNotEqual( + meeting.time_zone, + meeting.time_zone.upper(), + "meeting needs a mixed-case tz for this test", + ) + self.assertNotContains(r, meeting.time_zone.upper(), msg_prefix="time_zone should not be upper-cased") + # iCal, single 
group r = self.client.get(ical_url + "?show=" + session.group.parent.acronym.upper()) assert_ical_response_is_valid(self, r) self.assertContains(r, session.group.acronym) self.assertContains(r, session.group.name) - self.assertContains(r, session.remote_instructions) - self.assertContains(r, slot.location.name) - self.assertContains(r, 'https://onsite.example.com') - self.assertContains(r, 'https://meetecho.example.com') - self.assertContains(r, "BEGIN:VTIMEZONE") - self.assertContains(r, "END:VTIMEZONE") - self.assertContains(r, session.agenda().get_href()) - self.assertContains( - r, + cal = Calendar.from_ical(r.content) + events = [component for component in cal.walk() if component.name == "VEVENT"] + + self.assertEqual(len(events), 2) + self.assertIn(session.remote_instructions, events[0].get('description')) + self.assertIn("Onsite tool: https://onsite.example.com", events[0].get('description')) + self.assertIn("Meetecho: https://meetecho.example.com", events[0].get('description')) + self.assertIn(f"Agenda {session.agenda().get_href()}", events[0].get('description')) + session_materials_url = settings.IDTRACKER_BASE_URL + urlreverse( + 'ietf.meeting.views.session_details', + kwargs=dict(num=meeting.number, acronym=session.group.acronym) + ) + self.assertIn(f"Session materials: {session_materials_url}", events[0].get('description')) + self.assertIn( urlreverse( 'ietf.meeting.views.session_details', kwargs=dict(num=meeting.number, acronym=session.group.acronym)), - msg_prefix='ical should contain link to meeting materials page for session') + events[0].get('description')) + self.assertEqual( + session_materials_url, + events[0].get('url') + ) + self.assertContains(r, f"LOCATION:{slot.location.name}") + # Floor Plan r = self.client.get(urlreverse('floor-plan', kwargs=dict(num=meeting.number))) self.assertEqual(r.status_code, 200) + def test_session_recordings_via_factories(self): + session = SessionFactory(meeting__type_id="ietf", meeting__date=date_today()-datetime.timedelta(days=180), meeting__number=str(random.randint(108,150))) + self.assertEqual(session.meetecho_recording_name, "") + self.assertEqual(len(session.recordings()), 0) + url = urlreverse("ietf.meeting.views.session_details", kwargs=dict(num=session.meeting.number, acronym=session.group.acronym)) + r = self.client.get(url) + q = PyQuery(r.content) + # debug.show("q(f'#notes_and_recordings_{session.pk}')") + self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 2) + links = q(f"#notes_and_recordings_{session.pk} tr a") + self.assertEqual(len(links), 2) + self.assertEqual(links[0].attrib['href'], str(session.notes_url())) + self.assertEqual(links[1].attrib['href'], str(session.session_recording_url())) + + session.meetecho_recording_name = 'my_test_session_name' + session.save() + r = self.client.get(url) + q = PyQuery(r.content) + self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 2) + links = q(f"#notes_and_recordings_{session.pk} tr a") + self.assertEqual(len(links), 2) + self.assertEqual(links[0].attrib['href'], str(session.notes_url())) + self.assertEqual(links[1].attrib['href'], str(session.session_recording_url())) + + new_recording_url = "https://www.youtube.com/watch?v=jNQXAC9IVRw" + new_recording_title = "Me at the zoo" + create_recording(session, new_recording_url, new_recording_title) + r = self.client.get(url) + q = PyQuery(r.content) + self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 3) + links = q(f"#notes_and_recordings_{session.pk} tr a") + 
self.assertEqual(len(links), 3) + self.assertEqual(links[0].attrib['href'], str(session.notes_url())) + self.assertEqual(links[1].attrib['href'], new_recording_url) + self.assertIn(new_recording_title, links[1].text_content()) + self.assertEqual(links[2].attrib['href'], str(session.session_recording_url())) + #debug.show("q(f'#notes_and_recordings_{session_pk}')") + + def test_delete_recordings(self): + # No user specified, active recording state + sp = SessionPresentationFactory( + document__type_id="recording", + document__external_url="https://example.com/some-recording", + document__states=[("recording", "active")], + ) + doc = sp.document + doc.docevent_set.all().delete() # clear this out + delete_recording(sp) + self.assertFalse(SessionPresentation.objects.filter(pk=sp.pk).exists()) + self.assertEqual(doc.get_state("recording").slug, "deleted", "recording state updated") + self.assertEqual(doc.docevent_set.count(), 1, "one event added") + event = doc.docevent_set.first() + self.assertEqual(event.type, "changed_state", "event is a changed_state event") + self.assertEqual(event.by.name, "(System)", "system user is responsible") + + # Specified user, no recording state + sp = SessionPresentationFactory( + document__type_id="recording", + document__external_url="https://example.com/some-recording", + document__states=[], + ) + doc = sp.document + doc.docevent_set.all().delete() # clear this out + user = PersonFactory() # naming matches the methods - user is a Person, not a User + delete_recording(sp, user=user) + self.assertFalse(SessionPresentation.objects.filter(pk=sp.pk).exists()) + self.assertEqual(doc.get_state("recording").slug, "deleted", "recording state updated") + self.assertEqual(doc.docevent_set.count(), 1, "one event added") + event = doc.docevent_set.first() + self.assertEqual(event.type, "changed_state", "event is a changed_state event") + self.assertEqual(event.by, user, "user is responsible") + + # Document is not a recording + sp = SessionPresentationFactory( + document__type_id="draft", + document__external_url="https://example.com/some-recording", + ) + with self.assertRaises(ValueError): + delete_recording(sp) + def test_agenda_ical_next_meeting_type(self): # start with no upcoming IETF meetings, just an interim MeetingFactory( @@ -468,16 +599,16 @@ def test_materials_through_cdn(self): doc = DocumentFactory.create(name='agenda-172-mars', type_id='agenda', title="Agenda", uploaded_filename="agenda-172-mars.txt", group=session107.group, rev='00', states=[('agenda','active')]) pres = SessionPresentation.objects.create(session=session107,document=doc,rev=doc.rev) - session107.sessionpresentation_set.add(pres) # + session107.presentations.add(pres) # doc = DocumentFactory.create(name='minutes-172-mars', type_id='minutes', title="Minutes", uploaded_filename="minutes-172-mars.md", group=session107.group, rev='00', states=[('minutes','active')]) pres = SessionPresentation.objects.create(session=session107,document=doc,rev=doc.rev) - session107.sessionpresentation_set.add(pres) + session107.presentations.add(pres) doc = DocumentFactory.create(name='slides-172-mars-1-active', type_id='slides', title="Slideshow", uploaded_filename="slides-172-mars.txt", group=session107.group, rev='00', states=[('slides','active'), ('reuse_policy', 'single')]) pres = SessionPresentation.objects.create(session=session107,document=doc,rev=doc.rev) - session107.sessionpresentation_set.add(pres) + session107.presentations.add(pres) for session in ( Session.objects.filter(meeting=meeting, 
group__acronym="mars").first(), @@ -517,7 +648,7 @@ def test_named_session(self): group = GroupFactory() plain_session = SessionFactory(meeting=meeting, group=group) named_session = SessionFactory(meeting=meeting, group=group, name='I Got a Name') - for doc_type_id in ('agenda', 'minutes', 'bluesheets', 'slides', 'draft'): + for doc_type_id in ('agenda', 'minutes', 'slides', 'draft'): # Set up sessions materials that will have distinct URLs for each session. # This depends on settings.MEETING_DOC_HREFS and may need updating if that changes. SessionPresentationFactory( @@ -548,22 +679,22 @@ def test_named_session(self): named_row = named_label.closest('tr') self.assertTrue(named_row) - for material in (sp.document for sp in plain_session.sessionpresentation_set.all()): + for material in (sp.document for sp in plain_session.presentations.all()): if material.type_id == 'draft': expected_url = urlreverse( 'ietf.doc.views_doc.document_main', - kwargs={'name': material.canonical_name()}, + kwargs={'name': material.name}, ) else: expected_url = material.get_href(meeting) self.assertTrue(plain_row.find(f'a[href="{expected_url}"]')) self.assertFalse(named_row.find(f'a[href="{expected_url}"]')) - for material in (sp.document for sp in named_session.sessionpresentation_set.all()): + for material in (sp.document for sp in named_session.presentations.all()): if material.type_id == 'draft': expected_url = urlreverse( 'ietf.doc.views_doc.document_main', - kwargs={'name': material.canonical_name()}, + kwargs={'name': material.name}, ) else: expected_url = material.get_href(meeting) @@ -741,7 +872,56 @@ def test_materials_has_edit_links(self): ) self.assertEqual(len(q(f'a[href^="{edit_url}#session"]')), 1, f'Link to session_details page for {acro}') + def test_materials_document_extension_choice(self): + def _url(**kwargs): + return urlreverse("ietf.meeting.views.materials_document", kwargs=kwargs) + + presentation = SessionPresentationFactory( + document__rev="00", + document__name="slides-whatever", + document__uploaded_filename="slides-whatever-00.txt", + document__type_id="slides", + document__states=(("reuse_policy", "single"),) + ) + session = presentation.session + meeting = session.meeting + # This is not a realistic set of files to exist, but is useful for testing. Normally, + # we'd have _either_ txt, pdf, or pptx + pdf. 
+ self.write_materials_file(meeting, presentation.document, "Hi I'm a txt", with_ext=".txt") + self.write_materials_file(meeting, presentation.document, "Hi I'm a pptx", with_ext=".pptx") + + # with no rev, prefers the uploaded_filename + r = self.client.get(_url(document="slides-whatever", num=meeting.number)) # no rev + self.assertEqual(r.status_code, 200) + self.assertEqual(r.content.decode(), "Hi I'm a txt") + + # with a rev, prefers pptx because it comes first alphabetically + r = self.client.get(_url(document="slides-whatever-00", num=meeting.number)) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.content.decode(), "Hi I'm a pptx") + + # now create a pdf + self.write_materials_file(meeting, presentation.document, "Hi I'm a pdf", with_ext=".pdf") + # with no rev, still prefers uploaded_filename + r = self.client.get(_url(document="slides-whatever", num=meeting.number)) # no rev + self.assertEqual(r.status_code, 200) + self.assertEqual(r.content.decode(), "Hi I'm a txt") + + # pdf should be preferred with a rev + r = self.client.get(_url(document="slides-whatever-00", num=meeting.number)) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.content.decode(), "Hi I'm a pdf") + + # and explicit extensions should, of course, be respected + for ext in ["pdf", "pptx", "txt"]: + r = self.client.get(_url(document="slides-whatever-00", num=meeting.number, ext=f".{ext}")) + self.assertEqual(r.status_code, 200) + self.assertEqual(r.content.decode(), f"Hi I'm a {ext}") + + # and 404 should come up if the ext is not found + r = self.client.get(_url(document="slides-whatever-00", num=meeting.number, ext=".docx")) + self.assertEqual(r.status_code, 404) def test_materials_editable_groups(self): meeting = make_meeting_test_data() @@ -859,37 +1039,59 @@ def test_important_dates_ical(self): for d in meeting.importantdate_set.all(): self.assertContains(r, d.date.isoformat()) + updated = meeting.updated() + self.assertIsNotNone(updated) + expected_updated = updated.astimezone(datetime.UTC).strftime("%Y%m%dT%H%M%SZ") + self.assertContains(r, f"DTSTAMP:{expected_updated}") + dtstamps_count = r.content.decode("utf-8").count(f"DTSTAMP:{expected_updated}") + self.assertEqual(dtstamps_count, meeting.importantdate_set.count()) + + # With default cached_updated, 1970-01-01 + with patch("ietf.meeting.models.Meeting.updated", return_value=None): + r = self.client.get(url) + for d in meeting.importantdate_set.all(): + self.assertContains(r, d.date.isoformat()) + + expected_updated = "19700101T000000Z" + self.assertContains(r, f"DTSTAMP:{expected_updated}") + dtstamps_count = r.content.decode("utf-8").count(f"DTSTAMP:{expected_updated}") + self.assertEqual(dtstamps_count, meeting.importantdate_set.count()) + def test_group_ical(self): meeting = make_meeting_test_data() s1 = Session.objects.filter(meeting=meeting, group__acronym="mars").first() a1 = s1.official_timeslotassignment() t1 = a1.timeslot + # Create an extra session t2 = TimeSlotFactory.create( meeting=meeting, - time=meeting.tz().localize( + time=pytz.utc.localize( datetime.datetime.combine(meeting.date, datetime.time(11, 30)) ) ) + s2 = SessionFactory.create(meeting=meeting, group=s1.group, add_to_schedule=False) SchedTimeSessAssignment.objects.create(timeslot=t2, session=s2, schedule=meeting.schedule) - # + url = urlreverse('ietf.meeting.views.agenda_ical', kwargs={'num':meeting.number, 'acronym':s1.group.acronym, }) r = self.client.get(url) assert_ical_response_is_valid(self, r, expected_event_summaries=['mars - Martian Special 
Interest Group'], expected_event_count=2) - self.assertContains(r, t1.local_start_time().strftime('%Y%m%dT%H%M%S')) - self.assertContains(r, t2.local_start_time().strftime('%Y%m%dT%H%M%S')) - # + self.assertContains(r, f"DTSTART:{t1.time.strftime('%Y%m%dT%H%M%SZ')}") + self.assertContains(r, f"DTEND:{(t1.time + t1.duration).strftime('%Y%m%dT%H%M%SZ')}") + self.assertContains(r, f"DTSTART:{t2.time.strftime('%Y%m%dT%H%M%SZ')}") + self.assertContains(r, f"DTEND:{(t2.time + t2.duration).strftime('%Y%m%dT%H%M%SZ')}") + url = urlreverse('ietf.meeting.views.agenda_ical', kwargs={'num':meeting.number, 'session_id':s1.id, }) r = self.client.get(url) assert_ical_response_is_valid(self, r, expected_event_summaries=['mars - Martian Special Interest Group'], expected_event_count=1) - self.assertContains(r, t1.local_start_time().strftime('%Y%m%dT%H%M%S')) - self.assertNotContains(r, t2.local_start_time().strftime('%Y%m%dT%H%M%S')) + self.assertContains(r, f"DTSTART:{t1.time.strftime('%Y%m%dT%H%M%SZ')}") + self.assertNotContains(r, f"DTSTART:{t2.time.strftime('%Y%m%dT%H%M%SZ')}") def test_parse_agenda_filter_params(self): def _r(show=(), hide=(), showtypes=(), hidetypes=()): @@ -955,10 +1157,10 @@ def build_session_setup(self): # but lists a different on in its agenda. The expectation is that the pdf and tgz views will return both. session = SessionFactory(group__type_id='wg',meeting__type_id='ietf') draft1 = WgDraftFactory(group=session.group) - session.sessionpresentation_set.create(document=draft1) + session.presentations.create(document=draft1) draft2 = WgDraftFactory(group=session.group) agenda = DocumentFactory(type_id='agenda',group=session.group, uploaded_filename='agenda-%s-%s' % (session.meeting.number,session.group.acronym), states=[('agenda','active')]) - session.sessionpresentation_set.create(document=agenda) + session.presentations.create(document=agenda) self.write_materials_file(session.meeting, session.materials.get(type="agenda"), "1. WG status (15 minutes)\n\n2. 
Status of %s\n\n" % draft2.name) filenames = [] @@ -983,8 +1185,8 @@ def test_session_draft_tarfile(self): os.unlink(filename) @skipIf(skip_pdf_tests, skip_message) - @skip_coverage - def test_session_draft_pdf(self): + @disable_coverage() + def test_session_draft_pdf(self): # pragma: no cover session, filenames = self.build_session_setup() try: url = urlreverse('ietf.meeting.views.session_draft_pdf', kwargs={'num':session.meeting.number,'acronym':session.group.acronym}) @@ -1919,8 +2121,8 @@ def test_editor_time_zone(self): # strftime() does not seem to support hours without leading 0, so do this manually time_label_string = f'{ts_start.hour:d}:{ts_start.minute:02d} - {ts_end.hour:d}:{ts_end.minute:02d}' self.assertIn(time_label_string, time_label.text()) - self.assertEqual(time_label.attr('data-start'), ts_start.astimezone(datetime.timezone.utc).isoformat()) - self.assertEqual(time_label.attr('data-end'), ts_end.astimezone(datetime.timezone.utc).isoformat()) + self.assertEqual(time_label.attr('data-start'), ts_start.astimezone(datetime.UTC).isoformat()) + self.assertEqual(time_label.attr('data-end'), ts_end.astimezone(datetime.UTC).isoformat()) ts_swap = time_label.find('.swap-timeslot-col') origin_label = ts_swap.attr('data-origin-label') @@ -1931,8 +2133,8 @@ def test_editor_time_zone(self): timeslot_elt = pq(f'#timeslot{timeslot.pk}') self.assertEqual(len(timeslot_elt), 1) - self.assertEqual(timeslot_elt.attr('data-start'), ts_start.astimezone(datetime.timezone.utc).isoformat()) - self.assertEqual(timeslot_elt.attr('data-end'), ts_end.astimezone(datetime.timezone.utc).isoformat()) + self.assertEqual(timeslot_elt.attr('data-start'), ts_start.astimezone(datetime.UTC).isoformat()) + self.assertEqual(timeslot_elt.attr('data-end'), ts_end.astimezone(datetime.UTC).isoformat()) timeslot_label = pq(f'#timeslot{timeslot.pk} .time-label') self.assertEqual(len(timeslot_label), 1) @@ -1960,7 +2162,8 @@ def create_timeslots_url(meeting): @staticmethod def create_bare_meeting(number=120) -> Meeting: """Create a basic IETF meeting""" - return MeetingFactory( + # Call create() explicitly so mypy sees the correct type + return MeetingFactory.create( type_id='ietf', number=number, date=date_today() + datetime.timedelta(days=10), @@ -3020,7 +3223,9 @@ def test_ajax_delete_timeslots_invalid(self): class ReorderSlidesTests(TestCase): - def test_add_slides_to_session(self): + @override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls + @patch("ietf.meeting.views.SlidesManager") + def test_add_slides_to_session(self, mock_slides_manager_cls): for type_id in ('ietf','interim'): chair_role = RoleFactory(name_id='chair') session = SessionFactory(group=chair_role.group, meeting__date=date_today() - datetime.timedelta(days=90), meeting__type_id=type_id) @@ -3031,6 +3236,7 @@ def test_add_slides_to_session(self): r = self.client.post(url, {'order':1, 'name':slides.name }) self.assertEqual(r.status_code, 403) self.assertIn('have permission', unicontent(r)) + self.assertFalse(mock_slides_manager_cls.called) self.client.login(username=chair_role.person.user.username, password=chair_role.person.user.username+"+password") @@ -3038,6 +3244,7 @@ def test_add_slides_to_session(self): r = self.client.post(url, {'order':0, 'name':slides.name }) self.assertEqual(r.status_code, 403) self.assertIn('materials cutoff', unicontent(r)) + self.assertFalse(mock_slides_manager_cls.called) session.meeting.date = date_today() session.meeting.save() @@ -3047,54 +3254,67 @@ def 
test_add_slides_to_session(self): self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('No data',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) r = self.client.post(url, {'garbage':'garbage'}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('order is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) r = self.client.post(url, {'order':0, 'name':slides.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('order is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) r = self.client.post(url, {'order':2, 'name':slides.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('order is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) r = self.client.post(url, {'order':'garbage', 'name':slides.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('order is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) # Invalid name r = self.client.post(url, {'order':1 }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('name is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) r = self.client.post(url, {'order':1, 'name':'garbage' }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('name is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) # Valid post r = self.client.post(url, {'order':1, 'name':slides.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(session.sessionpresentation_set.count(),1) + self.assertEqual(session.presentations.count(),1) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.add.called) + self.assertEqual(mock_slides_manager_cls.return_value.add.call_args, call(session=session, slides=slides, order=1)) + mock_slides_manager_cls.reset_mock() # Ignore a request to add slides that are already in a session r = self.client.post(url, {'order':1, 'name':slides.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(session.sessionpresentation_set.count(),1) + self.assertEqual(session.presentations.count(),1) + self.assertFalse(mock_slides_manager_cls.called) session2 = SessionFactory(group=session.group, meeting=session.meeting) SessionPresentationFactory.create_batch(3, document__type_id='slides', session=session2) - for num, sp in enumerate(session2.sessionpresentation_set.filter(document__type_id='slides'),start=1): + for num, sp in enumerate(session2.presentations.filter(document__type_id='slides'),start=1): sp.order = num sp.save() @@ -3106,24 +3326,41 @@ def test_add_slides_to_session(self): r = self.client.post(url, {'order':1, 'name':more_slides[0].name}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(session2.sessionpresentation_set.get(document=more_slides[0]).order,1) - self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,5))) + 
self.assertEqual(session2.presentations.get(document=more_slides[0]).order,1) + self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,5))) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.add.called) + self.assertEqual(mock_slides_manager_cls.return_value.add.call_args, call(session=session2, slides=more_slides[0], order=1)) + mock_slides_manager_cls.reset_mock() # Insert at end r = self.client.post(url, {'order':5, 'name':more_slides[1].name}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(session2.sessionpresentation_set.get(document=more_slides[1]).order,5) - self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,6))) + self.assertEqual(session2.presentations.get(document=more_slides[1]).order,5) + self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,6))) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.add.called) + self.assertEqual(mock_slides_manager_cls.return_value.add.call_args, call(session=session2, slides=more_slides[1], order=5)) + mock_slides_manager_cls.reset_mock() # Insert in middle r = self.client.post(url, {'order':3, 'name':more_slides[2].name}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(session2.sessionpresentation_set.get(document=more_slides[2]).order,3) - self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,7))) - - def test_remove_slides_from_session(self): + self.assertEqual(session2.presentations.get(document=more_slides[2]).order,3) + self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,7))) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.add.called) + self.assertEqual(mock_slides_manager_cls.return_value.add.call_args, call(session=session2, slides=more_slides[2], order=3)) + mock_slides_manager_cls.reset_mock() + + @override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls + @patch("ietf.meeting.views.SlidesManager") + def test_remove_slides_from_session(self, mock_slides_manager_cls): for type_id in ['ietf','interim']: chair_role = RoleFactory(name_id='chair') session = SessionFactory(group=chair_role.group, meeting__date=date_today()-datetime.timedelta(days=90), meeting__type_id=type_id) @@ -3134,6 +3371,7 @@ def test_remove_slides_from_session(self): r = self.client.post(url, {'oldIndex':1, 'name':slides.name }) self.assertEqual(r.status_code, 403) self.assertIn('have permission', unicontent(r)) + self.assertFalse(mock_slides_manager_cls.called) self.client.login(username=chair_role.person.user.username, password=chair_role.person.user.username+"+password") @@ -3141,6 +3379,7 @@ def test_remove_slides_from_session(self): r = self.client.post(url, {'oldIndex':0, 'name':slides.name }) self.assertEqual(r.status_code, 403) self.assertIn('materials cutoff', unicontent(r)) + 
self.assertFalse(mock_slides_manager_cls.called) session.meeting.date = date_today() session.meeting.save() @@ -3150,40 +3389,47 @@ def test_remove_slides_from_session(self): self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('No data',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) r = self.client.post(url, {'garbage':'garbage'}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('index is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) r = self.client.post(url, {'oldIndex':0, 'name':slides.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('index is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) r = self.client.post(url, {'oldIndex':'garbage', 'name':slides.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('index is not valid',r.json()['error']) - + self.assertFalse(mock_slides_manager_cls.called) + # No matching thing to delete r = self.client.post(url, {'oldIndex':1, 'name':slides.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('index is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) - session.sessionpresentation_set.create(document=slides, rev=slides.rev, order=1) + session.presentations.create(document=slides, rev=slides.rev, order=1) # Bad names r = self.client.post(url, {'oldIndex':1}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('name is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) r = self.client.post(url, {'oldIndex':1, 'name':'garbage' }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('name is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) slides2 = DocumentFactory(type_id='slides') @@ -3192,22 +3438,29 @@ def test_remove_slides_from_session(self): self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('SessionPresentation not found',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) - session.sessionpresentation_set.create(document=slides2, rev=slides2.rev, order=2) + session.presentations.create(document=slides2, rev=slides2.rev, order=2) r = self.client.post(url, {'oldIndex':1, 'name':slides2.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('Name does not match index',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) # valid removal r = self.client.post(url, {'oldIndex':1, 'name':slides.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(session.sessionpresentation_set.count(),1) + self.assertEqual(session.presentations.count(),1) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.delete.called) + self.assertEqual(mock_slides_manager_cls.return_value.delete.call_args, call(session=session, slides=slides)) + mock_slides_manager_cls.reset_mock() session2 = SessionFactory(group=session.group, meeting=session.meeting) sp_list = SessionPresentationFactory.create_batch(5, document__type_id='slides', session=session2) - for num, 
sp in enumerate(session2.sessionpresentation_set.filter(document__type_id='slides'),start=1): + for num, sp in enumerate(session2.presentations.filter(document__type_id='slides'),start=1): sp.order = num sp.save() @@ -3217,27 +3470,41 @@ def test_remove_slides_from_session(self): r = self.client.post(url, {'oldIndex':1, 'name':sp_list[0].document.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertFalse(session2.sessionpresentation_set.filter(pk=sp_list[0].pk).exists()) - self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,5))) + self.assertFalse(session2.presentations.filter(pk=sp_list[0].pk).exists()) + self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,5))) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.delete.called) + self.assertEqual(mock_slides_manager_cls.return_value.delete.call_args, call(session=session2, slides=sp_list[0].document)) + mock_slides_manager_cls.reset_mock() # delete in middle of list r = self.client.post(url, {'oldIndex':4, 'name':sp_list[4].document.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertFalse(session2.sessionpresentation_set.filter(pk=sp_list[4].pk).exists()) - self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,4))) + self.assertFalse(session2.presentations.filter(pk=sp_list[4].pk).exists()) + self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,4))) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.delete.called) + self.assertEqual(mock_slides_manager_cls.return_value.delete.call_args, call(session=session2, slides=sp_list[4].document)) + mock_slides_manager_cls.reset_mock() # delete at end of list r = self.client.post(url, {'oldIndex':2, 'name':sp_list[2].document.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertFalse(session2.sessionpresentation_set.filter(pk=sp_list[2].pk).exists()) - self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,3))) - - - - - def test_reorder_slides_in_session(self): + self.assertFalse(session2.presentations.filter(pk=sp_list[2].pk).exists()) + self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,3))) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.delete.called) + self.assertEqual(mock_slides_manager_cls.return_value.delete.call_args, call(session=session2, slides=sp_list[2].document)) + mock_slides_manager_cls.reset_mock() + + @override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls + @patch("ietf.meeting.views.SlidesManager") + def test_reorder_slides_in_session(self, mock_slides_manager_cls): def _sppk_at(sppk, positions): return [sppk[p-1] for p in positions] chair_role = RoleFactory(name_id='chair') @@ -3259,6 +3526,7 @@ def _sppk_at(sppk, 
positions): r = self.client.post(url, {'oldIndex':1, 'newIndex':2 }) self.assertEqual(r.status_code, 403) self.assertIn('have permission', unicontent(r)) + self.assertFalse(mock_slides_manager_cls.called) self.client.login(username=chair_role.person.user.username, password=chair_role.person.user.username+"+password") @@ -3266,6 +3534,7 @@ def _sppk_at(sppk, positions): r = self.client.post(url, {'oldIndex':1, 'newIndex':2 }) self.assertEqual(r.status_code, 403) self.assertIn('materials cutoff', unicontent(r)) + self.assertFalse(mock_slides_manager_cls.called) session.meeting.date = date_today() session.meeting.save() @@ -3275,60 +3544,98 @@ def _sppk_at(sppk, positions): self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('index is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) r = self.client.post(url, {'oldIndex':2, 'newIndex':6 }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('index is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) r = self.client.post(url, {'oldIndex':2, 'newIndex':2 }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) self.assertIn('index is not valid',r.json()['error']) + self.assertFalse(mock_slides_manager_cls.called) # Move from beginning r = self.client.post(url, {'oldIndex':1, 'newIndex':3}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,1,4,5])) + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,1,4,5])) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.send_update.called) + self.assertEqual(mock_slides_manager_cls.return_value.send_update.call_args, call(session)) + mock_slides_manager_cls.reset_mock() # Move to beginning r = self.client.post(url, {'oldIndex':3, 'newIndex':1}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5])) - + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5])) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.send_update.called) + self.assertEqual(mock_slides_manager_cls.return_value.send_update.call_args, call(session)) + mock_slides_manager_cls.reset_mock() + # Move from end r = self.client.post(url, {'oldIndex':5, 'newIndex':3}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,5,3,4])) + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,5,3,4])) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.send_update.called) + 
self.assertEqual(mock_slides_manager_cls.return_value.send_update.call_args, call(session)) + mock_slides_manager_cls.reset_mock() # Move to end r = self.client.post(url, {'oldIndex':3, 'newIndex':5}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5])) + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5])) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.send_update.called) + self.assertEqual(mock_slides_manager_cls.return_value.send_update.call_args, call(session)) + mock_slides_manager_cls.reset_mock() # Move beginning to end r = self.client.post(url, {'oldIndex':1, 'newIndex':5}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,4,5,1])) + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,4,5,1])) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.send_update.called) + self.assertEqual(mock_slides_manager_cls.return_value.send_update.call_args, call(session)) + mock_slides_manager_cls.reset_mock() # Move middle to middle r = self.client.post(url, {'oldIndex':3, 'newIndex':4}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,5,4,1])) + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,5,4,1])) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.send_update.called) + self.assertEqual(mock_slides_manager_cls.return_value.send_update.call_args, call(session)) + mock_slides_manager_cls.reset_mock() r = self.client.post(url, {'oldIndex':3, 'newIndex':2}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,5,3,4,1])) + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,5,3,4,1])) + self.assertTrue(mock_slides_manager_cls.called) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertTrue(mock_slides_manager_cls.return_value.send_update.called) + self.assertEqual(mock_slides_manager_cls.return_value.send_update.call_args, call(session)) + mock_slides_manager_cls.reset_mock() # Reset for next iteration in the loop - session.sessionpresentation_set.update(order=F('pk')) + session.presentations.update(order=F('pk')) self.client.logout() @@ -3345,7 +3652,7 @@ def test_slide_order_reconditioning(self): except AssertionError: pass - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('order',flat=True)),list(range(1,6))) + 
self.assertEqual(list(session.presentations.order_by('order').values_list('order',flat=True)),list(range(1,6))) class EditTests(TestCase): @@ -4238,6 +4545,7 @@ def test_persistent_enabled_timeslot_types(self): class SessionDetailsTests(TestCase): + settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['SLIDE_STAGING_PATH'] def test_session_details(self): @@ -4334,7 +4642,7 @@ def test_add_session_drafts(self): group.role_set.create(name_id='chair',person = group_chair, email = group_chair.email()) session = SessionFactory.create(meeting__type_id='ietf',group=group, meeting__date=date_today() + datetime.timedelta(days=90)) SessionPresentationFactory.create(session=session,document__type_id='draft',rev=None) - old_draft = session.sessionpresentation_set.filter(document__type='draft').first().document + old_draft = session.presentations.filter(document__type='draft').first().document new_draft = DocumentFactory(type_id='draft') url = urlreverse('ietf.meeting.views.add_session_drafts', kwargs=dict(num=session.meeting.number, session_id=session.pk)) @@ -4355,10 +4663,10 @@ def test_add_session_drafts(self): q = PyQuery(r.content) self.assertIn("Already linked:", q('form .text-danger').text()) - self.assertEqual(1,session.sessionpresentation_set.count()) + self.assertEqual(1,session.presentations.count()) r = self.client.post(url,dict(drafts=[new_draft.pk,])) self.assertTrue(r.status_code, 302) - self.assertEqual(2,session.sessionpresentation_set.count()) + self.assertEqual(2,session.presentations.count()) session.meeting.date -= datetime.timedelta(days=180) session.meeting.save() @@ -4370,6 +4678,85 @@ def test_add_session_drafts(self): q = PyQuery(r.content) self.assertEqual(1,len(q(".alert-warning:contains('may affect published proceedings')"))) + def test_proposed_slides_for_approval(self): + # This test overlaps somewhat with MaterialsTests of proposed slides handling. The focus + # here is on the display of slides, not the approval action. 
+ group = GroupFactory() + meeting = MeetingFactory( + type_id="ietf", date=date_today() + datetime.timedelta(days=10) + ) + sessions = SessionFactory.create_batch( + 2, + group=group, + meeting=meeting, + ) + + # slides submission _not_ in the `pending` state + do_not_show = [ + SlideSubmissionFactory( + session=sessions[0], + title="already approved", + status_id="approved", + ), + SlideSubmissionFactory( + session=sessions[1], + title="already rejected", + status_id="rejected", + ), + ] + + # pending submissions + first_session_pending = SlideSubmissionFactory( + session=sessions[0], title="first session title" + ) + second_session_pending = SlideSubmissionFactory( + session=sessions[1], title="second session title" + ) + + # and their approval URLs + def _approval_url(slidesub): + return urlreverse( + "ietf.meeting.views.approve_proposed_slides", + kwargs={"slidesubmission_id": slidesub.pk, "num": meeting.number}, + ) + + first_approval_url = _approval_url(first_session_pending) + second_approval_url = _approval_url(second_session_pending) + do_not_show_urls = [_approval_url(ss) for ss in do_not_show] + + # Retrieve the URL as a group chair + url = urlreverse( + "ietf.meeting.views.session_details", + kwargs={ + "num": meeting.number, + "acronym": group.acronym, + }, + ) + chair = RoleFactory(group=group, name_id="chair").person + self.client.login( + username=chair.user.username, password=f"{chair.user.username}+password" + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + pq = PyQuery(r.content) + self.assertEqual( + len(pq(f'a[href="{first_approval_url}"]')), + 1, + "first session proposed slides should be linked for approval", + ) + self.assertEqual( + len(pq(f'a[href="{second_approval_url}"]')), + 1, + "second session proposed slides should be linked for approval", + ) + for no_show_url in do_not_show_urls: + self.assertEqual( + len(pq(f'a[href="{no_show_url}"]')), + 0, + "second session proposed slides should be linked for approval", + ) + + class EditScheduleListTests(TestCase): def setUp(self): super().setUp() @@ -4383,73 +4770,151 @@ def test_list_schedules(self): self.assertTrue(r.status_code, 200) def test_diff_schedules(self): - meeting = make_meeting_test_data() - - url = urlreverse('ietf.meeting.views.diff_schedules',kwargs={'num':meeting.number}) - login_testing_unauthorized(self,"secretary", url) - r = self.client.get(url) - self.assertTrue(r.status_code, 200) - - from_schedule = Schedule.objects.get(meeting=meeting, name="test-unofficial-schedule") - - session1 = Session.objects.filter(meeting=meeting, group__acronym='mars').first() - session2 = Session.objects.filter(meeting=meeting, group__acronym='ames').first() - session3 = SessionFactory(meeting=meeting, group=Group.objects.get(acronym='mars'), - attendees=10, requested_duration=datetime.timedelta(minutes=70), - add_to_schedule=False) - SchedulingEvent.objects.create(session=session3, status_id='schedw', by=Person.objects.first()) - - slot2 = TimeSlot.objects.filter(meeting=meeting, type='regular').order_by('-time').first() - slot3 = TimeSlot.objects.create( - meeting=meeting, type_id='regular', location=slot2.location, - duration=datetime.timedelta(minutes=60), - time=slot2.time + datetime.timedelta(minutes=60), + # Create meeting and some time slots + meeting = MeetingFactory(type_id="ietf", populate_schedule=False) + rooms = RoomFactory.create_batch(2, meeting=meeting) + # first index is room, second is time + timeslots = [ + [ + TimeSlotFactory( + location=room, + meeting=meeting, + 
time=datetime.datetime.combine( + meeting.date, datetime.time(9, 0, tzinfo=datetime.UTC) + ) + ), + TimeSlotFactory( + location=room, + meeting=meeting, + time=datetime.datetime.combine( + meeting.date, datetime.time(10, 0, tzinfo=datetime.UTC) + ) + ), + TimeSlotFactory( + location=room, + meeting=meeting, + time=datetime.datetime.combine( + meeting.date, datetime.time(11, 0, tzinfo=datetime.UTC) + ) + ), + ] + for room in rooms + ] + sessions = SessionFactory.create_batch( + 5, meeting=meeting, add_to_schedule=False ) - # copy - new_url = urlreverse("ietf.meeting.views.new_meeting_schedule", kwargs=dict(num=meeting.number, owner=from_schedule.owner_email(), name=from_schedule.name)) - r = self.client.post(new_url, { - 'name': "newtest", - 'public': "on", - }) - self.assertNoFormPostErrors(r) + from_schedule = ScheduleFactory(meeting=meeting) + to_schedule = ScheduleFactory(meeting=meeting) - to_schedule = Schedule.objects.get(meeting=meeting, name='newtest') + # sessions[0]: not scheduled in from_schedule, scheduled in to_schedule + SchedTimeSessAssignment.objects.create( + schedule=to_schedule, + session=sessions[0], + timeslot=timeslots[0][0], + ) + # sessions[1]: scheduled in from_schedule, not scheduled in to_schedule + SchedTimeSessAssignment.objects.create( + schedule=from_schedule, + session=sessions[1], + timeslot=timeslots[0][0], + ) + # sessions[2]: moves rooms, not time + SchedTimeSessAssignment.objects.create( + schedule=from_schedule, + session=sessions[2], + timeslot=timeslots[0][1], + ) + SchedTimeSessAssignment.objects.create( + schedule=to_schedule, + session=sessions[2], + timeslot=timeslots[1][1], + ) + # sessions[3]: moves time, not room + SchedTimeSessAssignment.objects.create( + schedule=from_schedule, + session=sessions[3], + timeslot=timeslots[1][1], + ) + SchedTimeSessAssignment.objects.create( + schedule=to_schedule, + session=sessions[3], + timeslot=timeslots[1][2], + ) + # sessions[4]: moves room and time + SchedTimeSessAssignment.objects.create( + schedule=from_schedule, + session=sessions[4], + timeslot=timeslots[1][0], + ) + SchedTimeSessAssignment.objects.create( + schedule=to_schedule, + session=sessions[4], + timeslot=timeslots[0][2], + ) - # make some changes + # Check the raw diffs + raw_diffs = diff_meeting_schedules(from_schedule, to_schedule) + self.assertCountEqual( + raw_diffs, + [ + { + "change": "schedule", + "session": sessions[0].pk, + "to": timeslots[0][0].pk, + }, + { + "change": "unschedule", + "session": sessions[1].pk, + "from": timeslots[0][0].pk, + }, + { + "change": "move", + "session": sessions[2].pk, + "from": timeslots[0][1].pk, + "to": timeslots[1][1].pk, + }, + { + "change": "move", + "session": sessions[3].pk, + "from": timeslots[1][1].pk, + "to": timeslots[1][2].pk, + }, + { + "change": "move", + "session": sessions[4].pk, + "from": timeslots[1][0].pk, + "to": timeslots[0][2].pk, + }, + ] + ) - edit_url = urlreverse("ietf.meeting.views.edit_meeting_schedule", kwargs=dict(num=meeting.number, owner=to_schedule.owner_email(), name=to_schedule.name)) + # Check the view + url = urlreverse("ietf.meeting.views.diff_schedules", + kwargs={"num": meeting.number}) + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + self.assertTrue(r.status_code, 200) - # schedule session - r = self.client.post(edit_url, { - 'action': 'assign', - 'timeslot': slot3.pk, - 'session': session3.pk, - }) - self.assertEqual(json.loads(r.content)['success'], True) - # unschedule session - r = self.client.post(edit_url, { - 'action': 
'unassign', - 'session': session1.pk, - }) - self.assertEqual(json.loads(r.content)['success'], True) - # move session - r = self.client.post(edit_url, { - 'action': 'assign', - 'timeslot': slot2.pk, - 'session': session2.pk, + # with show room changes disabled - does not show sessions[2] because it did + # not change time + r = self.client.get(url, { + "from_schedule": from_schedule.name, + "to_schedule": to_schedule.name, }) - self.assertEqual(json.loads(r.content)['success'], True) + self.assertTrue(r.status_code, 200) + q = PyQuery(r.content) + self.assertEqual(len(q(".schedule-diffs tr")), 4 + 1) - # now get differences + # with show room changes enabled - shows all changes r = self.client.get(url, { - 'from_schedule': from_schedule.name, - 'to_schedule': to_schedule.name, + "from_schedule": from_schedule.name, + "to_schedule": to_schedule.name, + "show_room_changes": "on", }) self.assertTrue(r.status_code, 200) - q = PyQuery(r.content) - self.assertEqual(len(q(".schedule-diffs tr")), 3+1) + self.assertEqual(len(q(".schedule-diffs tr")), 5 + 1) def test_delete_schedule(self): url = urlreverse('ietf.meeting.views.delete_schedule', @@ -4838,7 +5303,9 @@ def test_upcoming_ical(self): assert_ical_response_is_valid(self, r, expected_event_summaries=expected_event_summaries, expected_event_count=len(expected_event_summaries)) - self.assertContains(r, 'Remote instructions: https://someurl.example.com') + # Unfold long lines that might have been folded by iCal + content_unfolded = r.content.decode('utf-8').replace('\r\n ', '') + self.assertIn('Remote instructions: https://someurl.example.com', content_unfolded) Session.objects.filter(meeting__type_id='interim').update(remote_instructions='') r = self.client.get(url) @@ -4846,9 +5313,26 @@ def test_upcoming_ical(self): assert_ical_response_is_valid(self, r, expected_event_summaries=expected_event_summaries, expected_event_count=len(expected_event_summaries)) - self.assertNotContains(r, 'Remote instructions:') + content_unfolded = r.content.decode('utf-8').replace('\r\n ', '') + self.assertNotIn('Remote instructions:', content_unfolded) + + updated = meeting.updated() + self.assertIsNotNone(updated) + expected_updated = updated.astimezone(datetime.UTC).strftime("%Y%m%dT%H%M%SZ") + self.assertContains(r, f"DTSTAMP:{expected_updated}") + + # With default cached_updated, 1970-01-01 + with patch("ietf.meeting.models.Meeting.updated", return_value=None): + r = self.client.get(url) + self.assertEqual(r.status_code, 200) - def test_upcoming_ical_filter(self): + self.assertEqual(meeting.type_id, "ietf") + + expected_updated = "19700101T000000Z" + self.assertEqual(1, r.content.decode("utf-8").count(f"DTSTAMP:{expected_updated}")) + + @patch("ietf.meeting.utils.preprocess_meeting_important_dates") + def test_upcoming_ical_filter(self, mock_preprocess_meeting_important_dates): # Just a quick check of functionality - details tested by test_js.InterimTests make_meeting_test_data(create_interims=True) url = urlreverse("ietf.meeting.views.upcoming_ical") @@ -4870,6 +5354,8 @@ def test_upcoming_ical_filter(self): ], expected_event_count=2) + # Verify preprocess_meeting_important_dates isn't being called + mock_preprocess_meeting_important_dates.assert_not_called() def test_upcoming_json(self): make_meeting_test_data(create_interims=True) @@ -4940,6 +5426,7 @@ def test_interim_request_options(self): def do_interim_request_single_virtual(self, emails_expected): make_meeting_test_data() + TestBlobstoreManager().emptyTestBlobstores() group = 
Group.objects.get(acronym='mars') date = date_today() + datetime.timedelta(days=30) time = time_now().replace(microsecond=0,second=0) @@ -4990,6 +5477,12 @@ def do_interim_request_single_virtual(self, emails_expected): doc = session.materials.first() path = os.path.join(doc.get_file_path(),doc.filename_with_rev()) self.assertTrue(os.path.exists(path)) + with Path(path).open() as f: + self.assertEqual(f.read(), agenda) + self.assertEqual( + retrieve_str("agenda",doc.uploaded_filename), + agenda + ) # check notices to secretariat and chairs self.assertEqual(len(outbox), length_before + emails_expected) return meeting @@ -5011,6 +5504,7 @@ def test_interim_request_single_virtual_settings_approval_not_required(self): def test_interim_request_single_in_person(self): make_meeting_test_data() + TestBlobstoreManager().emptyTestBlobstores() group = Group.objects.get(acronym='mars') date = date_today() + datetime.timedelta(days=30) time = time_now().replace(microsecond=0,second=0) @@ -5057,6 +5551,10 @@ def test_interim_request_single_in_person(self): timeslot = session.official_timeslotassignment().timeslot self.assertEqual(timeslot.time,dt) self.assertEqual(timeslot.duration,duration) + self.assertEqual( + retrieve_str("agenda",session.agenda().uploaded_filename), + agenda + ) def test_interim_request_multi_day(self): make_meeting_test_data() @@ -5124,6 +5622,11 @@ def test_interim_request_multi_day(self): self.assertEqual(timeslot.time,dt2) self.assertEqual(timeslot.duration,duration) self.assertEqual(session.agenda_note,agenda_note) + for session in meeting.session_set.all(): + self.assertEqual( + retrieve_str("agenda",session.agenda().uploaded_filename), + agenda + ) def test_interim_request_multi_day_non_consecutive(self): make_meeting_test_data() @@ -5186,6 +5689,7 @@ def test_interim_request_multi_day_cancel(self): def test_interim_request_series(self): make_meeting_test_data() + TestBlobstoreManager().emptyTestBlobstores() meeting_count_before = Meeting.objects.filter(type='interim').count() date = date_today() + datetime.timedelta(days=30) if (date.month, date.day) == (12, 31): @@ -5273,6 +5777,11 @@ def test_interim_request_series(self): self.assertEqual(timeslot.time,dt2) self.assertEqual(timeslot.duration,duration) self.assertEqual(session.agenda_note,agenda_note) + for session in meeting.session_set.all(): + self.assertEqual( + retrieve_str("agenda",session.agenda().uploaded_filename), + agenda + ) # test_interim_pending subsumed by test_appears_on_pending @@ -5595,8 +6104,17 @@ def test_interim_request_cancel(self, mock): self.assertEqual(r.status_code, 403) self.assertFalse(mock.called, 'Should not cancel sessions if request rejected') - # test cancelling before announcement + # test with overly-long comments + comments += '0123456789abcdef'*32 self.client.login(username="marschairman", password="marschairman+password") + r = self.client.post(url, {'comments': comments}) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('form .is-invalid')) + # truncate to max_length + comments = comments[:512] + + # test cancelling before announcement length_before = len(outbox) r = self.client.post(url, {'comments': comments}) self.assertRedirects(r, urlreverse('ietf.meeting.views.upcoming')) @@ -5618,6 +6136,7 @@ def test_interim_request_cancel(self, mock): self.assertEqual(session.agenda_note, comments) self.assertEqual(len(outbox), length_before + 1) self.assertIn('Interim Meeting Cancelled', outbox[-1]['Subject']) + self.assertIn(comments, 
get_payload_text(outbox[-1])) self.assertTrue(mock.called, 'Should cancel sessions if request handled') self.assertCountEqual(mock.call_args[0][1], meeting.session_set.all()) @@ -5801,6 +6320,7 @@ def strfdelta(self, tdelta, fmt): def test_interim_request_edit_agenda_updates_doc(self): """Updating the agenda through the request edit form should update the doc correctly""" make_interim_test_data() + TestBlobstoreManager().emptyTestBlobstores() meeting = add_event_info_to_session_qs(Session.objects.filter(meeting__type='interim', group__acronym='mars')).filter(current_status='sched').first().meeting group = meeting.session_set.first().group url = urlreverse('ietf.meeting.views.interim_request_edit', kwargs={'number': meeting.number}) @@ -5836,6 +6356,10 @@ def test_interim_request_edit_agenda_updates_doc(self): self.assertNotEqual(agenda_doc.uploaded_filename, uploaded_filename_before, 'Uploaded filename should be updated') with (Path(agenda_doc.get_file_path()) / agenda_doc.uploaded_filename).open() as f: self.assertEqual(f.read(), 'modified agenda contents', 'New agenda contents should be saved') + self.assertEqual( + retrieve_str(agenda_doc.type_id, agenda_doc.uploaded_filename), + "modified agenda contents" + ) def test_interim_request_details_permissions(self): make_interim_test_data() @@ -5972,7 +6496,7 @@ class FinalizeProceedingsTests(TestCase): def test_finalize_proceedings(self): make_meeting_test_data() meeting = Meeting.objects.filter(type_id='ietf').order_by('id').last() - meeting.session_set.filter(group__acronym='mars').first().sessionpresentation_set.create(document=Document.objects.filter(type='draft').first(),rev=None) + meeting.session_set.filter(group__acronym='mars').first().presentations.create(document=Document.objects.filter(type='draft').first(),rev=None) url = urlreverse('ietf.meeting.views.finalize_proceedings',kwargs={'num':meeting.number}) login_testing_unauthorized(self,"secretary",url) @@ -5980,13 +6504,41 @@ def test_finalize_proceedings(self): self.assertEqual(r.status_code, 200) self.assertEqual(meeting.proceedings_final,False) - self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().sessionpresentation_set.filter(document__type="draft").first().rev,None) + self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().presentations.filter(document__type="draft").first().rev,None) r = self.client.post(url,{'finalize':1}) self.assertEqual(r.status_code, 302) meeting = Meeting.objects.get(pk=meeting.pk) self.assertEqual(meeting.proceedings_final,True) - self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().sessionpresentation_set.filter(document__type="draft").first().rev,'00') + self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().presentations.filter(document__type="draft").first().rev,'00') + @patch("ietf.meeting.utils.generate_bluesheet") + def test_bluesheet_generation(self, mock): + meeting = MeetingFactory(type_id="ietf", number="107") # number where generate_bluesheets should not be called + SessionFactory.create_batch(5, meeting=meeting) + url = urlreverse("ietf.meeting.views.finalize_proceedings", kwargs={"num": meeting.number}) + self.client.login(username="secretary", password="secretary+password") + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertFalse(mock.called) + r = self.client.post(url,{'finalize': 1}) + self.assertEqual(r.status_code, 302) + self.assertFalse(mock.called) + + meeting = MeetingFactory(type_id="ietf", number="108") # number 
where generate_bluesheets should be called + SessionFactory.create_batch(5, meeting=meeting) + url = urlreverse("ietf.meeting.views.finalize_proceedings", kwargs={"num": meeting.number}) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertFalse(mock.called) + r = self.client.post(url,{'finalize': 1}) + self.assertEqual(r.status_code, 302) + self.assertTrue(mock.called) + self.assertCountEqual( + [call_args[0][1] for call_args in mock.call_args_list], + [sess for sess in meeting.session_set.all()], + ) + + class MaterialsTests(TestCase): settings_temp_path_overrides = TestCase.settings_temp_path_overrides + [ 'AGENDA_PATH', @@ -6027,13 +6579,15 @@ def test_upload_bluesheets(self): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) - self.assertFalse(session.sessionpresentation_set.exists()) - test_file = StringIO('%PDF-1.4\n%âãÏÓ\nthis is some text for a test') + self.assertFalse(session.presentations.exists()) + test_content = '%PDF-1.4\n%âãÏÓ\nthis is some text for a test' + test_file = StringIO(test_content) test_file.name = "not_really.pdf" r = self.client.post(url,dict(file=test_file)) self.assertEqual(r.status_code, 302) - bs_doc = session.sessionpresentation_set.filter(document__type_id='bluesheets').first().document + bs_doc = session.presentations.filter(document__type_id='bluesheets').first().document self.assertEqual(bs_doc.rev,'00') + self.assertEqual(retrieve_str("bluesheets", f"{bs_doc.name}-{bs_doc.rev}.pdf"), test_content) r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) @@ -6062,13 +6616,15 @@ def test_upload_bluesheets_interim(self): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) - self.assertFalse(session.sessionpresentation_set.exists()) - test_file = StringIO('%PDF-1.4\n%âãÏÓ\nthis is some text for a test') + self.assertFalse(session.presentations.exists()) + test_content = '%PDF-1.4\n%âãÏÓ\nthis is some text for a test' + test_file = StringIO(test_content) test_file.name = "not_really.pdf" r = self.client.post(url,dict(file=test_file)) self.assertEqual(r.status_code, 302) - bs_doc = session.sessionpresentation_set.filter(document__type_id='bluesheets').first().document + bs_doc = session.presentations.filter(document__type_id='bluesheets').first().document self.assertEqual(bs_doc.rev,'00') + self.assertEqual(retrieve_str("bluesheets", f"{bs_doc.name}-{bs_doc.rev}.pdf"), test_content) def test_upload_bluesheets_interim_chair_access(self): make_meeting_test_data() @@ -6082,96 +6638,130 @@ def test_upload_bluesheets_interim_chair_access(self): self.assertIn('Upload', str(q("title"))) - def test_upload_minutes_agenda(self): - for doctype in ('minutes','agenda'): - session = SessionFactory(meeting__type_id='ietf') - if doctype == 'minutes': - url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id}) - else: - url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id}) - self.client.logout() - login_testing_unauthorized(self,"secretary",url) - r = self.client.get(url) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertIn('Upload', str(q("Title"))) - self.assertFalse(session.sessionpresentation_set.exists()) - self.assertFalse(q('form input[type="checkbox"]')) - - session2 = SessionFactory(meeting=session.meeting,group=session.group) - r = self.client.get(url) 
- self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q('form input[type="checkbox"]')) - - test_file = BytesIO(b'this is some text for a test') - test_file.name = "not_really.json" - r = self.client.post(url,dict(file=test_file)) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q('form .is-invalid')) - - test_file = BytesIO(b'this is some text for a test'*1510000) - test_file.name = "not_really.pdf" - r = self.client.post(url,dict(file=test_file)) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q('form .is-invalid')) - - test_file = BytesIO(b'') - test_file.name = "not_really.html" - r = self.client.post(url,dict(file=test_file)) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(q('form .is-invalid')) - - # Test html sanitization - test_file = BytesIO(b'<html><head><title>Title</title></head><body><h1>Title</h1><p>Some text</p></body></html>') - test_file.name = "some.html" - r = self.client.post(url,dict(file=test_file)) - self.assertEqual(r.status_code, 302) - doc = session.sessionpresentation_set.filter(document__type_id=doctype).first().document - self.assertEqual(doc.rev,'00') - text = doc.text() - self.assertIn('Some text', text) - self.assertNotIn('<head>
', text) - self.assertIn('charset="utf-8"', text) - - # txt upload - test_file = BytesIO(b'This is some text for a test, with the word\nvirtual at the beginning of a line.') - test_file.name = "some.txt" - r = self.client.post(url,dict(file=test_file,apply_to_all=False)) - self.assertEqual(r.status_code, 302) - doc = session.sessionpresentation_set.filter(document__type_id=doctype).first().document - self.assertEqual(doc.rev,'01') - self.assertFalse(session2.sessionpresentation_set.filter(document__type_id=doctype)) - + def test_label_future_sessions(self): + self.client.login(username='secretary', password='secretary+password') + for future in (True, False): + mtg_date = date_today()+datetime.timedelta(days=180 if future else -180) + session = SessionFactory(meeting__type_id='ietf', meeting__date=mtg_date) + # Verify future warning shows on the session details panel + url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) r = self.client.get(url) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertIn('Revise', str(q("Title"))) - test_file = BytesIO(b'this is some different text for a test') - test_file.name = "also_some.txt" - r = self.client.post(url,dict(file=test_file,apply_to_all=True)) - self.assertEqual(r.status_code, 302) - doc = Document.objects.get(pk=doc.pk) - self.assertEqual(doc.rev,'02') - self.assertTrue(session2.sessionpresentation_set.filter(document__type_id=doctype)) - - # Test bad encoding - test_file = BytesIO('

<html><head><title>Title</title></head><body><p>Some\x93text</p></body></html>
'.encode('latin1')) - test_file.name = "some.html" - r = self.client.post(url,dict(file=test_file)) - self.assertContains(r, 'Could not identify the file encoding') - doc = Document.objects.get(pk=doc.pk) - self.assertEqual(doc.rev,'02') + self.assertTrue(r.status_code==200) + if future: + self.assertContains(r, "Session has not ended yet") + else: + self.assertNotContains(r, "Session has not ended yet") - # Verify that we don't have dead links - url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) - top = '/meeting/%s/' % session.meeting.number - self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes') - self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'})) - self.crawl_materials(url=url, top=top) + def test_upload_minutes_agenda(self): + for doctype in ('minutes','agenda'): + for future in (True, False): + mtg_date = date_today()+datetime.timedelta(days=180 if future else -180) + session = SessionFactory(meeting__type_id='ietf', meeting__date=mtg_date) + if doctype == 'minutes': + url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id}) + else: + url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id}) + self.client.logout() + login_testing_unauthorized(self,"secretary",url) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn('Upload', str(q("Title"))) + self.assertFalse(session.presentations.exists()) + self.assertFalse(q('form input[type="checkbox"]')) + if future and doctype == "minutes": + self.assertContains(r, "Session has not ended yet") + else: + self.assertNotContains(r, "Session has not ended yet") + + session2 = SessionFactory(meeting=session.meeting,group=session.group) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('form input[type="checkbox"]')) + + # test not submitting a file + r = self.client.post(url, dict(submission_method="upload")) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q("form .is-invalid")) + + test_file = BytesIO(b'this is some text for a test') + test_file.name = "not_really.json" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('form .is-invalid')) + + test_file = BytesIO(b'this is some text for a test'*1510000) + test_file.name = "not_really.pdf" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('form .is-invalid')) + + test_file = BytesIO(b'') + test_file.name = "not_really.html" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertTrue(q('form .is-invalid')) + + # Test html sanitization + test_file = BytesIO(b'Title

</title></head><body><h1>Title</h1><p>Some text</p></body></html>
') + test_file.name = "some.html" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 302) + doc = session.presentations.filter(document__type_id=doctype).first().document + self.assertEqual(doc.rev,'00') + text = doc.text() + self.assertIn('Some text', text) + self.assertNotIn('
<head>', text) + text = retrieve_str(doctype, f"{doc.name}-{doc.rev}.html") + self.assertIn('Some text', text) + self.assertNotIn('<head>
', text) + + # txt upload + test_bytes = b'This is some text for a test, with the word\nvirtual at the beginning of a line.' + test_file = BytesIO(test_bytes) + test_file.name = "some.txt" + r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=False)) + self.assertEqual(r.status_code, 302) + doc = session.presentations.filter(document__type_id=doctype).first().document + self.assertEqual(doc.rev,'01') + self.assertFalse(session2.presentations.filter(document__type_id=doctype)) + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) + + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn('Revise', str(q("Title"))) + test_bytes = b'this is some different text for a test' + test_file = BytesIO(test_bytes) + test_file.name = "also_some.txt" + r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=True)) + self.assertEqual(r.status_code, 302) + doc = Document.objects.get(pk=doc.pk) + self.assertEqual(doc.rev,'02') + self.assertTrue(session2.presentations.filter(document__type_id=doctype)) + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) + + # Test bad encoding + test_file = BytesIO('

<html><head><title>Title</title></head><body><p>Some\x93text</p></body></html>
'.encode('latin1')) + test_file.name = "some.html" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertContains(r, 'Could not identify the file encoding') + doc = Document.objects.get(pk=doc.pk) + self.assertEqual(doc.rev,'02') + + # Verify that we don't have dead links + url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) + top = '/meeting/%s/' % session.meeting.number + self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes') + self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'})) + self.crawl_materials(url=url, top=top) def test_upload_minutes_agenda_unscheduled(self): for doctype in ('minutes','agenda'): @@ -6186,35 +6776,74 @@ def test_upload_minutes_agenda_unscheduled(self): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Upload', str(q("Title"))) - self.assertFalse(session.sessionpresentation_set.exists()) + self.assertFalse(session.presentations.exists()) self.assertFalse(q('form input[type="checkbox"]')) + self.assertNotContains(r, "Session has not ended yet") test_file = BytesIO(b'this is some text for a test') test_file.name = "not_really.txt" - r = self.client.post(url,dict(file=test_file,apply_to_all=False)) + r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=False)) self.assertEqual(r.status_code, 410) @override_settings(MEETING_MATERIALS_SERVE_LOCALLY=True) def test_upload_minutes_agenda_interim(self): - session=SessionFactory(meeting__type_id='interim') for doctype in ('minutes','agenda'): - if doctype=='minutes': - url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id}) - else: - url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id}) + for future in (True, False): + session=SessionFactory(meeting__type_id='interim', meeting__date = date_today()+datetime.timedelta(days=180 if future else -180)) + if doctype=='minutes': + url = urlreverse('ietf.meeting.views.upload_session_minutes',kwargs={'num':session.meeting.number,'session_id':session.id}) + else: + url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id}) + self.client.logout() + login_testing_unauthorized(self,"secretary",url) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn('Upload', str(q("title"))) + self.assertFalse(session.presentations.filter(document__type_id=doctype)) + if future and doctype == "minutes": + self.assertContains(r, "Session has not ended yet") + else: + self.assertNotContains(r, "Session has not ended yet") + test_bytes = b'this is some text for a test' + test_file = BytesIO(test_bytes) + test_file.name = "not_really.txt" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertEqual(r.status_code, 302) + doc = session.presentations.filter(document__type_id=doctype).first().document + self.assertEqual(doc.rev,'00') + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) + + # Verify that we don't have dead links + url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) + top = '/meeting/%s/' % 
session.meeting.number + self.requests_mock.get(f'{session.notes_url()}/download', text='markdown notes') + self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'})) + self.crawl_materials(url=url, top=top) + + @override_settings(MEETING_MATERIALS_SERVE_LOCALLY=True) + def test_upload_narrativeminutes(self): + for type_id in ["interim","ietf"]: + session=SessionFactory(meeting__type_id=type_id,group__acronym='iesg') + doctype='narrativeminutes' + url = urlreverse('ietf.meeting.views.upload_session_narrativeminutes',kwargs={'num':session.meeting.number,'session_id':session.id}) self.client.logout() login_testing_unauthorized(self,"secretary",url) r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) - self.assertFalse(session.sessionpresentation_set.filter(document__type_id=doctype)) - test_file = BytesIO(b'this is some text for a test') + self.assertFalse(session.presentations.filter(document__type_id=doctype)) + test_bytes = b'this is some text for a test' + test_file = BytesIO(test_bytes) test_file.name = "not_really.txt" - r = self.client.post(url,dict(file=test_file)) + r = self.client.post(url,dict(submission_method="upload",file=test_file)) self.assertEqual(r.status_code, 302) - doc = session.sessionpresentation_set.filter(document__type_id=doctype).first().document + doc = session.presentations.filter(document__type_id=doctype).first().document self.assertEqual(doc.rev,'00') + retrieved_bytes = retrieve_bytes(doctype, f"{doc.name}-{doc.rev}.txt") + self.assertEqual(retrieved_bytes, test_bytes) # Verify that we don't have dead links url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number, 'acronym': session.group.acronym}) @@ -6223,7 +6852,55 @@ def test_upload_minutes_agenda_interim(self): self.requests_mock.get(f'{session.notes_url()}/info', text=json.dumps({'title': 'title', 'updatetime': '2021-12-01T17:11:00z'})) self.crawl_materials(url=url, top=top) - def test_upload_slides(self): + def test_enter_agenda(self): + session = SessionFactory(meeting__type_id='ietf') + url = urlreverse('ietf.meeting.views.upload_session_agenda',kwargs={'num':session.meeting.number,'session_id':session.id}) + redirect_url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':session.meeting.number,'acronym':session.group.acronym}) + login_testing_unauthorized(self,"secretary",url) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn('Upload', str(q("Title"))) + self.assertFalse(session.presentations.exists()) + + test_text = 'Enter agenda from scratch' + r = self.client.post(url,dict(submission_method="enter",content=test_text)) + self.assertRedirects(r, redirect_url) + doc = session.presentations.filter(document__type_id='agenda').first().document + self.assertEqual(doc.rev,'00') + self.assertEqual(retrieve_str("agenda",f"{doc.name}-{doc.rev}.md"), test_text) + + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn('Revise', str(q("Title"))) + + test_bytes = b'Upload after enter' + test_file = BytesIO(test_bytes) + test_file.name = "some.txt" + r = self.client.post(url,dict(submission_method="upload",file=test_file)) + self.assertRedirects(r, redirect_url) + doc = Document.objects.get(pk=doc.pk) + self.assertEqual(doc.rev,'01') + retrieved_bytes = retrieve_bytes("agenda", f"{doc.name}-{doc.rev}.txt") + 
self.assertEqual(retrieved_bytes, test_bytes) + + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertIn('Revise', str(q("Title"))) + + test_text = 'Enter after upload' + r = self.client.post(url,dict(submission_method="enter",content=test_text)) + self.assertRedirects(r, redirect_url) + doc = Document.objects.get(pk=doc.pk) + self.assertEqual(doc.rev,'02') + self.assertEqual(retrieve_str("agenda",f"{doc.name}-{doc.rev}.md"), test_text) + + + @override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls + @patch("ietf.meeting.views.SlidesManager") + def test_upload_slides(self, mock_slides_manager_cls): session1 = SessionFactory(meeting__type_id='ietf') session2 = SessionFactory(meeting=session1.meeting,group=session1.group) @@ -6231,46 +6908,80 @@ def test_upload_slides(self): login_testing_unauthorized(self,"secretary",url) r = self.client.get(url) self.assertEqual(r.status_code, 200) + self.assertFalse(mock_slides_manager_cls.called) q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) - self.assertFalse(session1.sessionpresentation_set.filter(document__type_id='slides')) - test_file = BytesIO(b'this is not really a slide') + self.assertFalse(session1.presentations.filter(document__type_id='slides')) + test_bytes = b'this is not really a slide' + test_file = BytesIO(test_bytes) test_file.name = 'not_really.txt' - r = self.client.post(url,dict(file=test_file,title='a test slide file',apply_to_all=True)) + r = self.client.post(url,dict(file=test_file,title='a test slide file',apply_to_all=True,approved=True)) self.assertEqual(r.status_code, 302) - self.assertEqual(session1.sessionpresentation_set.count(),1) - self.assertEqual(session2.sessionpresentation_set.count(),1) - sp = session2.sessionpresentation_set.first() + self.assertEqual(session1.presentations.count(),1) + self.assertEqual(session2.presentations.count(),1) + sp = session2.presentations.first() self.assertEqual(sp.document.name, 'slides-%s-%s-a-test-slide-file' % (session1.meeting.number,session1.group.acronym ) ) self.assertEqual(sp.order,1) + self.assertEqual(mock_slides_manager_cls.call_count, 1) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 2) + self.assertEqual(retrieve_bytes("slides", f"{sp.document.name}-{sp.document.rev}.txt"), test_bytes) + # don't care which order they were called in, just that both sessions were updated + self.assertCountEqual( + mock_slides_manager_cls.return_value.add.call_args_list, + [ + call(session=session1, slides=sp.document, order=1), + call(session=session2, slides=sp.document, order=1), + ], + ) + mock_slides_manager_cls.reset_mock() url = urlreverse('ietf.meeting.views.upload_session_slides',kwargs={'num':session2.meeting.number,'session_id':session2.id}) - test_file = BytesIO(b'some other thing still not slidelike') + test_bytes = b'some other thing still not slidelike' + test_file = BytesIO(test_bytes) test_file.name = 'also_not_really.txt' - r = self.client.post(url,dict(file=test_file,title='a different slide file',apply_to_all=False)) + r = self.client.post(url,dict(file=test_file,title='a different slide file',apply_to_all=False,approved=True)) self.assertEqual(r.status_code, 302) - self.assertEqual(session1.sessionpresentation_set.count(),1) - self.assertEqual(session2.sessionpresentation_set.count(),2) - sp = 
session2.sessionpresentation_set.get(document__name__endswith='-a-different-slide-file') + self.assertEqual(session1.presentations.count(),1) + self.assertEqual(session2.presentations.count(),2) + sp = session2.presentations.get(document__name__endswith='-a-different-slide-file') self.assertEqual(sp.order,2) self.assertEqual(sp.rev,'00') self.assertEqual(sp.document.rev,'00') + self.assertEqual(mock_slides_manager_cls.call_count, 1) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 1) + self.assertEqual(retrieve_bytes("slides", f"{sp.document.name}-{sp.document.rev}.txt"), test_bytes) + self.assertEqual( + mock_slides_manager_cls.return_value.add.call_args, + call(session=session2, slides=sp.document, order=2), + ) + mock_slides_manager_cls.reset_mock() - url = urlreverse('ietf.meeting.views.upload_session_slides',kwargs={'num':session2.meeting.number,'session_id':session2.id,'name':session2.sessionpresentation_set.get(order=2).document.name}) + url = urlreverse('ietf.meeting.views.upload_session_slides',kwargs={'num':session2.meeting.number,'session_id':session2.id,'name':session2.presentations.get(order=2).document.name}) r = self.client.get(url) self.assertTrue(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Revise', str(q("title"))) - test_file = BytesIO(b'new content for the second slide deck') + test_bytes = b'new content for the second slide deck' + test_file = BytesIO(test_bytes) test_file.name = 'doesnotmatter.txt' - r = self.client.post(url,dict(file=test_file,title='rename the presentation',apply_to_all=False)) + r = self.client.post(url,dict(file=test_file,title='rename the presentation',apply_to_all=False, approved=True)) self.assertEqual(r.status_code, 302) - self.assertEqual(session1.sessionpresentation_set.count(),1) - self.assertEqual(session2.sessionpresentation_set.count(),2) - sp = session2.sessionpresentation_set.get(order=2) - self.assertEqual(sp.rev,'01') - self.assertEqual(sp.document.rev,'01') - + self.assertEqual(session1.presentations.count(),1) + self.assertEqual(session2.presentations.count(),2) + replacement_sp = session2.presentations.get(order=2) + self.assertEqual(replacement_sp.rev,'01') + self.assertEqual(replacement_sp.document.rev,'01') + self.assertEqual(mock_slides_manager_cls.call_count, 1) + self.assertEqual(retrieve_bytes("slides", f"{replacement_sp.document.name}-{replacement_sp.document.rev}.txt"), test_bytes) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.revise.call_count, 1) + self.assertEqual( + mock_slides_manager_cls.return_value.revise.call_args, + call(session=session2, slides=sp.document), + ) + def test_upload_slide_title_bad_unicode(self): session1 = SessionFactory(meeting__type_id='ietf') url = urlreverse('ietf.meeting.views.upload_session_slides',kwargs={'num':session1.meeting.number,'session_id':session1.id}) @@ -6279,7 +6990,7 @@ def test_upload_slide_title_bad_unicode(self): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) - self.assertFalse(session1.sessionpresentation_set.filter(document__type_id='slides')) + self.assertFalse(session1.presentations.filter(document__type_id='slides')) test_file = BytesIO(b'this is not really a slide') test_file.name = 'not_really.txt' r = self.client.post(url,dict(file=test_file,title='title with bad character \U0001fabc ')) @@ 
-6288,29 +6999,60 @@ def test_upload_slide_title_bad_unicode(self): self.assertTrue(q('form .is-invalid')) self.assertIn("Unicode BMP", q('form .is-invalid div').text()) - def test_remove_sessionpresentation(self): + @override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls + @patch("ietf.meeting.views.SlidesManager") + def test_remove_sessionpresentation(self, mock_slides_manager_cls): session = SessionFactory(meeting__type_id='ietf') + agenda = DocumentFactory(type_id='agenda') doc = DocumentFactory(type_id='slides') - session.sessionpresentation_set.create(document=doc) + session.presentations.create(document=agenda) + session.presentations.create(document=doc) url = urlreverse('ietf.meeting.views.remove_sessionpresentation',kwargs={'num':session.meeting.number,'session_id':session.id,'name':'no-such-doc'}) response = self.client.get(url) self.assertEqual(response.status_code, 404) + self.assertFalse(mock_slides_manager_cls.called) url = urlreverse('ietf.meeting.views.remove_sessionpresentation',kwargs={'num':session.meeting.number,'session_id':0,'name':doc.name}) response = self.client.get(url) self.assertEqual(response.status_code, 404) + self.assertFalse(mock_slides_manager_cls.called) url = urlreverse('ietf.meeting.views.remove_sessionpresentation',kwargs={'num':session.meeting.number,'session_id':session.id,'name':doc.name}) login_testing_unauthorized(self,"secretary",url) response = self.client.get(url) self.assertEqual(response.status_code, 200) + self.assertFalse(mock_slides_manager_cls.called) - self.assertEqual(1,session.sessionpresentation_set.count()) + # Removing slides should remove the materials and call MeetechoAPI + self.assertEqual(2, session.presentations.count()) response = self.client.post(url,{'remove_session':''}) self.assertEqual(response.status_code, 302) - self.assertEqual(0,session.sessionpresentation_set.count()) - self.assertEqual(2,doc.docevent_set.count()) + self.assertEqual(1, session.presentations.count()) + self.assertEqual(2, doc.docevent_set.count()) + self.assertEqual(mock_slides_manager_cls.call_count, 1) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.delete.call_count, 1) + self.assertEqual( + mock_slides_manager_cls.return_value.delete.call_args, + call(session=session, slides=doc), + ) + mock_slides_manager_cls.reset_mock() + + # Removing non-slides should only remove the materials + url = urlreverse( + "ietf.meeting.views.remove_sessionpresentation", + kwargs={ + "num": session.meeting.number, + "session_id": session.id, + "name": agenda.name, + }, + ) + response = self.client.post(url, {"remove_session" : ""}) + self.assertEqual(response.status_code, 302) + self.assertEqual(0, session.presentations.count()) + self.assertEqual(2, agenda.docevent_set.count()) + self.assertFalse(mock_slides_manager_cls.called) def test_propose_session_slides(self): for type_id in ['ietf','interim']: @@ -6320,7 +7062,7 @@ def test_propose_session_slides(self): newperson = PersonFactory() session_overview_url = urlreverse('ietf.meeting.views.session_details',kwargs={'num':session.meeting.number,'acronym':session.group.acronym}) - propose_url = urlreverse('ietf.meeting.views.propose_session_slides', kwargs={'session_id':session.pk, 'num': session.meeting.number}) + upload_url = urlreverse('ietf.meeting.views.upload_session_slides', kwargs={'session_id':session.pk, 'num': session.meeting.number}) r = self.client.get(session_overview_url) 
self.assertEqual(r.status_code,200) @@ -6335,17 +7077,22 @@ def test_propose_session_slides(self): self.assertTrue(q('.proposeslides')) self.client.logout() - login_testing_unauthorized(self,newperson.user.username,propose_url) - r = self.client.get(propose_url) + login_testing_unauthorized(self,newperson.user.username,upload_url) + r = self.client.get(upload_url) self.assertEqual(r.status_code,200) - test_file = BytesIO(b'this is not really a slide') + test_bytes = b'this is not really a slide' + test_file = BytesIO(test_bytes) test_file.name = 'not_really.txt' empty_outbox() - r = self.client.post(propose_url,dict(file=test_file,title='a test slide file',apply_to_all=True)) + r = self.client.post(upload_url,dict(file=test_file,title='a test slide file',apply_to_all=True,approved=False)) self.assertEqual(r.status_code, 302) session = Session.objects.get(pk=session.pk) self.assertEqual(session.slidesubmission_set.count(),1) self.assertEqual(len(outbox),1) + self.assertEqual( + retrieve_bytes("staging", session.slidesubmission_set.get().filename), + test_bytes + ) r = self.client.get(session_overview_url) self.assertEqual(r.status_code, 200) @@ -6362,6 +7109,32 @@ def test_propose_session_slides(self): self.assertEqual(len(q('.proposedslidelist p')), 2) self.client.logout() + login_testing_unauthorized(self,chair.user.username,upload_url) + r = self.client.get(upload_url) + self.assertEqual(r.status_code,200) + test_bytes = b'this is not really a slide either' + test_file = BytesIO(test_bytes) + test_file.name = 'again_not_really.txt' + empty_outbox() + r = self.client.post(upload_url,dict(file=test_file,title='a selfapproved test slide file',apply_to_all=True,approved=True)) + self.assertEqual(r.status_code, 302) + self.assertEqual(len(outbox),0) + self.assertEqual(session.slidesubmission_set.count(),2) + sp = session.presentations.get(document__title__contains="selfapproved") + self.assertFalse(exists_in_storage("staging", sp.document.uploaded_filename)) + self.assertEqual( + retrieve_bytes("slides", sp.document.uploaded_filename), + test_bytes + ) + self.client.logout() + + self.client.login(username=chair.user.username, password=chair.user.username+"+password") + r = self.client.get(session_overview_url) + self.assertEqual(r.status_code, 200) + q = PyQuery(r.content) + self.assertEqual(len(q('.uploadslidelist p')), 0) + self.client.logout() + def test_disapprove_proposed_slides(self): submission = SlideSubmissionFactory() submission.session.meeting.importantdate_set.create(name_id='revsub',date=date_today() + datetime.timedelta(days=20)) @@ -6375,11 +7148,15 @@ def test_disapprove_proposed_slides(self): self.assertEqual(r.status_code,302) self.assertEqual(SlideSubmission.objects.filter(status__slug = 'rejected').count(), 1) self.assertEqual(SlideSubmission.objects.filter(status__slug = 'pending').count(), 0) + if submission.filename is not None and submission.filename != "": + self.assertFalse(exists_in_storage("staging", submission.filename)) r = self.client.get(url) self.assertEqual(r.status_code, 200) - self.assertRegex(r.content.decode(), r"These\s+slides\s+have\s+already\s+been\s+rejected") + self.assertRegex(r.content.decode(), r"These\s+slides\s+have\s+already\s+been\s+declined") - def test_approve_proposed_slides(self): + @override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls + @patch("ietf.meeting.views.SlidesManager") + def test_approve_proposed_slides(self, mock_slides_manager_cls): submission = SlideSubmissionFactory() session = 
submission.session session.meeting.importantdate_set.create(name_id='revsub',date=date_today() + datetime.timedelta(days=20)) @@ -6391,23 +7168,38 @@ def test_approve_proposed_slides(self): r = self.client.get(url) self.assertEqual(r.status_code,200) empty_outbox() + self.assertTrue(exists_in_storage("staging", submission.filename)) r = self.client.post(url,dict(title='different title',approve='approve')) self.assertEqual(r.status_code,302) self.assertEqual(SlideSubmission.objects.filter(status__slug = 'pending').count(), 0) self.assertEqual(SlideSubmission.objects.filter(status__slug = 'approved').count(), 1) - submission = SlideSubmission.objects.get(id = submission.id) + submission.refresh_from_db() self.assertEqual(submission.status_id, 'approved') self.assertIsNotNone(submission.doc) - self.assertEqual(session.sessionpresentation_set.count(),1) - self.assertEqual(session.sessionpresentation_set.first().document.title,'different title') + self.assertEqual(session.presentations.count(),1) + self.assertEqual(session.presentations.first().document.title,'different title') + self.assertTrue(exists_in_storage("slides", submission.doc.uploaded_filename)) + self.assertFalse(exists_in_storage("staging", submission.filename)) + self.assertEqual(mock_slides_manager_cls.call_count, 1) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 1) + self.assertEqual( + mock_slides_manager_cls.return_value.add.call_args, + call(session=session, slides=submission.doc, order=1), + ) + mock_slides_manager_cls.reset_mock() r = self.client.get(url) self.assertEqual(r.status_code, 200) self.assertRegex(r.content.decode(), r"These\s+slides\s+have\s+already\s+been\s+approved") + self.assertFalse(mock_slides_manager_cls.called) self.assertEqual(len(outbox), 1) self.assertIn(submission.submitter.email_address(), outbox[0]['To']) self.assertIn('Slides approved', outbox[0]['Subject']) - def test_approve_proposed_slides_multisession_apply_one(self): + @override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls + @patch("ietf.meeting.views.SlidesManager") + def test_approve_proposed_slides_multisession_apply_one(self, mock_slides_manager_cls): + TestBlobstoreManager().emptyTestBlobstores() submission = SlideSubmissionFactory(session__meeting__type_id='ietf') session1 = submission.session session2 = SessionFactory(group=submission.session.group, meeting=submission.session.meeting) @@ -6420,11 +7212,23 @@ def test_approve_proposed_slides_multisession_apply_one(self): q = PyQuery(r.content) self.assertTrue(q('#id_apply_to_all')) r = self.client.post(url,dict(title='yet another title',approve='approve')) + submission.refresh_from_db() + self.assertIsNotNone(submission.doc) self.assertEqual(r.status_code,302) - self.assertEqual(session1.sessionpresentation_set.count(),1) - self.assertEqual(session2.sessionpresentation_set.count(),0) + self.assertEqual(session1.presentations.count(),1) + self.assertEqual(session2.presentations.count(),0) + self.assertEqual(mock_slides_manager_cls.call_count, 1) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 1) + self.assertEqual( + mock_slides_manager_cls.return_value.add.call_args, + call(session=session1, slides=submission.doc, order=1), + ) - def test_approve_proposed_slides_multisession_apply_all(self): + 
@override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls + @patch("ietf.meeting.views.SlidesManager") + def test_approve_proposed_slides_multisession_apply_all(self, mock_slides_manager_cls): + TestBlobstoreManager().emptyTestBlobstores() submission = SlideSubmissionFactory(session__meeting__type_id='ietf') session1 = submission.session session2 = SessionFactory(group=submission.session.group, meeting=submission.session.meeting) @@ -6435,70 +7239,174 @@ def test_approve_proposed_slides_multisession_apply_all(self): r = self.client.get(url) self.assertEqual(r.status_code,200) r = self.client.post(url,dict(title='yet another title',apply_to_all=1,approve='approve')) + submission.refresh_from_db() self.assertEqual(r.status_code,302) - self.assertEqual(session1.sessionpresentation_set.count(),1) - self.assertEqual(session2.sessionpresentation_set.count(),1) + self.assertEqual(session1.presentations.count(),1) + self.assertEqual(session2.presentations.count(),1) + self.assertEqual(mock_slides_manager_cls.call_count, 1) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 2) + self.assertCountEqual( + mock_slides_manager_cls.return_value.add.call_args_list, + [ + call(session=session1, slides=submission.doc, order=1), + call(session=session2, slides=submission.doc, order=1), + ] + ) - def test_submit_and_approve_multiple_versions(self): + @override_settings(MEETECHO_API_CONFIG="fake settings") # enough to trigger API calls + @patch("ietf.meeting.views.SlidesManager") + def test_submit_and_approve_multiple_versions(self, mock_slides_manager_cls): session = SessionFactory(meeting__type_id='ietf') chair = RoleFactory(group=session.group,name_id='chair').person session.meeting.importantdate_set.create(name_id='revsub',date=date_today()+datetime.timedelta(days=20)) newperson = PersonFactory() - propose_url = urlreverse('ietf.meeting.views.propose_session_slides', kwargs={'session_id':session.pk, 'num': session.meeting.number}) + upload_url = urlreverse('ietf.meeting.views.upload_session_slides', kwargs={'session_id':session.pk, 'num': session.meeting.number}) - login_testing_unauthorized(self,newperson.user.username,propose_url) + login_testing_unauthorized(self,newperson.user.username,upload_url) test_file = BytesIO(b'this is not really a slide') test_file.name = 'not_really.txt' - r = self.client.post(propose_url,dict(file=test_file,title='a test slide file',apply_to_all=True)) + r = self.client.post(upload_url,dict(file=test_file,title='a test slide file',apply_to_all=True,approved=False)) self.assertEqual(r.status_code, 302) self.client.logout() - submission = SlideSubmission.objects.get(session = session) + submission = SlideSubmission.objects.get(session=session) + self.assertTrue(exists_in_storage("staging", submission.filename)) approve_url = urlreverse('ietf.meeting.views.approve_proposed_slides', kwargs={'slidesubmission_id':submission.pk,'num':submission.session.meeting.number}) login_testing_unauthorized(self, chair.user.username, approve_url) r = self.client.post(approve_url,dict(title=submission.title,approve='approve')) + submission.refresh_from_db() self.assertEqual(r.status_code,302) self.client.logout() + self.assertFalse(exists_in_storage("staging", submission.filename)) + self.assertTrue(exists_in_storage("slides", submission.doc.uploaded_filename)) + self.assertEqual(mock_slides_manager_cls.call_count, 1) + 
self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 1) + self.assertEqual( + mock_slides_manager_cls.return_value.add.call_args, + call(session=session, slides=submission.doc, order=1), + ) + mock_slides_manager_cls.reset_mock() + + self.assertEqual(session.presentations.first().document.rev,'00') - self.assertEqual(session.sessionpresentation_set.first().document.rev,'00') - - login_testing_unauthorized(self,newperson.user.username,propose_url) + login_testing_unauthorized(self,newperson.user.username,upload_url) test_file = BytesIO(b'this is not really a slide, but it is another version of it') test_file.name = 'not_really.txt' - r = self.client.post(propose_url,dict(file=test_file,title='a test slide file',apply_to_all=True)) + r = self.client.post(upload_url,dict(file=test_file,title='a test slide file',apply_to_all=True)) self.assertEqual(r.status_code, 302) test_file = BytesIO(b'this is not really a slide, but it is third version of it') test_file.name = 'not_really.txt' - r = self.client.post(propose_url,dict(file=test_file,title='a test slide file',apply_to_all=True)) + r = self.client.post(upload_url,dict(file=test_file,title='a test slide file',apply_to_all=True)) self.assertEqual(r.status_code, 302) self.client.logout() (first_submission, second_submission) = SlideSubmission.objects.filter(session=session, status__slug = 'pending').order_by('id') + self.assertTrue(exists_in_storage("staging", first_submission.filename)) + self.assertTrue(exists_in_storage("staging", second_submission.filename)) approve_url = urlreverse('ietf.meeting.views.approve_proposed_slides', kwargs={'slidesubmission_id':second_submission.pk,'num':second_submission.session.meeting.number}) login_testing_unauthorized(self, chair.user.username, approve_url) r = self.client.post(approve_url,dict(title=submission.title,approve='approve')) + first_submission.refresh_from_db() + second_submission.refresh_from_db() + self.assertTrue(exists_in_storage("staging", first_submission.filename)) + self.assertFalse(exists_in_storage("staging", second_submission.filename)) + self.assertTrue(exists_in_storage("slides", second_submission.doc.uploaded_filename)) self.assertEqual(r.status_code,302) + self.assertEqual(mock_slides_manager_cls.call_count, 1) + self.assertEqual(mock_slides_manager_cls.call_args, call(api_config="fake settings")) + self.assertEqual(mock_slides_manager_cls.return_value.add.call_count, 0) + self.assertEqual(mock_slides_manager_cls.return_value.revise.call_count, 1) + self.assertEqual( + mock_slides_manager_cls.return_value.revise.call_args, + call(session=session, slides=second_submission.doc), + ) + mock_slides_manager_cls.reset_mock() disapprove_url = urlreverse('ietf.meeting.views.approve_proposed_slides', kwargs={'slidesubmission_id':first_submission.pk,'num':first_submission.session.meeting.number}) r = self.client.post(disapprove_url,dict(title='some title',disapprove="disapprove")) self.assertEqual(r.status_code,302) self.client.logout() + self.assertFalse(mock_slides_manager_cls.called) + self.assertFalse(exists_in_storage("staging", first_submission.filename)) self.assertEqual(SlideSubmission.objects.filter(status__slug = 'pending').count(),0) self.assertEqual(SlideSubmission.objects.filter(status__slug = 'rejected').count(),1) - self.assertEqual(session.sessionpresentation_set.first().document.rev,'01') + self.assertEqual(session.presentations.first().document.rev,'01') path = 
os.path.join(submission.session.meeting.get_materials_path(),'slides') - filename = os.path.join(path,session.sessionpresentation_set.first().document.name+'-01.txt') + filename = os.path.join(path,session.presentations.first().document.name+'-01.txt') self.assertTrue(os.path.exists(filename)) fd = io.open(filename, 'r') contents = fd.read() fd.close() self.assertIn('third version', contents) + @override_settings( + MEETECHO_API_CONFIG="fake settings" + ) # enough to trigger API calls + @patch("ietf.meeting.views.SlidesManager") + def test_notify_meetecho_of_all_slides(self, mock_slides_manager_cls): + for meeting_type in ["ietf", "interim"]: + # Reset for the sake of the second iteration + self.client.logout() + mock_slides_manager_cls.reset_mock() + + session = SessionFactory(meeting__type_id=meeting_type) + meeting = session.meeting + + # bad meeting + url = urlreverse( + "ietf.meeting.views.notify_meetecho_of_all_slides", + kwargs={"num": 9999, "acronym": session.group.acronym}, + ) + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + r = self.client.post(url) + self.assertEqual(r.status_code, 404) + self.assertFalse(mock_slides_manager_cls.called) + self.client.logout() + + # good meeting + url = urlreverse( + "ietf.meeting.views.notify_meetecho_of_all_slides", + kwargs={"num": meeting.number, "acronym": session.group.acronym}, + ) + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + self.assertEqual(r.status_code, 405) + self.assertFalse(mock_slides_manager_cls.called) + mock_slides_manager = mock_slides_manager_cls.return_value + mock_slides_manager.send_update.return_value = True + r = self.client.post(url) + self.assertEqual(r.status_code, 302) + self.assertEqual(mock_slides_manager.send_update.call_count, 1) + self.assertEqual(mock_slides_manager.send_update.call_args, call(session)) + r = self.client.get(r["Location"]) + messages = list(r.context["messages"]) + self.assertEqual(len(messages), 1) + self.assertEqual( + str(messages[0]), f"Notified Meetecho about slides for {session}" + ) + + mock_slides_manager.send_update.reset_mock() + mock_slides_manager.send_update.return_value = False + r = self.client.post(url) + self.assertEqual(r.status_code, 302) + self.assertEqual(mock_slides_manager.send_update.call_count, 1) + self.assertEqual(mock_slides_manager.send_update.call_args, call(session)) + r = self.client.get(r["Location"]) + messages = list(r.context["messages"]) + self.assertEqual(len(messages), 1) + self.assertIn( + "No sessions were eligible for Meetecho slides update.", str(messages[0]) + ) + @override_settings(IETF_NOTES_URL='https://notes.ietf.org/') class ImportNotesTests(TestCase): @@ -6577,6 +7485,10 @@ def test_imports_previewed_text(self): minutes_path = Path(self.meeting.get_materials_path()) / 'minutes' with (minutes_path / self.session.minutes().uploaded_filename).open() as f: self.assertEqual(f.read(), 'original markdown text') + self.assertEqual( + retrieve_str("minutes", self.session.minutes().uploaded_filename), + 'original markdown text' + ) def test_refuses_identical_import(self): """Should not be able to import text identical to the current revision""" @@ -6609,7 +7521,7 @@ def test_allows_import_on_existing_bad_unicode(self): self.client.login(username='secretary', password='secretary+password') r = self.client.post(url, {'markdown_text': 'replaced below'}) # create a rev with open( - 
self.session.sessionpresentation_set.filter(document__type="minutes").first().document.get_file_name(), + self.session.presentations.filter(document__type="minutes").first().document.get_file_name(), 'wb' ) as f: # Replace existing content with an invalid Unicode byte string. The particular invalid @@ -6634,9 +7546,11 @@ def test_handles_missing_previous_revision_file(self): self.client.login(username='secretary', password='secretary+password') r = self.client.post(url, {'markdown_text': 'original markdown text'}) # create a rev # remove the file uploaded for the first rev - minutes_docs = self.session.sessionpresentation_set.filter(document__type='minutes') + minutes_docs = self.session.presentations.filter(document__type='minutes') self.assertEqual(minutes_docs.count(), 1) - Path(minutes_docs.first().document.get_file_name()).unlink() + to_remove = Path(minutes_docs.first().document.get_file_name()) + to_remove.unlink() + remove_from_storage("minutes", to_remove.name) self.assertEqual(r.status_code, 302) with requests_mock.Mocker() as mock: @@ -6771,9 +7685,23 @@ def test_meeting_requests(self): status_id='schedw', add_to_schedule=False, ) + session_with_none_purpose = SessionFactory( + meeting=meeting, + group__parent=area, + purpose_id="none", + status_id="schedw", + add_to_schedule=False, + ) + tutorial_session = SessionFactory( + meeting=meeting, + group__parent=area, + purpose_id="tutorial", + status_id="schedw", + add_to_schedule=False, + ) def _sreq_edit_link(sess): return urlreverse( - 'ietf.secr.sreq.views.edit', + 'ietf.meeting.views_session_request.edit_request', kwargs={ 'num': meeting.number, 'acronym': sess.group.acronym, @@ -6809,6 +7737,8 @@ def _sreq_edit_link(sess): self.assertContains(r, _sreq_edit_link(proposed_wg_session)) # link to the session request self.assertContains(r, rg_session.group.acronym) self.assertContains(r, _sreq_edit_link(rg_session)) # link to the session request + self.assertContains(r, session_with_none_purpose.group.acronym) + self.assertContains(r, tutorial_session.group.acronym) # check headings - note that the special types (has_meetings, etc) do not have a group parent # so they show up in 'other' q = PyQuery(r.content) @@ -6816,6 +7746,22 @@ def _sreq_edit_link(sess): self.assertEqual(len(q('h2#other-groups')), 1) self.assertEqual(len(q('h2#irtf')), 1) # rg group has irtf group as parent + # check rounded pills + self.assertNotContains( # no rounded pill for sessions with regular purpose + r, + 'Regular', + html=True, + ) + self.assertNotContains( # no rounded pill for session with no purpose specified + r, + 'None', + html=True, + ) + self.assertContains( # rounded pill for session with non-regular purpose + r, + 'Tutorial', + html=True, + ) def test_request_minutes(self): meeting = MeetingFactory(type_id='ietf') @@ -6838,6 +7784,156 @@ def test_request_minutes(self): self.assertEqual(r.status_code,302) self.assertEqual(len(outbox),1) + @override_settings(YOUTUBE_DOMAINS=["youtube.com"]) + def test_add_session_recordings(self): + session = SessionFactory(meeting__type_id="ietf") + url = urlreverse( + "ietf.meeting.views.add_session_recordings", + kwargs={"session_id": session.pk, "num": session.meeting.number}, + ) + # does not fully validate authorization for non-secretariat users :-( + login_testing_unauthorized(self, "secretary", url) + r = self.client.get(url) + pq = PyQuery(r.content) + title_input = pq("input#id_title") + self.assertIsNotNone(title_input) + self.assertEqual( + title_input.attr.value, + "Video recording of {acro} for 
{timestamp}".format( + acro=session.group.acronym, + timestamp=session.official_timeslotassignment().timeslot.utc_start_time().strftime( + "%Y-%m-%d %H:%M" + ), + ), + ) + + with patch("ietf.meeting.views.create_recording") as mock_create: + r = self.client.post( + url, + data={ + "title": "This is my video title", + "url": "", + } + ) + self.assertFalse(mock_create.called) + + with patch("ietf.meeting.views.create_recording") as mock_create: + r = self.client.post( + url, + data={ + "title": "This is my video title", + "url": "https://yubtub.com/this-is-not-a-youtube-video", + } + ) + self.assertFalse(mock_create.called) + + with patch("ietf.meeting.views.create_recording") as mock_create: + r = self.client.post( + url, + data={ + "title": "This is my video title", + "url": "https://youtube.com/finally-a-video", + } + ) + self.assertTrue(mock_create.called) + self.assertEqual( + mock_create.call_args, + call( + session, + "https://youtube.com/finally-a-video", + title="This is my video title", + user=Person.objects.get(user__username="secretary"), + ), + ) + + # CAN delete session presentation for this session + sp = SessionPresentationFactory( + session=session, + document__type_id="recording", + document__external_url="https://example.com/some-video", + ) + with patch("ietf.meeting.views.delete_recording") as mock_delete: + r = self.client.post( + url, + data={ + "delete": str(sp.pk), + } + ) + self.assertEqual(r.status_code, 200) + self.assertTrue(mock_delete.called) + self.assertEqual(mock_delete.call_args, call(sp)) + + # ValueError message from delete_recording does not reach the user + sp = SessionPresentationFactory( + session=session, + document__type_id="recording", + document__external_url="https://example.com/some-video", + ) + with patch("ietf.meeting.views.delete_recording", side_effect=ValueError("oh joy!")) as mock_delete: + r = self.client.post( + url, + data={ + "delete": str(sp.pk), + } + ) + self.assertTrue(mock_delete.called) + self.assertNotContains(r, "oh joy!", status_code=200) + + # CANNOT delete session presentation for a different session + sp_for_other_session = SessionPresentationFactory( + document__type_id="recording", + document__external_url="https://example.com/some-other-video", + ) + with patch("ietf.meeting.views.delete_recording") as mock_delete: + r = self.client.post( + url, + data={ + "delete": str(sp_for_other_session.pk), + } + ) + self.assertEqual(r.status_code, 404) + self.assertFalse(mock_delete.called) + + def test_show_chatlog_links(self): + meeting = MeetingFactory(type_id='ietf', number='122') + session = SessionFactory(meeting=meeting) + doc_name = 'chatlog-72-mars-197001010000' + SessionPresentation.objects.create(session=session,document=DocumentFactory(type_id='chatlog', name=doc_name)) + + session_url = urlreverse('ietf.meeting.views.session_details', + kwargs={'num':meeting.number, 'acronym':session.group.acronym}) + + r = self.client.get(session_url) + + self.assertEqual(r.status_code, 200) + + q = PyQuery(r.content) + + # Find the chatlog link in the desktop view + link_chatlog_box = q(f'a[title="Chat logs for {session.group.acronym}"]') + self.assertTrue(link_chatlog_box, 'Expected element with title "Chat logs for {group.acronym}" not found.') + self.assertEqual(link_chatlog_box.attr('href'), '/doc/'+ doc_name) + + # Find the chatlog link in the mobile view + link_chatlog_list = q('li:contains("Chat logs")') + self.assertTrue(link_chatlog_list, 'Expected
<li> element containing "Chat logs" not found.') + self.assertEqual(link_chatlog_list.find('a').attr('href'), '/doc/'+ doc_name) + + def test_hide_chatlog_links(self): + # mock meeting and session, but no chatlog document + meeting = MeetingFactory(type_id='ietf', number='122') + session = SessionFactory(meeting=meeting) + + session_url = urlreverse('ietf.meeting.views.session_details', + kwargs={'num':meeting.number, 'acronym':session.group.acronym}) + + r = self.client.get(session_url) + + self.assertEqual(r.status_code, 200) + # validate no links for chat logs exist + self.assertNotContains(r, 'Chat logs') + + class HasMeetingsTests(TestCase): settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['AGENDA_PATH'] @@ -6932,10 +8028,7 @@ def test_cannot_request_interim(self): for gf in GroupFeatures.objects.filter(has_meetings=True): for role_name in all_role_names - set(gf.groupman_roles): role = RoleFactory(group__type_id=gf.type_id,name_id=role_name) - self.client.login(username=role.person.user.username, password=role.person.user.username+'+password') - r = self.client.get(url) - self.assertEqual(r.status_code, 403) - self.client.logout() + self.assertFalse(can_request_interim_meeting(role.person.user)) def test_appears_on_upcoming(self): url = urlreverse('ietf.meeting.views.upcoming') @@ -7619,8 +8712,7 @@ def _proceedings_file(): path = Path(settings.BASE_DIR) / 'meeting/test_procmat.pdf' return path.open('rb') - def _assertMeetingHostsDisplayed(self, response, meeting): - pq = PyQuery(response.content) + def _assertMeetingHostsDisplayed(self, pq: PyQuery, meeting): host_divs = pq('div.host-logo') self.assertEqual(len(host_divs), meeting.meetinghosts.count(), 'Should have a logo for every meeting host') self.assertEqual( @@ -7636,12 +8728,11 @@ def _assertMeetingHostsDisplayed(self, response, meeting): 'Correct image and name for each host should appear in the correct order' ) - def _assertProceedingsMaterialsDisplayed(self, response, meeting): + def _assertProceedingsMaterialsDisplayed(self, pq: PyQuery, meeting): """Checks that all (and only) active materials are linked with correct href and title""" expected_materials = [ m for m in meeting.proceedings_materials.order_by('type__order') if m.active() ] - pq = PyQuery(response.content) links = pq('div.proceedings-material a') self.assertEqual(len(links), len(expected_materials), 'Should have an entry for each active ProceedingsMaterial') self.assertEqual( @@ -7650,9 +8741,8 @@ def _assertProceedingsMaterialsDisplayed(self, response, meeting): 'Correct title and link for each ProceedingsMaterial should appear in the correct order' ) - def _assertGroupSessions(self, response, meeting): + def _assertGroupSessions(self, pq: PyQuery): """Checks that group/sessions are present""" - pq = PyQuery(response.content) sections = ["plenaries", "gen", "iab", "editorial", "irtf", "training"] for section in sections: self.assertEqual(len(pq(f"#{section}")), 1, f"{section} section should exists in proceedings") @@ -7660,10 +8750,9 @@ def _assertGroupSessions(self, response, meeting): def test_proceedings(self): """Proceedings should be displayed correctly - Currently only tests that the view responds with a 200 response code and checks the ProceedingsMaterials - at the top of the proceedings. Ought to actually test the display of the individual group/session - materials as well. + Proceedings contents are tested in detail when testing generate_proceedings_content. 
""" + # number must be >97 (settings.PROCEEDINGS_VERSION_CHANGES) meeting = make_meeting_test_data(meeting=MeetingFactory(type_id='ietf', number='100')) session = Session.objects.filter(meeting=meeting, group__acronym="mars").first() GroupEventFactory(group=session.group,type='status_update') @@ -7688,16 +8777,73 @@ def test_proceedings(self): self._create_proceedings_materials(meeting) url = urlreverse("ietf.meeting.views.proceedings", kwargs=dict(num=meeting.number)) - r = self.client.get(url) + cached_content = mark_safe("
Fake proceedings content
    ") + with patch("ietf.meeting.views.generate_proceedings_content") as mock_gpc: + mock_gpc.return_value = cached_content + r = self.client.get(url) self.assertEqual(r.status_code, 200) + self.assertIn(cached_content, r.content.decode()) + self.assertTemplateUsed(r, "meeting/proceedings_wrapper.html") + self.assertTemplateNotUsed(r, "meeting/proceedings.html") + # These are rendered in proceedings_wrapper.html, so test them here if len(meeting.city) > 0: self.assertContains(r, meeting.city) if len(meeting.venue_name) > 0: self.assertContains(r, meeting.venue_name) + self._assertMeetingHostsDisplayed(PyQuery(r.content), meeting) + + @patch("ietf.meeting.utils.caches") + def test_generate_proceedings_content(self, mock_caches): + # number must be >97 (settings.PROCEEDINGS_VERSION_CHANGES) + meeting = make_meeting_test_data(meeting=MeetingFactory(type_id='ietf', number='100')) + + # First, check that by default a value in the cache is used without doing any other computation + mock_default_cache = mock_caches["default"] + mock_default_cache.get.return_value = "a cached value" + result = generate_proceedings_content(meeting) + self.assertEqual(result, "a cached value") + self.assertFalse(mock_default_cache.set.called) + self.assertTrue(mock_default_cache.get.called) + cache_key = mock_default_cache.get.call_args.args[0] + mock_default_cache.get.reset_mock() + + # Now set up for actual computation of the proceedings content. + session = Session.objects.filter(meeting=meeting, group__acronym="mars").first() + GroupEventFactory(group=session.group,type='status_update') + SessionPresentationFactory(document__type_id='recording',session=session) + SessionPresentationFactory(document__type_id='recording',session=session,document__title="Audio recording for tests") + + # Add various group sessions + groups = [] + parent_groups = [ + GroupFactory.create(type_id="area", acronym="gen"), + GroupFactory.create(acronym="iab"), + GroupFactory.create(acronym="irtf"), + ] + for parent in parent_groups: + groups.append(GroupFactory.create(parent=parent)) + for acronym in ["rsab", "edu"]: + groups.append(GroupFactory.create(acronym=acronym)) + for group in groups: + SessionFactory(meeting=meeting, group=group) + + self.write_materials_files(meeting, session) + self._create_proceedings_materials(meeting) + + # Now "empty" the mock cache and see that we compute the expected proceedings content. 
+ mock_default_cache.get.return_value = None + proceedings_content = generate_proceedings_content(meeting) + self.assertTrue(mock_default_cache.get.called) + self.assertEqual(mock_default_cache.get.call_args.args[0], cache_key, "same cache key each time") + self.assertTrue(mock_default_cache.set.called) + self.assertEqual(mock_default_cache.set.call_args.args, (cache_key, proceedings_content)) + self.assertGreater(mock_default_cache.set.call_args.kwargs["timeout"], 86400) + mock_default_cache.get.reset_mock() + mock_default_cache.set.reset_mock() # standard items on every proceedings - pq = PyQuery(r.content) + pq = PyQuery(proceedings_content) self.assertNotEqual( pq('a[href="{}"]'.format( urlreverse('ietf.meeting.views.proceedings_overview', kwargs=dict(num=meeting.number))) @@ -7728,13 +8874,22 @@ def test_proceedings(self): ) # configurable contents - self._assertMeetingHostsDisplayed(r, meeting) - self._assertProceedingsMaterialsDisplayed(r, meeting) - self._assertGroupSessions(r, meeting) + self._assertProceedingsMaterialsDisplayed(pq, meeting) + self._assertGroupSessions(pq) + + # Finally, repeat the first cache test, but now with force_refresh=True. The cached value + # should be ignored and we should recompute the proceedings as before. + mock_default_cache.get.return_value = "a cached value" + result = generate_proceedings_content(meeting, force_refresh=True) + self.assertEqual(result, proceedings_content) # should have recomputed the same thing + self.assertFalse(mock_default_cache.get.called, "don't bother reading cache when force_refresh is True") + self.assertTrue(mock_default_cache.set.called) + self.assertEqual(mock_default_cache.set.call_args.args, (cache_key, proceedings_content)) + self.assertGreater(mock_default_cache.set.call_args.kwargs["timeout"], 86400) def test_named_session(self): """Session with a name should appear separately in the proceedings""" - meeting = MeetingFactory(type_id='ietf', number='100') + meeting = MeetingFactory(type_id='ietf', number='100', proceedings_final=True) group = GroupFactory() plain_session = SessionFactory(meeting=meeting, group=group) named_session = SessionFactory(meeting=meeting, group=group, name='I Got a Name') @@ -7769,22 +8924,22 @@ def test_named_session(self): named_row = named_label.closest('tr') self.assertTrue(named_row) - for material in (sp.document for sp in plain_session.sessionpresentation_set.all()): + for material in (sp.document for sp in plain_session.presentations.all()): if material.type_id == 'draft': expected_url = urlreverse( 'ietf.doc.views_doc.document_main', - kwargs={'name': material.canonical_name()}, + kwargs={'name': material.name}, ) else: expected_url = material.get_href(meeting) self.assertTrue(plain_row.find(f'a[href="{expected_url}"]')) self.assertFalse(named_row.find(f'a[href="{expected_url}"]')) - for material in (sp.document for sp in named_session.sessionpresentation_set.all()): + for material in (sp.document for sp in named_session.presentations.all()): if material.type_id == 'draft': expected_url = urlreverse( 'ietf.doc.views_doc.document_main', - kwargs={'name': material.canonical_name()}, + kwargs={'name': material.name}, ) else: expected_url = material.get_href(meeting) @@ -7853,36 +9008,52 @@ def test_proceedings_attendees(self): - assert onsite checkedin=True appears, not onsite checkedin=False - assert remote attended appears, not remote not attended - prefer onsite checkedin=True to remote attended when same person has both + - summary stats row shows correct counts + - chart data 
JSON is embedded with correct values """ - make_meeting_test_data() - meeting = MeetingFactory(type_id='ietf', date=datetime.date(2016, 7, 14), number="97") + m = MeetingFactory(type_id='ietf', date=datetime.date(2023, 11, 4), number="118") person_a = PersonFactory(name='Person A') person_b = PersonFactory(name='Person B') person_c = PersonFactory(name='Person C') person_d = PersonFactory(name='Person D') - MeetingRegistrationFactory(meeting=meeting, person=person_a, reg_type='onsite', checkedin=True) - MeetingRegistrationFactory(meeting=meeting, person=person_b, reg_type='onsite', checkedin=False) - MeetingRegistrationFactory(meeting=meeting, person=person_a, reg_type='remote') - AttendedFactory(session__meeting=meeting, session__type_id='plenary', person=person_a) - MeetingRegistrationFactory(meeting=meeting, person=person_c, reg_type='remote') - AttendedFactory(session__meeting=meeting, session__type_id='plenary', person=person_c) - MeetingRegistrationFactory(meeting=meeting, person=person_d, reg_type='remote') - url = urlreverse('ietf.meeting.views.proceedings_attendees',kwargs={'num': 97}) + areg = RegistrationFactory(meeting=m, person=person_a, checkedin=True, with_ticket={'attendance_type_id': 'onsite'}) + RegistrationFactory(meeting=m, person=person_b, checkedin=False, with_ticket={'attendance_type_id': 'onsite'}) + creg = RegistrationFactory(meeting=m, person=person_c, with_ticket={'attendance_type_id': 'remote'}) + RegistrationFactory(meeting=m, person=person_d, with_ticket={'attendance_type_id': 'remote'}) + AttendedFactory(session__meeting=m, session__type_id='plenary', person=person_a) + AttendedFactory(session__meeting=m, session__type_id='plenary', person=person_c) + url = urlreverse('ietf.meeting.views.proceedings_attendees',kwargs={'num': 118}) response = self.client.get(url) self.assertContains(response, 'Attendee list') q = PyQuery(response.content) self.assertEqual(2, len(q("#id_attendees tbody tr"))) text = q('#id_attendees tbody tr').text().replace('\n', ' ') - self.assertEqual(text, "A Person onsite C Person remote") + self.assertEqual(text, f"A Person {areg.affiliation} {areg.country_code} onsite C Person {creg.affiliation} {creg.country_code} remote") + + # Summary stats row: Onsite / Remote / Total (matches registration.ietf.org) + self.assertContains(response, 'Onsite:') + self.assertContains(response, 'Remote:') + self.assertContains(response, 'Total:') + self.assertContains(response, '1') # onsite and remote + self.assertContains(response, '2') # total + + # Chart data embedded in page + chart_json = json.loads(q('#attendees-chart-data').text()) + self.assertEqual(chart_json['type'], [['Onsite', 1], ['Remote', 1]]) def test_proceedings_overview(self): '''Test proceedings IETF Overview page. Note: old meetings aren't supported so need to add a new meeting then test. 
''' - make_meeting_test_data() - meeting = MeetingFactory(type_id='ietf', date=datetime.date(2016,7,14), number="97") - finalize(meeting) + meeting = make_meeting_test_data(meeting=MeetingFactory(type_id='ietf', date=datetime.date(2016,7,14), number="97")) + + # finalize meeting + url = urlreverse('ietf.meeting.views.finalize_proceedings',kwargs={'num':meeting.number}) + login_testing_unauthorized(self,"secretary",url) + r = self.client.post(url,{'finalize':1}) + self.assertEqual(r.status_code, 302) + url = urlreverse('ietf.meeting.views.proceedings_overview',kwargs={'num':97}) response = self.client.get(url) self.assertContains(response, 'The Internet Engineering Task Force') @@ -8270,18 +9441,139 @@ def test_get_next_sequence(self): self.assertEqual(sequence,1) def test_participants_for_meeting(self): - person_a = PersonFactory() - person_b = PersonFactory() - person_c = PersonFactory() - person_d = PersonFactory() m = MeetingFactory.create(type_id='ietf') - MeetingRegistrationFactory(meeting=m, person=person_a, reg_type='onsite', checkedin=True) - MeetingRegistrationFactory(meeting=m, person=person_b, reg_type='onsite', checkedin=False) - MeetingRegistrationFactory(meeting=m, person=person_c, reg_type='remote') - MeetingRegistrationFactory(meeting=m, person=person_d, reg_type='remote') - AttendedFactory(session__meeting=m, session__type_id='plenary', person=person_c) + areg = RegistrationFactory(meeting=m, checkedin=True, with_ticket={'attendance_type_id': 'onsite'}) + breg = RegistrationFactory(meeting=m, checkedin=False, with_ticket={'attendance_type_id': 'onsite'}) + creg = RegistrationFactory(meeting=m, with_ticket={'attendance_type_id': 'remote'}) + dreg = RegistrationFactory(meeting=m, with_ticket={'attendance_type_id': 'remote'}) + AttendedFactory(session__meeting=m, session__type_id='plenary', person=creg.person) checked_in, attended = participants_for_meeting(m) - self.assertTrue(person_a.pk in checked_in) - self.assertTrue(person_b.pk not in checked_in) - self.assertTrue(person_c.pk in attended) - self.assertTrue(person_d.pk not in attended) \ No newline at end of file + self.assertIn(areg.person.pk, checked_in) + self.assertNotIn(breg.person.pk, checked_in) + self.assertNotIn(areg.person.pk, attended) + self.assertNotIn(breg.person.pk, attended) + self.assertIn(creg.person.pk, attended) + self.assertNotIn(dreg.person.pk, attended) + + def test_session_attendance(self): + meeting = MeetingFactory(type_id='ietf', date=datetime.date(2023, 11, 4), number='118') + make_meeting_test_data(meeting=meeting) + session = Session.objects.filter(meeting=meeting, group__acronym='mars').first() + regs = RegistrationFactory.create_batch(3, meeting=meeting) + persons = [reg.person for reg in regs] + self.assertEqual(session.attended_set.count(), 0) + + # If there are no attendees, the link isn't offered, and getting + # the page directly returns an empty list. 
+ session_url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':meeting.number, 'acronym':session.group.acronym}) + attendance_url = urlreverse('ietf.meeting.views.session_attendance', kwargs={'num':meeting.number, 'session_id':session.id}) + r = self.client.get(session_url) + self.assertNotContains(r, attendance_url) + r = self.client.get(attendance_url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, '0 attendees') + + # Add some attendees + add_attendees_url = urlreverse('ietf.meeting.views.api_add_session_attendees') + recmanrole = RoleFactory(group__type_id='ietf', name_id='recman', person__user__last_login=timezone.now()) + recman = recmanrole.person + apikey = PersonalApiKeyFactory(endpoint=add_attendees_url, person=recman) + attendees = [person.user.pk for person in persons] + self.client.login(username='recman', password='recman+password') + r = self.client.post(add_attendees_url, {'apikey':apikey.hash(), 'attended':f'{{"session_id":{session.pk},"attendees":{attendees}}}'}) + self.assertEqual(r.status_code, 200) + self.assertEqual(session.attended_set.count(), 3) + + # Before a meeting is finalized, session_attendance renders a live + # view of the Attended records for the session. + r = self.client.get(session_url) + self.assertContains(r, attendance_url) + r = self.client.get(attendance_url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, '3 attendees') + for person in persons: + self.assertContains(r, escape(person.plain_name())) + + # Test for the "I was there" button. + def _test_button(person, expected): + username = person.user.username + self.client.login(username=username, password=f'{username}+password') + r = self.client.get(attendance_url) + self.assertEqual(b"I was there" in r.content, expected) + # recman isn't registered for the meeting + _test_button(recman, False) + # person0 is already on the bluesheet + _test_button(persons[0], False) + # person3 attests he was there + persons.append(RegistrationFactory(meeting=meeting).person) + # button isn't shown if we're outside the corrections windows + meeting.importantdate_set.create(name_id='revsub',date=date_today() - datetime.timedelta(days=20)) + _test_button(persons[3], False) + # attempt to POST anyway is ignored + r = self.client.post(attendance_url) + self.assertEqual(r.status_code, 200) + self.assertNotContains(r, escape(persons[3].plain_name())) + self.assertEqual(session.attended_set.count(), 3) + # button is shown, and POST is accepted + meeting.importantdate_set.update(name_id='revsub',date=date_today() + datetime.timedelta(days=20)) + _test_button(persons[3], True) + r = self.client.post(attendance_url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, escape(persons[3].plain_name())) + self.assertEqual(session.attended_set.count(), 4) + + # When the meeting is finalized, a bluesheet file is generated, + # and session_attendance redirects to the file. 
+ self.client.login(username='secretary',password='secretary+password') + finalize_url = urlreverse('ietf.meeting.views.finalize_proceedings', kwargs={'num':meeting.number}) + r = self.client.post(finalize_url, {'finalize':1}) + self.assertRedirects(r, urlreverse('ietf.meeting.views.proceedings', kwargs={'num':meeting.number})) + doc = session.presentations.filter(document__type_id='bluesheets').first().document + self.assertEqual(doc.rev,'00') + text = doc.text() + self.assertIn('4 attendees', text) + for person in persons: + self.assertIn(person.plain_name(), text) + r = self.client.get(session_url) + self.assertContains(r, doc.get_href()) + self.assertNotContains(r, attendance_url) + r = self.client.get(attendance_url) + self.assertEqual(r.status_code,302) + self.assertEqual(r['Location'],doc.get_href()) + + # An interim meeting is considered finalized immediately. + meeting = make_interim_meeting(group=GroupFactory(acronym='mars'), date=date_today()) + session = Session.objects.filter(meeting=meeting, group__acronym='mars').first() + attendance_url = urlreverse('ietf.meeting.views.session_attendance', kwargs={'num':meeting.number, 'session_id':session.id}) + self.assertEqual(session.attended_set.count(), 0) + self.client.login(username='recman', password='recman+password') + attendees = [person.user.pk for person in persons] + r = self.client.post(add_attendees_url, {'apikey':apikey.hash(), 'attended':f'{{"session_id":{session.pk},"attendees":{attendees}}}'}) + self.assertEqual(r.status_code, 200) + self.assertEqual(session.attended_set.count(), 4) + doc = session.presentations.filter(document__type_id='bluesheets').first().document + self.assertEqual(doc.rev,'00') + session_url = urlreverse('ietf.meeting.views.session_details', kwargs={'num':meeting.number, 'acronym':session.group.acronym}) + r = self.client.get(session_url) + self.assertContains(r, doc.get_href()) + self.assertNotContains(r, attendance_url) + r = self.client.get(attendance_url) + self.assertEqual(r.status_code,302) + self.assertEqual(r['Location'],doc.get_href()) + + def test_bluesheet_data(self): + session = SessionFactory(meeting__type_id="ietf") + attended_with_affil = RegistrationFactory(meeting=session.meeting, affiliation="Somewhere") + AttendedFactory(session=session, person=attended_with_affil.person, time="2023-03-13T01:24:00Z") # joined 2nd + attended_no_affil = RegistrationFactory(meeting=session.meeting, affiliation="") + AttendedFactory(session=session, person=attended_no_affil.person, time="2023-03-13T01:23:00Z") # joined 1st + RegistrationFactory(meeting=session.meeting) # did not attend + + data = bluesheet_data(session) + self.assertEqual( + data, + [ + {"name": attended_no_affil.person.plain_name(), "affiliation": ""}, + {"name": attended_with_affil.person.plain_name(), "affiliation": "Somewhere"}, + ] + ) diff --git a/ietf/meeting/urls.py b/ietf/meeting/urls.py index d7a623899e..a038e1cfe6 100644 --- a/ietf/meeting/urls.py +++ b/ietf/meeting/urls.py @@ -1,10 +1,10 @@ -# Copyright The IETF Trust 2007-2020, All Rights Reserved +# Copyright The IETF Trust 2007-2025, All Rights Reserved from django.conf import settings from django.urls import include from django.views.generic import RedirectView -from ietf.meeting import views, views_proceedings +from ietf.meeting import views, views_proceedings, views_session_request from ietf.utils.urls import url class AgendaRedirectView(RedirectView): @@ -15,12 +15,15 @@ def get_redirect_url(self, *args, **kwargs): safe_for_all_meeting_types = [ 
url(r'^session/(?P[-a-z0-9]+)/?$', views.session_details), + url(r'^session/(?P[-a-z0-9]+)/send_slide_notifications$', views.notify_meetecho_of_all_slides), url(r'^session/(?P\d+)/drafts$', views.add_session_drafts), + url(r'^session/(?P\d+)/recordings$', views.add_session_recordings), + url(r'^session/(?P\d+)/attendance$', views.session_attendance), url(r'^session/(?P\d+)/bluesheets$', views.upload_session_bluesheets), url(r'^session/(?P\d+)/minutes$', views.upload_session_minutes), + url(r'^session/(?P\d+)/narrativeminutes$', views.upload_session_narrativeminutes), url(r'^session/(?P\d+)/agenda$', views.upload_session_agenda), url(r'^session/(?P\d+)/import/minutes$', views.import_session_minutes), - url(r'^session/(?P\d+)/propose_slides$', views.propose_session_slides), url(r'^session/(?P\d+)/slides(?:/%(name)s)?$' % settings.URL_REGEXPS, views.upload_session_slides), url(r'^session/(?P\d+)/add_to_session$', views.ajax_add_slides_to_session), url(r'^session/(?P\d+)/remove_from_session$', views.ajax_remove_slides_from_session), @@ -28,7 +31,7 @@ def get_redirect_url(self, *args, **kwargs): url(r'^session/(?P\d+)/doc/%(name)s/remove$' % settings.URL_REGEXPS, views.remove_sessionpresentation), url(r'^session/(?P\d+)\.ics$', views.agenda_ical), url(r'^sessions/(?P[-a-z0-9]+)\.ics$', views.agenda_ical), - url(r'^slidesubmission/(?P\d+)$', views.approve_proposed_slides) + url(r'^slidesubmission/(?P\d+)$', views.approve_proposed_slides), ] @@ -62,7 +65,8 @@ def get_redirect_url(self, *args, **kwargs): type_interim_patterns = [ url(r'^agenda/(?P[A-Za-z0-9-]+)-drafts.pdf$', views.session_draft_pdf), url(r'^agenda/(?P[A-Za-z0-9-]+)-drafts.tgz$', views.session_draft_tarfile), - url(r'^materials/%(document)s((?P\.[a-z0-9]+)|/)?$' % settings.URL_REGEXPS, views.materials_document), + url(r'^materials/%(document)s(?P\.[A-Za-z0-9]+)$' % settings.URL_REGEXPS, views.materials_document), + url(r'^materials/%(document)s/?$' % settings.URL_REGEXPS, views.materials_document), url(r'^agenda.json$', views.agenda_json) ] @@ -79,11 +83,10 @@ def get_redirect_url(self, *args, **kwargs): url(r'^agenda.json$', views.agenda_json), url(r'^agenda/week-view(?:.html)?/?$', AgendaRedirectView.as_view(pattern_name='agenda', permanent=True)), url(r'^floor-plan/?$', views.agenda, name='floor-plan'), - url(r'^floor-plan/(?P[-a-z0-9_]+)/?$', RedirectView.as_view(pattern_name='floor-plan', permanent=True)), url(r'^week-view(?:.html)?/?$', AgendaRedirectView.as_view(pattern_name='agenda', permanent=True)), url(r'^materials(?:.html)?/?$', views.materials), url(r'^request_minutes/?$', views.request_minutes), - url(r'^materials/%(document)s((?P\.[a-z0-9]+)|/)?$' % settings.URL_REGEXPS, views.materials_document), + url(r'^materials/%(document)s(?P\.[A-Za-z0-9]+)?/?$' % settings.URL_REGEXPS, views.materials_document), url(r'^session/?$', views.materials_editable_groups), url(r'^proceedings(?:.html)?/?$', views.proceedings), url(r'^proceedings(?:.html)?/finalize/?$', views.finalize_proceedings), @@ -106,6 +109,8 @@ def get_redirect_url(self, *args, **kwargs): url(r'^important-dates.(?Pics)$', views.important_dates), url(r'^proceedings/meetinghosts/edit/', views_proceedings.edit_meetinghosts), url(r'^proceedings/meetinghosts/(?P\d+)/logo/$', views_proceedings.meetinghost_logo), + url(r'^session/request/%(acronym)s/edit/$' % settings.URL_REGEXPS, views_session_request.edit_request), + url(r'^session/request/%(acronym)s/view/$' % settings.URL_REGEXPS, views_session_request.view_request), ] urlpatterns = [ @@ -125,6 +130,13 @@ def 
get_redirect_url(self, *args, **kwargs): url(r'^upcoming/?$', views.upcoming), url(r'^upcoming\.ics/?$', views.upcoming_ical), url(r'^upcoming\.json/?$', views.upcoming_json), + url(r'^session/request/$', views_session_request.list_view), + url(r'^session/request/%(acronym)s/new/$' % settings.URL_REGEXPS, views_session_request.new_request), + url(r'^session/request/%(acronym)s/approve/$' % settings.URL_REGEXPS, views_session_request.approve_request), + url(r'^session/request/%(acronym)s/no_session/$' % settings.URL_REGEXPS, views_session_request.no_session), + url(r'^session/request/%(acronym)s/cancel/$' % settings.URL_REGEXPS, views_session_request.cancel_request), + url(r'^session/request/%(acronym)s/confirm/$' % settings.URL_REGEXPS, views_session_request.confirm), + url(r'^session/request/status/$', views_session_request.status), url(r'^session/(?P\d+)/agenda_materials$', views.session_materials), url(r'^session/(?P\d+)/cancel/?', views.cancel_session), url(r'^session/(?P\d+)/edit/?', views.edit_session), @@ -138,4 +150,3 @@ def get_redirect_url(self, *args, **kwargs): url(r'^(?P\d+)/', include(safe_for_all_meeting_types)), url(r'^(?Pinterim-[a-z0-9-]+)/', include(safe_for_all_meeting_types)), ] - diff --git a/ietf/meeting/utils.py b/ietf/meeting/utils.py index b8bb082479..10ae0d3667 100644 --- a/ietf/meeting/utils.py +++ b/ietf/meeting/utils.py @@ -1,8 +1,14 @@ -# Copyright The IETF Trust 2016-2020, All Rights Reserved +# Copyright The IETF Trust 2016-2024, All Rights Reserved # -*- coding: utf-8 -*- import datetime import itertools +from contextlib import suppress +from dataclasses import dataclass + +import jsonschema import os +import requests + import pytz import subprocess @@ -11,28 +17,55 @@ from django.conf import settings from django.contrib import messages -from django.db.models import Q +from django.core.cache import caches +from django.core.files.base import ContentFile +from django.db import IntegrityError +from django.db.models import OuterRef, Subquery, TextField, Q, Value, Max +from django.db.models.functions import Coalesce +from django.template.loader import render_to_string from django.utils import timezone from django.utils.encoding import smart_str import debug # pyflakes:ignore from ietf.dbtemplate.models import DBTemplate -from ietf.meeting.models import (Session, SchedulingEvent, TimeSlot, - Constraint, SchedTimeSessAssignment, SessionPresentation, Attended) -from ietf.doc.models import Document, DocAlias, State, NewRevisionDocEvent +from ietf.doc.storage_utils import store_bytes, store_str, AlreadyExistsError +from ietf.meeting.models import ( + Session, + SchedulingEvent, + TimeSlot, + Constraint, + SchedTimeSessAssignment, + SessionPresentation, + Attended, + Registration, + Meeting, + RegistrationTicket, +) +from ietf.blobdb.models import ResolvedMaterial +from ietf.doc.models import ( + Document, + State, + NewRevisionDocEvent, + StateDocEvent, + StoredObject, +) from ietf.doc.models import DocEvent from ietf.group.models import Group from ietf.group.utils import can_manage_materials from ietf.name.models import SessionStatusName, ConstraintName, DocTypeName from ietf.person.models import Person -from ietf.utils.html import sanitize_document +from ietf.utils import markdown +from ietf.utils.html import clean_html from ietf.utils.log import log from ietf.utils.timezone import date_today def session_time_for_sorting(session, use_meeting_date): - official_timeslot = TimeSlot.objects.filter(sessionassignments__session=session, 
sessionassignments__schedule__in=[session.meeting.schedule, session.meeting.schedule.base if session.meeting.schedule else None]).first() + if hasattr(session, "_otsa"): + official_timeslot=session._otsa.timeslot + else: + official_timeslot = TimeSlot.objects.filter(sessionassignments__session=session, sessionassignments__schedule__in=[session.meeting.schedule, session.meeting.schedule.base if session.meeting.schedule else None]).first() if official_timeslot: return official_timeslot.time elif use_meeting_date and session.meeting.date: @@ -75,13 +108,14 @@ def group_sessions(sessions): in_progress = [] recent = [] past = [] + for s in sessions: today = date_today(s.meeting.tz()) if s.meeting.date > today: future.append(s) elif s.meeting.end_date() >= today: in_progress.append(s) - elif not s.is_material_submission_cutoff(): + elif not getattr(s, "cached_is_cutoff", lambda: s.is_material_submission_cutoff): recent.append(s) else: past.append(s) @@ -91,6 +125,7 @@ def group_sessions(sessions): recent.reverse() past.reverse() + return future, in_progress, recent, past def get_upcoming_manageable_sessions(user): @@ -139,7 +174,88 @@ def create_proceedings_templates(meeting): meeting.overview = template meeting.save() -def finalize(meeting): + +def bluesheet_data(session): + attendance = ( + Attended.objects.filter(session=session) + .annotate( + affiliation=Coalesce( + Subquery( + Registration.objects.filter( + Q(meeting=session.meeting), + Q(person=OuterRef("person")) | Q(email=OuterRef("person__email")), + ).values("affiliation")[:1] + ), + Value(""), + output_field=TextField(), + ) + ).distinct() + .order_by("time") + ) + + return [ + { + "name": attended.person.plain_name(), + "affiliation": attended.affiliation, + } + for attended in attendance + ] + + +def save_bluesheet(request, session, file, encoding='utf-8'): + bluesheet_sp = session.presentations.filter(document__type='bluesheets').first() + _, ext = os.path.splitext(file.name) + + if bluesheet_sp: + doc = bluesheet_sp.document + doc.rev = '%02d' % (int(doc.rev)+1) + bluesheet_sp.rev = doc.rev + bluesheet_sp.save() + else: + ota = session.official_timeslotassignment() + sess_time = ota and ota.timeslot.time + + if session.meeting.type_id=='ietf': + name = 'bluesheets-%s-%s-%s' % (session.meeting.number, + session.group.acronym, + sess_time.strftime("%Y%m%d%H%M")) + title = 'Bluesheets IETF%s: %s : %s' % (session.meeting.number, + session.group.acronym, + sess_time.strftime("%a %H:%M")) + else: + name = 'bluesheets-%s-%s' % (session.meeting.number, sess_time.strftime("%Y%m%d%H%M")) + title = 'Bluesheets %s: %s' % (session.meeting.number, sess_time.strftime("%a %H:%M")) + doc = Document.objects.create( + name = name, + type_id = 'bluesheets', + title = title, + group = session.group, + rev = '00', + ) + doc.states.add(State.objects.get(type_id='bluesheets',slug='active')) + session.presentations.create(document=doc,rev='00') + filename = '%s-%s%s'% ( doc.name, doc.rev, ext) + doc.uploaded_filename = filename + e = NewRevisionDocEvent.objects.create(doc=doc, rev=doc.rev, by=request.user.person, type='new_revision', desc='New revision available: %s'%doc.rev) + save_error = handle_upload_file(file, filename, session.meeting, 'bluesheets', request=request, encoding=encoding) + if not save_error: + doc.save_with_history([e]) + resolve_uploaded_material(meeting=session.meeting, doc=doc) + return save_error + + +def generate_bluesheet(request, session): + data = bluesheet_data(session) + if not data: + return + text = 
render_to_string('meeting/bluesheet.txt', { + 'session': session, + 'data': data, + }) + return save_bluesheet(request, session, ContentFile(text.encode("utf-8"), name="unusednamepartsothereisanextension.txt")) + + +def finalize(request, meeting): end_date = meeting.end_date() end_time = meeting.tz().localize( datetime.datetime.combine( @@ -148,13 +264,19 @@ def finalize(meeting): ) ).astimezone(pytz.utc) + datetime.timedelta(days=1) for session in meeting.session_set.all(): - for sp in session.sessionpresentation_set.filter(document__type='draft',rev=None): + for sp in session.presentations.filter(document__type='draft',rev=None): rev_before_end = [e for e in sp.document.docevent_set.filter(newrevisiondocevent__isnull=False).order_by('-time') if e.time <= end_time ] if rev_before_end: sp.rev = rev_before_end[-1].newrevisiondocevent.rev else: sp.rev = '00' sp.save() + + # Don't try to generate a bluesheet if it's before we had Attended records. + if int(meeting.number) >= 108: + save_error = generate_bluesheet(request, session) + if save_error: + messages.error(request, save_error) create_proceedings_templates(meeting) meeting.proceedings_final = True @@ -180,7 +302,7 @@ def sort_accept_tuple(accept): return tup def condition_slide_order(session): - qs = session.sessionpresentation_set.filter(document__type_id='slides').order_by('order') + qs = session.presentations.filter(document__type_id='slides').order_by('order') order_list = qs.values_list('order',flat=True) if list(order_list) != list(range(1,qs.count()+1)): for num, sp in enumerate(qs, start=1): @@ -518,7 +640,8 @@ def bulk_create_timeslots(meeting, times, locations, other_props): def preprocess_meeting_important_dates(meetings): for m in meetings: - m.cached_updated = m.updated() + # cached_updated must be present, set it to 1970-01-01 if necessary + m.cached_updated = m.updated() or pytz.utc.localize(datetime.datetime(1970, 1, 1, 0, 0, 0)) m.important_dates = m.importantdate_set.prefetch_related("name") for d in m.important_dates: d.midnight_cutoff = "UTC 23:59" in d.name.name @@ -550,7 +673,7 @@ class SaveMaterialsError(Exception): pass -def save_session_minutes_revision(session, file, ext, request, encoding=None, apply_to_all=False): +def save_session_minutes_revision(session, file, ext, request, encoding=None, apply_to_all=False, narrative=False): """Creates or updates session minutes records This updates the database models to reflect a new version. It does not handle @@ -563,7 +686,8 @@ def save_session_minutes_revision(session, file, ext, request, encoding=None, ap Returns (Document, [DocEvents]), which should be passed to doc.save_with_history() if the file contents are stored successfully. 
""" - minutes_sp = session.sessionpresentation_set.filter(document__type='minutes').first() + document_type = DocTypeName.objects.get(slug= 'narrativeminutes' if narrative else 'minutes') + minutes_sp = session.presentations.filter(document__type=document_type).first() if minutes_sp: doc = minutes_sp.document doc.rev = '%02d' % (int(doc.rev)+1) @@ -575,40 +699,37 @@ def save_session_minutes_revision(session, file, ext, request, encoding=None, ap if not sess_time: raise SessionNotScheduledError if session.meeting.type_id=='ietf': - name = 'minutes-%s-%s' % (session.meeting.number, - session.group.acronym) - title = 'Minutes IETF%s: %s' % (session.meeting.number, - session.group.acronym) + name = f"{document_type.prefix}-{session.meeting.number}-{session.group.acronym}" + title = f"{document_type.name} IETF{session.meeting.number}: {session.group.acronym}" if not apply_to_all: name += '-%s' % (sess_time.strftime("%Y%m%d%H%M"),) title += ': %s' % (sess_time.strftime("%a %H:%M"),) else: - name = 'minutes-%s-%s' % (session.meeting.number, sess_time.strftime("%Y%m%d%H%M")) - title = 'Minutes %s: %s' % (session.meeting.number, sess_time.strftime("%a %H:%M")) + name =f"{document_type.prefix}-{session.meeting.number}-{sess_time.strftime('%Y%m%d%H%M')}" + title = f"{document_type.name} {session.meeting.number}: {sess_time.strftime('%a %H:%M')}" if Document.objects.filter(name=name).exists(): doc = Document.objects.get(name=name) doc.rev = '%02d' % (int(doc.rev)+1) else: doc = Document.objects.create( name = name, - type_id = 'minutes', + type = document_type, title = title, group = session.group, rev = '00', ) - DocAlias.objects.create(name=doc.name).docs.add(doc) - doc.states.add(State.objects.get(type_id='minutes',slug='active')) - if session.sessionpresentation_set.filter(document=doc).exists(): - sp = session.sessionpresentation_set.get(document=doc) + doc.states.add(State.objects.get(type_id=document_type.slug,slug='active')) + if session.presentations.filter(document=doc).exists(): + sp = session.presentations.get(document=doc) sp.rev = doc.rev sp.save() else: - session.sessionpresentation_set.create(document=doc,rev=doc.rev) + session.presentations.create(document=doc,rev=doc.rev) if apply_to_all: for other_session in get_meeting_sessions(session.meeting.number, session.group.acronym): if other_session != session: - other_session.sessionpresentation_set.filter(document__type='minutes').delete() - other_session.sessionpresentation_set.create(document=doc,rev=doc.rev) + other_session.presentations.filter(document__type=document_type).delete() + other_session.presentations.create(document=doc,rev=doc.rev) filename = f'{doc.name}-{doc.rev}{ext}' doc.uploaded_filename = filename e = NewRevisionDocEvent.objects.create( @@ -624,7 +745,7 @@ def save_session_minutes_revision(session, file, ext, request, encoding=None, ap file=file, filename=doc.uploaded_filename, meeting=session.meeting, - subdir='minutes', + subdir=document_type.slug, request=request, encoding=encoding, ) @@ -637,31 +758,27 @@ def save_session_minutes_revision(session, file, ext, request, encoding=None, ap def handle_upload_file(file, filename, meeting, subdir, request=None, encoding=None): """Accept an uploaded materials file - This function takes a file object, a filename and a meeting object and subdir as string. + This function takes a _binary mode_ file object, a filename and a meeting object and subdir as string. It saves the file to the appropriate directory, get_materials_path() + subdir. 
- If the file is a zip file, it creates a new directory in 'slides', which is the basename of the - zip file and unzips the file in the new directory. """ filename = Path(filename) - is_zipfile = filename.suffix == '.zip' path = Path(meeting.get_materials_path()) / subdir - if is_zipfile: - path = path / filename.stem path.mkdir(parents=True, exist_ok=True) - # agendas and minutes can only have one file instance so delete file if it already exists - if subdir in ('agenda', 'minutes'): - for f in path.glob(f'{filename.stem}.*'): + with (path / filename).open('wb+') as destination: + # prep file for reading + if hasattr(file, "chunks"): + chunks = file.chunks() + else: try: - f.unlink() - except FileNotFoundError: - pass # if the file is already gone, so be it + file.seek(0) + except AttributeError: + pass + chunks = [file.read()] # pretend we have chunks - with (path / filename).open('wb+') as destination: if filename.suffix in settings.MEETING_VALID_MIME_TYPE_EXTENSIONS['text/html']: - file.open() - text = file.read() + text = b"".join(chunks) if encoding: try: text = text.decode(encoding) @@ -678,8 +795,13 @@ def handle_upload_file(file, filename, meeting, subdir, request=None, encoding=N return "Failure trying to save '%s'. Hint: Try to upload as UTF-8: %s..." % (filename, str(e)[:120]) # Whole file sanitization; add back what's missing from a complete # document (sanitize will remove these). - clean = sanitize_document(text) - destination.write(clean.encode('utf8')) + clean = clean_html(text) + clean_bytes = clean.encode('utf8') + destination.write(clean_bytes) + # Assumes contents of subdir are always document type ids + # TODO-BLOBSTORE: see if we can refactor this so that the connection to the document isn't lost + # In the meantime, consider faking it by parsing filename (shudder). + store_bytes(subdir, filename.name, clean_bytes) if request and clean != text: messages.warning(request, ( @@ -688,15 +810,13 @@ def handle_upload_file(file, filename, meeting, subdir, request=None, encoding=N f"please check the resulting content. 
" )) else: - if hasattr(file, 'chunks'): - for chunk in file.chunks(): - destination.write(chunk) - else: - destination.write(file.read()) - - # unzip zipfile - if is_zipfile: - subprocess.call(['unzip', filename], cwd=path) + for chunk in chunks: + destination.write(chunk) + file.seek(0) + if hasattr(file, "chunks"): + chunks = file.chunks() + # TODO-BLOBSTORE: See above question about refactoring + store_bytes(subdir, filename.name, b"".join(chunks)) return None @@ -720,17 +840,354 @@ def new_doc_for_session(type_id, session): rev = '00', ) doc.states.add(State.objects.get(type_id=type_id, slug='active')) - DocAlias.objects.create(name=doc.name).docs.add(doc) - session.sessionpresentation_set.create(document=doc,rev='00') + session.presentations.create(document=doc,rev='00') return doc +# TODO-BLOBSTORE - consider adding doc to this signature and factoring away type_id def write_doc_for_session(session, type_id, filename, contents): filename = Path(filename) path = Path(session.meeting.get_materials_path()) / type_id path.mkdir(parents=True, exist_ok=True) with open(path / filename, "wb") as file: file.write(contents.encode('utf-8')) - return + store_str(type_id, filename.name, contents) + return None + + +@dataclass +class BlobSpec: + bucket: str + name: str + + +def resolve_one_material( + doc: Document, rev: str | None, ext: str | None +) -> BlobSpec | None: + if doc.type_id is None: + log(f"Cannot resolve a doc with no type: {doc.name}") + return None + + # Get the Document's base name. It may or may not have an extension. + if rev is None: + basename = Path(doc.get_base_name()) + else: + basename = Path(f"{doc.name}-{int(rev):02d}") + + # If the document's file exists, the blob is _always_ named with this stem, + # even if it's different from the original. + blob_stem = Path(f"{doc.name}-{rev or doc.rev}") + + # If we have an extension, either from the URL or the Document's base name, look up + # the blob or file or return 404. N.b. the suffix check needs adjustment to handle + # a bare "." extension when we reach py3.14. + if ext or basename.suffix != "": + if ext: + blob_name = str(blob_stem.with_suffix(ext)) + else: + blob_name = str(blob_stem.with_suffix(basename.suffix)) + + # See if we have a stored object under that name + preferred_blob = ( + StoredObject.objects.exclude_deleted() + .filter(store=doc.type_id, name=blob_name) + .first() + ) + if preferred_blob is not None: + return BlobSpec( + bucket=preferred_blob.store, + name=preferred_blob.name, + ) + # No stored object, fall back to the file system. + filename = Path(doc.get_file_path()) / basename # use basename for file + if filename.is_file(): + return BlobSpec( + bucket=doc.type_id, + name=str(blob_stem.with_suffix(filename.suffix)), + ) + else: + return None + + # No extension has been specified so far, so look one up. + matching_stored_objects = ( + StoredObject.objects.exclude_deleted() + .filter( + store=doc.type_id, + name__startswith=f"{blob_stem}.", # anchor to end with trailing "." 
+ ) + .order_by("name") + ) # orders by suffix + blob_ext_choices = { + Path(stored_obj.name).suffix: stored_obj + for stored_obj in matching_stored_objects + } + + # Short-circuit to return pdf if present + if ".pdf" in blob_ext_choices: + pdf_blob = blob_ext_choices[".pdf"] + return BlobSpec( + bucket=pdf_blob.store, + name=str(blob_stem.with_suffix(".pdf")), + ) + + # Now look for files + filename = Path(doc.get_file_path()) / basename + file_ext_choices = { + # Construct a map from suffix to full filename + fn.suffix: fn.name + for fn in sorted(filename.parent.glob(filename.stem + ".*")) + } + + # Short-circuit to return pdf if we have the file + if ".pdf" in file_ext_choices: + return BlobSpec( + bucket=doc.type_id, + name=str(blob_stem.with_suffix(".pdf")), + ) + + all_exts = set(blob_ext_choices.keys()).union(file_ext_choices.keys()) + if len(all_exts) > 0: + preferred_ext = sorted(all_exts)[0] + if preferred_ext in blob_ext_choices: + preferred_blob = blob_ext_choices[preferred_ext] + return BlobSpec( + bucket=preferred_blob.store, + name=preferred_blob.name, + ) + else: + return BlobSpec( + bucket=doc.type_id, + name=str(blob_stem.with_suffix(preferred_ext)), + ) + + return None + + +def resolve_materials_for_one_meeting(meeting: Meeting): + start_time = timezone.now() + meeting_documents = ( + Document.objects.filter( + type_id__in=settings.MATERIALS_TYPES_SERVED_BY_WORKER + ).filter( + Q(session__meeting=meeting) | Q(proceedingsmaterial__meeting=meeting) + ) + ).distinct() + + resolved = [] + for doc in meeting_documents: + # request by doc name with no rev + blob = resolve_one_material(doc, rev=None, ext=None) + if blob is not None: + resolved.append( + ResolvedMaterial( + name=doc.name, + meeting_number=meeting.number, + bucket=blob.bucket, + blob=blob.name, + ) + ) + # request by doc name + rev + blob = resolve_one_material(doc, rev=doc.rev, ext=None) + if blob is not None: + resolved.append( + ResolvedMaterial( + name=f"{doc.name}-{doc.rev:02}", + meeting_number=meeting.number, + bucket=blob.bucket, + blob=blob.name, + ) + ) + # for other revisions, only need request by doc name + rev + other_revisions = doc.revisions_by_newrevisionevent() + other_revisions.remove(doc.rev) + for rev in other_revisions: + blob = resolve_one_material(doc, rev=rev, ext=None) + if blob is not None: + resolved.append( + ResolvedMaterial( + name=f"{doc.name}-{rev:02}", + meeting_number=meeting.number, + bucket=blob.bucket, + blob=blob.name, + ) + ) + ResolvedMaterial.objects.bulk_create( + resolved, + update_conflicts=True, + unique_fields=["name", "meeting_number"], + update_fields=["bucket", "blob"], + ) + # Warn if any files were updated during the above process + last_update = meeting_documents.aggregate(Max("time"))["time__max"] + if last_update and last_update > start_time: + log( + f"Warning: materials for meeting {meeting.number} " + "changed during ResolvedMaterial update" + ) + +def resolve_uploaded_material(meeting: Meeting, doc: Document): + resolved: list[ResolvedMaterial] = [] + remove = ResolvedMaterial.objects.none() + blob = resolve_one_material(doc, rev=None, ext=None) + if blob is None: + # Versionless file does not exist. Remove the versionless ResolvedMaterial + # if it existed. This is to avoid leaving behind a stale link to a replaced + # version. This comes up e.g. if a ProceedingsMaterial is changed from having + # an uploaded file to being an external URL. 
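    # Editor's sketch, not part of the patch: with a hypothetical name such as
    # "proceedings-123-host-speaker-series", an earlier upload would have left a
    # versionless ResolvedMaterial row pointing at its blob; once the material is
    # switched to an external URL, resolve_one_material() returns None and that
    # stale row is queued for deletion below instead of being left behind.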
+ remove = ResolvedMaterial.objects.filter( + name=doc.name, meeting_number=meeting.number + ) + else: + resolved.append( + ResolvedMaterial( + name=doc.name, + meeting_number=meeting.number, + bucket=blob.bucket, + blob=blob.name, + ) + ) + # request by doc name + rev + blob = resolve_one_material(doc, rev=doc.rev, ext=None) + if blob is not None: + resolved.append( + ResolvedMaterial( + name=f"{doc.name}-{doc.rev:02}", + meeting_number=meeting.number, + bucket=blob.bucket, + blob=blob.name, + ) + ) + # Create the new record(s) + ResolvedMaterial.objects.bulk_create( + resolved, + update_conflicts=True, + unique_fields=["name", "meeting_number"], + update_fields=["bucket", "blob"], + ) + # and remove one if necessary (will be a none() queryset if not) + remove.delete() + + +def store_blob_for_one_material_file(doc: Document, rev: str, filepath: Path): + if not settings.ENABLE_BLOBSTORAGE: + raise RuntimeError("Cannot store blobs: ENABLE_BLOBSTORAGE is False") + + bucket = doc.type_id + if bucket not in settings.MATERIALS_TYPES_SERVED_BY_WORKER: + raise ValueError(f"Bucket {bucket} not found for doc {doc.name}.") + blob_stem = f"{doc.name}-{rev}" + suffix = filepath.suffix # includes leading "." + + # Store the file + try: + file_bytes = filepath.read_bytes() + except Exception as err: + log(f"Failed to read {filepath}: {err}") + raise + with suppress(AlreadyExistsError): + store_bytes( + kind=bucket, + name= blob_stem + suffix, + content=file_bytes, + mtime=datetime.datetime.fromtimestamp( + filepath.stat().st_mtime, + tz=datetime.UTC, + ), + allow_overwrite=False, + doc_name=doc.name, + doc_rev=rev, + ) + + # Special case: pre-render markdown into HTML as .md.html + if suffix == ".md": + try: + markdown_source = file_bytes.decode("utf-8") + except UnicodeDecodeError as err: + log(f"Unable to decode {filepath} as UTF-8, treating as latin-1: {err}") + markdown_source = file_bytes.decode("latin-1") + # render the markdown + try: + html = render_to_string( + "minimal.html", + { + "content": markdown.markdown(markdown_source), + "title": blob_stem, + "static_ietf_org": settings.STATIC_IETF_ORG, + }, + ) + except Exception as err: + log(f"Failed to render markdown for {filepath}: {err}") + else: + # Don't overwrite, but don't fail if the blob exists + with suppress(AlreadyExistsError): + store_str( + kind=bucket, + name=blob_stem + ".md.html", + content=html, + allow_overwrite=False, + doc_name=doc.name, + doc_rev=rev, + content_type="text/html;charset=utf-8", + ) + + +def store_blobs_for_one_material_doc(doc: Document): + """Ensure that all files related to a materials Document are in the blob store""" + if doc.type_id not in settings.MATERIALS_TYPES_SERVED_BY_WORKER: + log(f"This method does not handle docs of type {doc.name}") + return + + # Store files for current Document / rev + file_path = Path(doc.get_file_path()) + base_name = Path(doc.get_base_name()) + # .stem would remove directories, so use .with_suffix("") + base_name_stem = str(base_name.with_suffix("")) + if base_name_stem.endswith(".") and base_name.suffix == "": + # In Python 3.14, a trailing "." is a valid suffix, but in prior versions + # it is left as part of the stem. The suffix check ensures that either way, + # only a single "." will be removed. 
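    # Editor's worked example, not part of the patch, using a hypothetical base name
    # "agenda-123-example." (note the trailing dot):
    #   * before 3.14, Path("agenda-123-example.").suffix == "" and with_suffix("")
    #     leaves the stem as "agenda-123-example."
    #   * from 3.14 on (per the note above), the trailing "." counts as the suffix,
    #     so with_suffix("") already yields "agenda-123-example"
    # Stripping at most one trailing "." here therefore gives the same stem on
    # either interpreter version.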
+ base_name_stem = base_name_stem[:-1] + # Add any we find without the rev + for file_to_store in file_path.glob(base_name_stem + ".*"): + if not (file_to_store.is_file()): + continue + try: + store_blob_for_one_material_file(doc, doc.rev, file_to_store) + except Exception as err: + log( + f"Failed to store blob for {doc} rev {doc.rev} " + f"from {file_to_store}: {err}" + ) + + # Get other revisions + for rev in doc.revisions_by_newrevisionevent(): + if rev == doc.rev: + continue # already handled this + + # Add some that have the rev + for file_to_store in file_path.glob(doc.name + f"-{rev}.*"): + if not file_to_store.is_file(): + continue + try: + store_blob_for_one_material_file(doc, rev, file_to_store) + except Exception as err: + log( + f"Failed to store blob for {doc} rev {rev} " + f"from {file_to_store}: {err}" + ) + + +def store_blobs_for_one_meeting(meeting: Meeting): + meeting_documents = ( + Document.objects.filter( + type_id__in=settings.MATERIALS_TYPES_SERVED_BY_WORKER + ).filter( + Q(session__meeting=meeting) | Q(proceedingsmaterial__meeting=meeting) + ) + ).distinct() + + for doc in meeting_documents: + store_blobs_for_one_material_doc(doc) + def create_recording(session, url, title=None, user=None): ''' @@ -753,8 +1210,6 @@ def create_recording(session, url, title=None, user=None): rev='00', type_id='recording') doc.set_state(State.objects.get(type='recording', slug='active')) - - DocAlias.objects.create(name=doc.name).docs.add(doc) # create DocEvent NewRevisionDocEvent.objects.create(type='new_revision', @@ -764,20 +1219,40 @@ def create_recording(session, url, title=None, user=None): desc='New revision available', time=doc.time) pres = SessionPresentation.objects.create(session=session,document=doc,rev=doc.rev) - session.sessionpresentation_set.add(pres) + session.presentations.add(pres) return doc +def delete_recording(session_presentation, user=None): + """Delete a session recording""" + document = session_presentation.document + if document.type_id != "recording": + raise ValueError(f"Document {document.pk} is not a recording (type_id={document.type_id})") + recording_state = document.get_state("recording") + deleted_state = State.objects.get(type_id="recording", slug="deleted") + if recording_state != deleted_state: + # Update the recording state and create a history event + document.set_state(deleted_state) + StateDocEvent.objects.create( + type="changed_state", + by=user or Person.objects.get(name="(System)"), + doc=document, + rev=document.rev, + state_type=deleted_state.type, + state=deleted_state, + ) + session_presentation.delete() + def get_next_sequence(group, meeting, type): ''' Returns the next sequence number to use for a document of type = type. 
Takes a group=Group object, meeting=Meeting object, type = string ''' - aliases = DocAlias.objects.filter(name__startswith='{}-{}-{}-'.format(type, meeting.number, group.acronym)) - if not aliases: + docs = Document.objects.filter(name__startswith='{}-{}-{}-'.format(type, meeting.number, group.acronym)) + if not docs: return 1 - aliases = aliases.order_by('name') - sequence = int(aliases.last().name.split('-')[-1]) + 1 + docs = docs.order_by('name') + sequence = int(docs.last().name.split('-')[-1]) + 1 return sequence def get_activity_stats(sdate, edate): @@ -830,13 +1305,14 @@ def get_activity_stats(sdate, edate): data['ffw_update_count'] = ffw_update_count data['ffw_update_percent'] = ffw_update_percent - rfcs = events.filter(type='published_rfc') - data['rfcs'] = rfcs.select_related('doc').select_related('doc__group').select_related('doc__intended_std_level') + rfcs_events = DocEvent.objects.filter(doc__type='rfc', time__gte=sdatetime, time__lt=edatetime) + rfcs = rfcs_events.filter(type='published_rfc') + data['rfcs'] = rfcs.select_related('doc').select_related('doc__group').select_related('doc__std_level') - data['counts'] = {'std': rfcs.filter(doc__intended_std_level__in=('ps', 'ds', 'std')).count(), - 'bcp': rfcs.filter(doc__intended_std_level='bcp').count(), - 'exp': rfcs.filter(doc__intended_std_level='exp').count(), - 'inf': rfcs.filter(doc__intended_std_level='inf').count()} + data['counts'] = {'std': rfcs.filter(doc__std_level__in=('ps', 'ds', 'std')).count(), + 'bcp': rfcs.filter(doc__std_level='bcp').count(), + 'exp': rfcs.filter(doc__std_level='exp').count(), + 'inf': rfcs.filter(doc__std_level='inf').count()} data['new_groups'] = Group.objects.filter( type='wg', @@ -863,9 +1339,9 @@ def post_process(doc): Does post processing on uploaded file. - Convert PPT to PDF ''' - if is_powerpoint(doc) and hasattr(settings, 'SECR_PPT2PDF_COMMAND'): + if is_powerpoint(doc) and hasattr(settings, 'PPT2PDF_COMMAND'): try: - cmd = list(settings.SECR_PPT2PDF_COMMAND) # Don't operate on the list actually in settings + cmd = list(settings.PPT2PDF_COMMAND) # Don't operate on the list actually in settings cmd.append(doc.get_file_path()) # outdir cmd.append(os.path.join(doc.get_file_path(), doc.uploaded_filename)) # filename subprocess.check_call(cmd) @@ -891,7 +1367,501 @@ def participants_for_meeting(meeting): checked_in = queryset of onsite, checkedin participants values_list('person') attended = queryset of remote participants who attended a session values_list('person') """ - checked_in = meeting.meetingregistration_set.filter(reg_type='onsite', checkedin=True).values_list('person', flat=True).distinct() + checked_in = meeting.registration_set.onsite().filter(checkedin=True).values_list('person', flat=True).distinct() sessions = meeting.session_set.filter(Q(type='plenary') | Q(group__type__in=['wg', 'rg'])) attended = Attended.objects.filter(session__in=sessions).values_list('person', flat=True).distinct() return (checked_in, attended) + + +def generate_proceedings_content(meeting, force_refresh=False): + """Render proceedings content for a meeting and update cache + + Caches its value for 25 hours to ensure that the cache never expires if + we recompute the value daily. + + :meeting: meeting whose proceedings should be rendered + :force_refresh: true to force regeneration and cache refresh + """ + cache = caches["proceedings"] + key_components = [ + "proceedings", + str(meeting.number), + ] + if meeting.proceedings_final: + # Freeze the cache key once proceedings are finalized. 
Further changes will + # not be picked up until the cache expires or is refreshed by the + # proceedings_content_refresh_task() + key_components.append("final") + else: + # Build a cache key that changes when materials are modified. For all but drafts, + # use the last modification time of the document. Exclude drafts from this because + # revisions long after the meeting ends will otherwise show up as changes and + # incorrectly invalidate the cache. Instead, include an ordered list of the + # drafts linked to the meeting so adding or removing drafts will trigger a + # recalculation. The list is long but that doesn't matter because we hash it into + # a fixed-length key. + meeting_docs = Document.objects.filter(session__meeting__number=meeting.number) + last_materials_update = ( + meeting_docs.exclude(type_id="draft") + .filter(session__meeting__number=meeting.number) + .aggregate(Max("time"))["time__max"] + ) + draft_names = ( + meeting_docs + .filter(type_id="draft") + .order_by("name") + .values_list("name", flat=True) + ) + key_components += [ + last_materials_update.isoformat() if last_materials_update else "-", + ",".join(draft_names), + ] + + # Key is potentially long, but the "proceedings" cache hashes it to a fixed + # length. If that changes, hash it separately here first. + cache_key = ".".join(key_components) + if not force_refresh: + cached_content = cache.get(cache_key, None) + if cached_content is not None: + return cached_content + + def area_and_group_acronyms_from_session(s): + area = s.group_parent_at_the_time() + if area == None: + area = s.group.parent + group = s.group_at_the_time() + return (area.acronym, group.acronym) + + schedule = meeting.schedule + sessions = ( + meeting.session_set.with_current_status() + .filter(Q(timeslotassignments__schedule__in=[schedule, schedule.base if schedule else None]) + | Q(current_status='notmeet')) + .select_related() + .order_by('-current_status') + ) + + plenaries, _ = organize_proceedings_sessions( + sessions.filter(name__icontains='plenary') + .exclude(current_status='notmeet') + ) + irtf_meeting, irtf_not_meeting = organize_proceedings_sessions( + sessions.filter(group__parent__acronym = 'irtf').order_by('group__acronym') + ) + # per Colin (datatracker #5010) - don't report not meeting rags + irtf_not_meeting = [item for item in irtf_not_meeting if item["group"].type_id != "rag"] + irtf = {"meeting_groups":irtf_meeting, "not_meeting_groups":irtf_not_meeting} + + training, _ = organize_proceedings_sessions( + sessions.filter(group__acronym__in=['edu','iaoc'], type_id__in=['regular', 'other',]) + .exclude(current_status='notmeet') + ) + iab, _ = organize_proceedings_sessions( + sessions.filter(group__parent__acronym = 'iab') + .exclude(current_status='notmeet') + ) + editorial, _ = organize_proceedings_sessions( + sessions.filter(group__acronym__in=['rsab','rswg']) + .exclude(current_status='notmeet') + ) + + ietf = sessions.filter(group__parent__type__slug = 'area').exclude(group__acronym__in=['edu','iepg','tools']) + ietf = list(ietf) + ietf.sort(key=lambda s: area_and_group_acronyms_from_session(s)) + ietf_areas = [] + for area, area_sessions in itertools.groupby(ietf, key=lambda s: s.group_parent_at_the_time()): + meeting_groups, not_meeting_groups = organize_proceedings_sessions(area_sessions) + ietf_areas.append((area, meeting_groups, not_meeting_groups)) + + with timezone.override(meeting.tz()): + rendered_content = render_to_string( + "meeting/proceedings.html", + { + 'meeting': meeting, + 'plenaries': plenaries, + 
'training': training, + 'irtf': irtf, + 'iab': iab, + 'editorial': editorial, + 'ietf_areas': ietf_areas, + 'meetinghost_logo': { + 'max_height': settings.MEETINGHOST_LOGO_MAX_DISPLAY_HEIGHT, + 'max_width': settings.MEETINGHOST_LOGO_MAX_DISPLAY_WIDTH, + } + }, + ) + cache.set( + cache_key, + rendered_content, + timeout=3600 + 86400, # one day + one hour, in seconds + ) + return rendered_content + + +def organize_proceedings_sessions(sessions): + # Collect sessions by Group, then bin by session name (including sessions with blank names). + # If all of a group's sessions are 'notmeet', the processed data goes in not_meeting_sessions. + # Otherwise, the data goes in meeting_sessions. + meeting_groups = [] + not_meeting_groups = [] + for group_acronym, group_sessions in itertools.groupby(sessions, key=lambda s: s.group.acronym): + by_name = {} + is_meeting = False + all_canceled = True + group = None + for s in sorted( + group_sessions, + key=lambda gs: ( + gs.official_timeslotassignment().timeslot.time + if gs.official_timeslotassignment() else datetime.datetime(datetime.MAXYEAR, 1, 1) + ), + ): + group = s.group + if s.current_status != 'notmeet': + is_meeting = True + if s.current_status != 'canceled': + all_canceled = False + by_name.setdefault(s.name, []) + if s.current_status != 'notmeet' or s.presentations.exists(): + by_name[s.name].append(s) # for notmeet, only include sessions with materials + for sess_name, ss in by_name.items(): + session = ss[0] if ss else None + def _format_materials(items): + """Format session/material for template + + Input is a list of (session, materials) pairs. The materials value can be a single value or a list. + """ + material_times = {} # key is material, value is first timestamp it appeared + for s, mats in items: + tsa = s.official_timeslotassignment() + timestamp = tsa.timeslot.time if tsa else None + if not isinstance(mats, list): + mats = [mats] + for mat in mats: + if mat and mat not in material_times: + material_times[mat] = timestamp + n_mats = len(material_times) + result = [] + if n_mats == 1: + result.append({'material': list(material_times)[0]}) # no 'time' when only a single material + elif n_mats > 1: + for mat, timestamp in material_times.items(): + result.append({'material': mat, 'time': timestamp}) + return result + + entry = { + 'group': group, + 'name': sess_name, + 'session': session, + 'canceled': all_canceled, + 'has_materials': s.presentations.exists(), + 'agendas': _format_materials((s, s.agenda()) for s in ss), + 'minutes': _format_materials((s, s.minutes()) for s in ss), + 'bluesheets': _format_materials((s, s.bluesheets()) for s in ss), + 'recordings': _format_materials((s, s.recordings()) for s in ss), + 'meetecho_recordings': _format_materials((s, [s.session_recording_url()]) for s in ss), + 'chatlogs': _format_materials((s, s.chatlogs()) for s in ss), + 'slides': _format_materials((s, s.slides()) for s in ss), + 'drafts': _format_materials((s, s.drafts()) for s in ss), + 'last_update': session.last_update if hasattr(session, 'last_update') else None + } + if session and session.meeting.type_id == 'ietf' and not session.meeting.proceedings_final: + entry['attendances'] = _format_materials((s, s) for s in ss if Attended.objects.filter(session=s).exists()) + if is_meeting: + meeting_groups.append(entry) + else: + not_meeting_groups.append(entry) + return meeting_groups, not_meeting_groups + + +import_registration_json_validator = jsonschema.Draft202012Validator( + schema={ + "type": "object", + "properties": { + "objects": { + 
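                # Editor's sketch of a payload this validator is meant to accept; the
                # values are hypothetical and only illustrate the properties declared
                # below, they are not real registration API data:
                #   {"objects": {"jane@example.com": {
                #       "first_name": "Jane", "last_name": "Doe",
                #       "email": "jane@example.com", "affiliation": "Example Corp",
                #       "country_code": "US", "meeting": "123",
                #       "checkedin": true, "cancelled": false, "is_nomcom_volunteer": false,
                #       "tickets": [{"attendance_type": "onsite", "ticket_type": "week_pass"}]}}}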
"type": "object", + "patternProperties": { + # Email address as key (simplified pattern or just allow any key) + ".*": { + "type": "object", + "properties": { + "first_name": {"type": "string"}, + "last_name": {"type": "string"}, + "email": {"type": "string", "format": "email"}, + "affiliation": {"type": "string"}, + "country_code": {"type": "string", "minLength": 2, "maxLength": 2}, + "meeting": {"type": "string"}, + "checkedin": {"type": "boolean"}, + "cancelled": {"type": "boolean"}, + "is_nomcom_volunteer": {"type": "boolean"}, + "tickets": { + "type": "array", + "items": { + "type": "object", + "properties": { + "attendance_type": {"type": "string"}, + "ticket_type": {"type": "string"} + }, + "required": ["attendance_type", "ticket_type"] + } + } + }, + "required": [ + "first_name", "last_name", "email", + "country_code", "meeting", 'affiliation', + "checkedin", "is_nomcom_volunteer", "tickets", + "cancelled", + ] + } + }, + "additionalProperties": False + } + }, + "required": ["objects"] + } +) + + +def get_registration_data(meeting): + '''Retrieve data from registation system for meeting''' + url = settings.REGISTRATION_PARTICIPANTS_API_URL + key = settings.REGISTRATION_PARTICIPANTS_API_KEY + params = {'meeting': meeting.number, 'apikey': key} + try: + response = requests.get(url, params=params, timeout=settings.DEFAULT_REQUESTS_TIMEOUT) + except requests.Timeout as e: + log(f'GET request timed out for [{url}]: {e}') + raise Exception("Timeout retrieving data from registration API") from e + if response.status_code == 200: + try: + decoded = response.json() + except ValueError as e: + raise ValueError(f'Could not decode response from registration API: {e}') + else: + raise Exception(f'Bad response from registration API: {response.status_code}, {response.content[:64]}') + + # validate registration data + import_registration_json_validator.validate(decoded) + return decoded + + +def sync_registration_data(meeting): + """"Sync meeting.Registration with registration system. + + Registration records are created in realtime as people register for a + meeting. This function serves as an audit / reconciliation. Most records are + expected to already exist. The function has been optimized with this in mind. 
+ + - Creates new registrations if they don't exist + - Updates existing registrations if fields differ + - Updates tickets as needed + - Deletes registrations that exist in the database but not in the JSON data + + Returns: + dict: Summary of changes made (created, updated, deleted counts) + """ + reg_data = get_registration_data(meeting) + + # Get the meeting ID from the first registration, the API only deals with one meeting at a time + first_email = next(iter(reg_data['objects'])) + meeting_number = reg_data['objects'][first_email]['meeting'] + try: + Meeting.objects.get(number=meeting_number) + except Meeting.DoesNotExist: + raise Exception(f'meeting does not exist {meeting_number}') + + # Get all existing registrations for this meeting + existing_registrations = meeting.registration_set.all() + existing_emails = set(reg.email for reg in existing_registrations if reg.email) + + # Track changes for reporting + stats = { + 'created': 0, + 'updated': 0, + 'deleted': 0, + 'processed': 0, + } + + # Process registrations from reg_data + reg_emails = set() + for email, data in reg_data['objects'].items(): + stats['processed'] += 1 + reg_emails.add(email) + + # Process this registration + _, action_taken = process_single_registration(data, meeting) + + # Update stats + if action_taken == 'created': + stats['created'] += 1 + elif action_taken == 'updated': + stats['updated'] += 1 + + # Delete registrations that exist in the DB but not in registration data, they've been cancelled + emails_to_delete = existing_emails - reg_emails + if emails_to_delete: + log(f"sync_reg: emails marked for deletion: {emails_to_delete}") + result = Registration.objects.filter( + email__in=emails_to_delete, + meeting=meeting + ).delete() + if 'meeting.Registration' in result[1]: + deleted_count = result[1]['meeting.Registration'] + else: + deleted_count = 0 + stats['deleted'] = deleted_count + # set meeting.attendees + count = Registration.objects.onsite().filter(meeting=meeting, checkedin=True).count() + if meeting.attendees != count: + meeting.attendees = count + meeting.save() + + return stats + + +def process_single_registration(reg_data, meeting): + """ + Process a single registration record - create, update, or leave unchanged as needed. 
+ + Args: + reg_data (dict): Registration data + meeting (obj): The IETF meeting + + Returns: + tuple: (registration, action_taken) + - registration: Registration object + - action_taken: String indicating 'created', 'updated', or None + """ + # import here to avoid circular imports + from ietf.nomcom.models import Volunteer, NomCom + + action_taken = None + fields_updated = False + tickets_modified = False + + # handle deleted + # should not see cancelled records during nightly sync but can see + # them from realtime notifications + if reg_data['cancelled']: + try: + registration = Registration.objects.get(meeting=meeting, email=reg_data['email']) + except Registration.DoesNotExist: + return (None, None) + for ticket in reg_data['tickets']: + target = registration.tickets.filter( + attendance_type__slug=ticket['attendance_type'], + ticket_type__slug=ticket['ticket_type']).first() + if target: + target.delete() + if registration.tickets.count() == 0: + registration.delete() + log(f"sync_reg: cancelled registration {reg_data['email']}") + return (None, 'deleted') + + person = Person.objects.filter(email__address=reg_data['email']).first() + if not person: + log(f"ERROR: meeting registration email unknown {reg_data['email']}") + + registration, created = Registration.objects.get_or_create( + email=reg_data['email'], + meeting=meeting, + defaults={ + 'first_name': reg_data['first_name'], + 'last_name': reg_data['last_name'], + 'person': person, + 'affiliation': reg_data['affiliation'], + 'country_code': reg_data['country_code'], + 'checkedin': reg_data['checkedin'], + } + ) + + # If not created, check if we need to update + if not created: + for field in ['first_name', 'last_name', 'affiliation', 'country_code', 'checkedin']: + if getattr(registration, field) != reg_data[field]: + log(f"sync_reg: found update {reg_data['email']}, {field} different, data from reg: {reg_data}") + setattr(registration, field, reg_data[field]) + fields_updated = True + + if fields_updated: + registration.save() + + # Process tickets - handle counting properly for multiple same-type tickets + # Build count dictionaries for existing and new tickets + existing_ticket_counts = {} + for ticket in registration.tickets.all(): + key = (ticket.attendance_type.slug, ticket.ticket_type.slug) + existing_ticket_counts[key] = existing_ticket_counts.get(key, 0) + 1 + + # Get new tickets from reg_data and count them + reg_data_ticket_counts = {} + for ticket_data in reg_data.get('tickets', []): + key = (ticket_data['attendance_type'], ticket_data['ticket_type']) + reg_data_ticket_counts[key] = reg_data_ticket_counts.get(key, 0) + 1 + + # Calculate tickets to add and remove + all_ticket_types = set(existing_ticket_counts.keys()) | set(reg_data_ticket_counts.keys()) + + for ticket_type in all_ticket_types: + existing_count = existing_ticket_counts.get(ticket_type, 0) + new_count = reg_data_ticket_counts.get(ticket_type, 0) + + # Delete excess tickets + if existing_count > new_count: + tickets_to_delete = existing_count - new_count + # Get all tickets of this type + matching_tickets = registration.tickets.filter( + attendance_type__slug=ticket_type[0], + ticket_type__slug=ticket_type[1] + ).order_by('id') # Use a consistent order for deterministic deletion + + # Delete the required number + log(f"sync_reg: deleting {tickets_to_delete} of {ticket_type[0]}:{ticket_type[1]} of {reg_data['email']}") + for ticket in matching_tickets[:tickets_to_delete]: + ticket.delete() + tickets_modified = True + + # Add missing tickets + elif 
new_count > existing_count: + tickets_to_add = new_count - existing_count + + # Create the new tickets + log(f"sync_reg: adding {tickets_to_add} of {ticket_type[0]}:{ticket_type[1]} of {reg_data['email']}") + for _ in range(tickets_to_add): + try: + RegistrationTicket.objects.create( + registration=registration, + attendance_type_id=ticket_type[0], + ticket_type_id=ticket_type[1], + ) + tickets_modified = True + except IntegrityError as e: + log(f"Error adding RegistrationTicket {e}") + # handle nomcom volunteer + if reg_data['is_nomcom_volunteer'] and person: + try: + nomcom = NomCom.objects.get(is_accepting_volunteers=True) + except (NomCom.DoesNotExist, NomCom.MultipleObjectsReturned): + nomcom = None + if nomcom: + Volunteer.objects.get_or_create( + nomcom=nomcom, + person=person, + defaults={ + "affiliation": reg_data["affiliation"], + "origin": "registration" + } + ) + + # set action_taken + if created: + log(f"sync_reg: created record. {reg_data['email']}") + action_taken = 'created' + elif fields_updated or tickets_modified: + action_taken = 'updated' + + return registration, action_taken + + +def fetch_attendance_from_meetings(meetings): + return [sync_registration_data(meeting) for meeting in meetings] diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 75444c5700..67a81305b4 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -1,15 +1,15 @@ -# Copyright The IETF Trust 2007-2022, All Rights Reserved +# Copyright The IETF Trust 2007-2024, All Rights Reserved # -*- coding: utf-8 -*- import csv import datetime -import glob import io import itertools import json import math import os + import pytz import re import tarfile @@ -19,21 +19,28 @@ from calendar import timegm from collections import OrderedDict, Counter, deque, defaultdict, namedtuple from functools import partialmethod -from urllib.parse import parse_qs, unquote, urlencode, urlsplit, urlunsplit +import jsonschema +from pathlib import Path +from urllib.parse import parse_qs, unquote, urlencode, urlsplit, urlunsplit, urlparse from tempfile import mkstemp from wsgiref.handlers import format_date_time +from itertools import chain from django import forms +from django.core.cache import caches +from django.core.files.storage import storages from django.shortcuts import render, redirect, get_object_or_404 from django.http import (HttpResponse, HttpResponseRedirect, HttpResponseForbidden, HttpResponseNotFound, Http404, HttpResponseBadRequest, - JsonResponse, HttpResponseGone, HttpResponseNotAllowed) + JsonResponse, HttpResponseGone, HttpResponseNotAllowed, + FileResponse) from django.conf import settings from django.contrib import messages from django.contrib.auth.decorators import login_required from django.core.exceptions import ValidationError +from django.core.files.uploadedfile import SimpleUploadedFile from django.core.validators import URLValidator -from django.urls import reverse,reverse_lazy +from django.urls import reverse, reverse_lazy, NoReverseMatch from django.db.models import F, Max, Q from django.forms.models import modelform_factory, inlineformset_factory from django.template import TemplateDoesNotExist @@ -44,18 +51,27 @@ from django.views.decorators.cache import cache_page from django.views.decorators.csrf import ensure_csrf_cookie, csrf_exempt from django.views.generic import RedirectView +from rest_framework.status import HTTP_404_NOT_FOUND import debug # pyflakes:ignore from ietf.doc.fields import SearchableDocumentsField -from ietf.doc.models import Document, State, DocEvent, 
NewRevisionDocEvent, DocAlias +from ietf.doc.models import Document, State, DocEvent, NewRevisionDocEvent +from ietf.doc.storage_utils import ( + remove_from_storage, + retrieve_bytes, + store_file, +) from ietf.group.models import Group from ietf.group.utils import can_manage_session_materials, can_manage_some_groups, can_manage_group from ietf.person.models import Person, User from ietf.ietfauth.utils import role_required, has_role, user_is_person from ietf.mailtrigger.utils import gather_address_lists -from ietf.meeting.models import Meeting, Session, Schedule, FloorPlan, SessionPresentation, TimeSlot, SlideSubmission -from ietf.meeting.models import SessionStatusName, SchedulingEvent, SchedTimeSessAssignment, Room, TimeSlotTypeName +from ietf.meeting.models import Meeting, Session, Schedule, FloorPlan, \ + SessionPresentation, TimeSlot, SlideSubmission, Attended +from ..blobdb.models import ResolvedMaterial +from ietf.meeting.models import ImportantDate, SessionStatusName, SchedulingEvent, SchedTimeSessAssignment, Room, TimeSlotTypeName +from ietf.meeting.models import Registration from ietf.meeting.forms import ( CustomDurationField, SwapDaysForm, SwapTimeslotsForm, ImportMinutesForm, TimeSlotCreateForm, TimeSlotEditForm, SessionCancelForm, SessionEditForm ) from ietf.meeting.helpers import get_person_by_email, get_schedule_by_name @@ -72,7 +88,14 @@ from ietf.meeting.helpers import send_interim_approval from ietf.meeting.helpers import send_interim_approval_request from ietf.meeting.helpers import send_interim_announcement_request, sessions_post_cancel -from ietf.meeting.utils import finalize, sort_accept_tuple, condition_slide_order +from ietf.meeting.utils import ( + condition_slide_order, + finalize, + generate_proceedings_content, + organize_proceedings_sessions, + resolve_uploaded_material, + sort_accept_tuple, store_blobs_for_one_material_doc, +) from ietf.meeting.utils import add_event_info_to_session_qs from ietf.meeting.utils import session_time_for_sorting from ietf.meeting.utils import session_requested_by, SaveMaterialsError @@ -83,15 +106,15 @@ from ietf.meeting.utils import swap_meeting_schedule_timeslot_assignments, bulk_create_timeslots from ietf.meeting.utils import preprocess_meeting_important_dates from ietf.meeting.utils import new_doc_for_session, write_doc_for_session -from ietf.meeting.utils import get_activity_stats, post_process, create_recording -from ietf.meeting.utils import participants_for_meeting +from ietf.meeting.utils import get_activity_stats, post_process, create_recording, delete_recording +from ietf.meeting.utils import participants_for_meeting, generate_bluesheet, bluesheet_data, save_bluesheet from ietf.message.utils import infer_message -from ietf.name.models import SlideSubmissionStatusName, ProceedingsMaterialTypeName, SessionPurposeName -from ietf.stats.models import MeetingRegistration +from ietf.name.models import SlideSubmissionStatusName, ProceedingsMaterialTypeName, SessionPurposeName, CountryName from ietf.utils import markdown from ietf.utils.decorators import require_api_key from ietf.utils.hedgedoc import Note, NoteError -from ietf.utils.log import assertion +from ietf.utils.meetecho import MeetechoAPIError, SlidesManager +from ietf.utils.log import assertion, log from ietf.utils.mail import send_mail_message, send_mail_text from ietf.utils.mime import get_mime_type from ietf.utils.pipe import pipe @@ -99,10 +122,17 @@ from ietf.utils.response import permission_denied from ietf.utils.text import xslugify from ietf.utils.timezone 
import datetime_today, date_today +from ietf.settings import YOUTUBE_DOMAINS from .forms import (InterimMeetingModelForm, InterimAnnounceForm, InterimSessionModelForm, InterimCancelForm, InterimSessionInlineFormSet, RequestMinutesForm, - UploadAgendaForm, UploadBlueSheetForm, UploadMinutesForm, UploadSlidesForm) + UploadAgendaForm, UploadBlueSheetForm, UploadMinutesForm, UploadSlidesForm, + UploadNarrativeMinutesForm) + +from icalendar import Calendar, Event +from ietf.doc.templatetags.ietf_filters import absurl +from ..api.ietf_utils import requires_api_token +from ..blobdb.storage import BlobdbStorage, BlobFile request_summary_exclude_group_types = ['team'] @@ -123,6 +153,10 @@ def send_interim_change_notice(request, meeting): message.related_groups.add(group) send_mail_message(request, message) +def parse_ical_line_endings(ical): + """Parse icalendar line endings to ensure they are RFC 5545 compliant""" + return re.sub(r'\r(?!\n)|(? 0: + if ".pdf" in ext_choices: + filename = ext_choices[".pdf"] + else: + filename = list(ext_choices.values())[0] + if not filename.exists(): + raise Http404(f"File not found: {filename}") old_proceedings_format = meeting.number.isdigit() and int(meeting.number) <= 96 if settings.MEETING_MATERIALS_SERVE_LOCALLY or old_proceedings_format: - with io.open(filename, 'rb') as file: - bytes = file.read() - - mtype, chset = get_mime_type(bytes) + bytes = filename.read_bytes() + mtype, chset = get_mime_type(bytes) # chset does not consider entire file! content_type = "%s; charset=%s" % (mtype, chset) - file_ext = os.path.splitext(filename) - if len(file_ext) == 2 and file_ext[1] == '.md' and mtype == 'text/plain': - sorted_accept = sort_accept_tuple(request.META.get('HTTP_ACCEPT')) + if filename.suffix == ".md" and mtype == "text/plain": + sorted_accept = sort_accept_tuple(request.META.get("HTTP_ACCEPT")) for atype in sorted_accept: if atype[0] == "text/markdown": content_type = content_type.replace("plain", "markdown", 1) break elif atype[0] == "text/html": + # Render markdown, allowing that charset may be inaccurate. + try: + md_src = bytes.decode( + "utf-8" if chset in ["ascii", "us-ascii"] else chset + ) + except UnicodeDecodeError: + # latin-1, aka iso8859-1, accepts all 8-bit code points + md_src = bytes.decode("latin-1") + content = markdown.markdown(md_src) # a string bytes = render_to_string( "minimal.html", { - "content": markdown.markdown(bytes.decode(encoding=chset)), - "title": basename, + "content": content, + "title": filename.name, + "static_ietf_org": settings.STATIC_IETF_ORG, }, - ) - content_type = content_type.replace("plain", "html", 1) + ).encode("utf-8") + content_type = "text/html; charset=utf-8" break elif atype[0] == "text/plain": break response = HttpResponse(bytes, content_type=content_type) - response['Content-Disposition'] = 'inline; filename="%s"' % basename + response["Content-Disposition"] = f'inline; filename="{filename.name}"' return response else: return HttpResponseRedirect(redirect_to=doc.get_href(meeting=meeting)) + +@requires_api_token("ietf.meeting.views.api_resolve_materials_name") +def api_resolve_materials_name_cached(request, document, num=None, ext=None): + """Resolve materials name into document to a blob spec + + Returns the bucket/name of a blob in the blob store that corresponds to the named + document. Handles resolution of revision if it is not specified and determines the + best extension if one is not provided. Response is JSON. 
+ + As of 2025-10-10 we do not have blobs for all materials documents or for every + format of every document. This API still returns the bucket/name as if the blob + exists. Another API will allow the caller to obtain the file contents using that + name if it cannot be retrieved from the blob store. + """ + + def _error_response(status: int, detail: str): + return JsonResponse( + { + "status": status, + "title": "Error", + "detail": detail, + }, + status=status, + ) + + def _response(bucket: str, name: str): + return JsonResponse( + { + "bucket": bucket, + "name": name, + } + ) + + try: + resolved = ResolvedMaterial.objects.get( + meeting_number=num, name=document + ) + except ResolvedMaterial.DoesNotExist: + return _error_response( + HTTP_404_NOT_FOUND, f"No suitable file for {document} for meeting {num}" + ) + return _response(bucket=resolved.bucket, name=resolved.blob) + + +@requires_api_token +def api_retrieve_materials_blob(request, bucket, name): + """Retrieve contents of a meeting materials blob + + This is intended as a fallback if the web worker cannot retrieve a blob from + the blobstore itself. The most likely cause is retrieving an old materials document + that has not been backfilled. + + If a blob is requested that does not exist, this checks for it on the filesystem + and if found, adds it to the blobstore, creates a StoredObject record, and returns + the contents as it would have done if the blob was already present. + + As a special case, if a requested file with extension `.md.html` does not exist + but a file with the same name but extension `.md` does, `.md` file will be rendered + from markdown to html and returned / stored. + """ + DEFAULT_CONTENT_TYPES = { + ".html": "text/html;charset=utf-8", + ".md": "text/markdown;charset=utf-8", + ".pdf": "application/pdf", + ".txt": "text/plain;charset=utf-8", + } + + def _default_content_type(blob_name: str): + return DEFAULT_CONTENT_TYPES.get(Path(name).suffix, "application/octet-stream") + + if not ( + settings.ENABLE_BLOBSTORAGE + and bucket in settings.MATERIALS_TYPES_SERVED_BY_WORKER + ): + return HttpResponseNotFound(f"Bucket {bucket} not found.") + storage = storages[bucket] # if not configured, a server error will result + assert isinstance(storage, BlobdbStorage) + try: + blob = storage.open(name, "rb") + except FileNotFoundError: + pass + else: + # found the blob - return it + assert isinstance(blob, BlobFile) + log(f"Materials blob: directly returning {bucket}:{name}") + return FileResponse( + blob, + filename=name, + content_type=blob.content_type or _default_content_type(name), + ) + + # Did not find the blob. Create it if we can + name_as_path = Path(name) + if name_as_path.suffixes == [".md", ".html"]: + # special case: .md.html means we want to create the .md and the .md.html + # will come along as a bonus + name_to_store = name_as_path.stem # removes the .html + else: + name_to_store = name + + # See if we have a meeting-related document that matches the requested bucket and + # name. + try: + doc, rev = _get_materials_doc(Path(name_to_store).stem) + if doc.type_id != bucket: + raise Document.DoesNotExist + except Document.DoesNotExist: + log(f"Materials blob: no doc for {bucket}:{name}") + return HttpResponseNotFound( + f"Document corresponding to {bucket}:{name} not found." 
+ ) + else: + # create all missing blobs for the doc while we're at it + log(f"Materials blob: storing blobs for {doc.name}-{doc.rev}") + store_blobs_for_one_material_doc(doc) + + # If we can make the blob at all, it now exists, so return it or a 404 + try: + blob = storage.open(name, "rb") + except FileNotFoundError: + log(f"Materials blob: no blob for {bucket}:{name}") + return HttpResponseNotFound(f"Object {bucket}:{name} not found.") + else: + # found the blob - return it + assert isinstance(blob, BlobFile) + return FileResponse( + blob, + filename=name, + content_type=blob.content_type or _default_content_type(name), + ) + + @login_required def materials_editable_groups(request, num=None): meeting = get_meeting(num) @@ -971,6 +1166,7 @@ def cubehelix(i, total, hue=1.2, start_angle=0.5): 'rtg' : { 'dark' : (222, 219, 124) , 'light' : (247, 247, 233) }, 'sec' : { 'dark' : (0, 114, 178) , 'light' : (245, 252, 248) }, 'tsv' : { 'dark' : (117,201,119) , 'light' : (251, 252, 255) }, + 'wit' : { 'dark' : (117,201,119) , 'light' : (251, 252, 255) }, # intentionally the same as tsv } for i, p in enumerate(session_parents): if p.acronym in liz_preferred_colors: @@ -1479,6 +1675,11 @@ def list_schedules(request, num): class DiffSchedulesForm(forms.Form): from_schedule = forms.ChoiceField() to_schedule = forms.ChoiceField() + show_room_changes = forms.BooleanField( + initial=False, + required=False, + help_text="Include changes to room without a date or time change", + ) def __init__(self, meeting, user, *args, **kwargs): super().__init__(*args, **kwargs) @@ -1511,6 +1712,14 @@ def diff_schedules(request, num): raw_diffs = diff_meeting_schedules(from_schedule, to_schedule) diffs = prefetch_schedule_diff_objects(raw_diffs) + if not form.cleaned_data["show_room_changes"]: + # filter out room-only changes + diffs = [ + d + for d in diffs + if (d["change"] != "move") or (d["from"].time != d["to"].time) + ] + for d in diffs: s = d['session'] s.session_label = s.short_name @@ -1613,7 +1822,6 @@ def agenda_plain(request, num=None, name=None, base=None, ext=None, owner=None, "now": timezone.now().astimezone(meeting.tz()), "display_timezone": display_timezone, "is_current_meeting": is_current_meeting, - "use_notes": meeting.uses_notes(), "cache_time": 150 if is_current_meeting else 3600, }, content_type=mimetype[ext], @@ -1644,15 +1852,33 @@ def agenda(request, num=None, name=None, base=None, ext=None, owner=None, utc="" } }) -@cache_page(5 * 60) -def api_get_agenda_data (request, num=None): + +def generate_agenda_data(num=None, force_refresh=False): + """Generate data for the api_get_agenda_data endpoint + + :num: meeting number + :force_refresh: True to force a refresh of the cache + """ meeting = get_ietf_meeting(num) if meeting is None: raise Http404("No such full IETF meeting") elif int(meeting.number) <= 64: - return Http404("Pre-IETF 64 meetings are not available through this API") - else: - pass + raise Http404("Pre-IETF 64 meetings are not available through this API") + is_current_meeting = meeting.number == get_current_ietf_meeting_num() + + cache = caches["agenda"] + cache_timeout = ( + settings.AGENDA_CACHE_TIMEOUT_CURRENT_MEETING + if is_current_meeting + else settings.AGENDA_CACHE_TIMEOUT_DEFAULT + ) + cache_format = "1" # bump this on backward-incompatible data format changes + + cache_key = f"generate_agenda_data:{meeting.number}:v{cache_format}" + if not force_refresh: + cached_value = cache.get(cache_key) + if cached_value is not None: + return cached_value # Select the schedule to show 
schedule = get_schedule(meeting, None) @@ -1668,14 +1894,13 @@ def api_get_agenda_data (request, num=None): filter_organizer = AgendaFilterOrganizer(assignments=filtered_assignments) - is_current_meeting = (num is None) or (num == get_current_ietf_meeting_num()) - # Get Floor Plans floors = FloorPlan.objects.filter(meeting=meeting).order_by('order') + + # Get Preliminary Agenda Date + prelimAgendaDate = ImportantDate.objects.filter(name_id="prelimagenda", meeting=meeting).first() - #debug.show('all([(item.acronym,item.session.order_number,item.session.order_in_meeting()) for item in filtered_assignments])') - - return JsonResponse({ + result = { "meeting": { "number": schedule.meeting.number, "city": schedule.meeting.city, @@ -1684,87 +1909,109 @@ def api_get_agenda_data (request, num=None): "updated": updated, "timezone": meeting.time_zone, "infoNote": schedule.meeting.agenda_info_note, - "warningNote": schedule.meeting.agenda_warning_note + "warningNote": schedule.meeting.agenda_warning_note, + "prelimAgendaDate": prelimAgendaDate.date.isoformat() if prelimAgendaDate else "" }, "categories": filter_organizer.get_filter_categories(), "isCurrentMeeting": is_current_meeting, - "useNotes": meeting.uses_notes(), + "usesNotes": meeting.uses_notes(), "schedule": list(map(agenda_extract_schedule, filtered_assignments)), "floors": list(map(agenda_extract_floorplan, floors)) - }) + } + cache.set(cache_key, result, timeout=cache_timeout) + return result -def api_get_session_materials (request, session_id=None): - session = get_object_or_404(Session,pk=session_id) + +def api_get_agenda_data(request, num=None): + return JsonResponse(generate_agenda_data(num, force_refresh=False)) + + +def api_get_session_materials(request, session_id=None): + session = get_object_or_404(Session, pk=session_id) minutes = session.minutes() slides_actions = [] - if can_manage_session_materials(request.user, session.group, session): - slides_actions.append({ - 'label': 'Upload slides', - 'url': reverse( - 'ietf.meeting.views.upload_session_slides', - kwargs={'num': session.meeting.number, 'session_id': session.pk}, - ), - }) - elif not session.is_material_submission_cutoff(): - slides_actions.append({ - 'label': 'Propose slides', - 'url': reverse( - 'ietf.meeting.views.propose_session_slides', - kwargs={'num': session.meeting.number, 'session_id': session.pk}, - ), - }) + if can_manage_session_materials(request.user, session.group, session) or not session.is_material_submission_cutoff(): + slides_actions.append( + { + "label": "Upload slides", + "url": reverse( + "ietf.meeting.views.upload_session_slides", + kwargs={"num": session.meeting.number, "session_id": session.pk}, + ), + } + ) else: pass # no action available if it's past cutoff - - agenda = session.agenda() + + agenda = session.agenda() agenda_url = agenda.get_href() if agenda is not None else None - return JsonResponse({ - "url": agenda_url, - "slides": { - "decks": list(map(agenda_extract_slide, session.slides())), - "actions": slides_actions, - }, - "minutes": { - "id": minutes.id, - "title": minutes.title, - "url": minutes.get_href(), - "ext": minutes.file_extension() - } if minutes is not None else None - }) + return JsonResponse( + { + "url": agenda_url, + "slides": { + "decks": [ + agenda_extract_slide(slide) | {"order": order} # add "order" field + for order, slide in enumerate(session.slides()) + ], + "actions": slides_actions, + }, + "minutes": { + "id": minutes.id, + "title": minutes.title, + "url": minutes.get_href(), + "ext": 
minutes.file_extension(), + } + if minutes is not None + else None, + } + ) -def agenda_extract_schedule (item): + +def agenda_extract_schedule(item): + if item.session.current_status == "resched": + resched_to = item.session.tombstone_for.official_timeslotassignment() + else: + resched_to = None return { "id": item.id, + "slug": item.slug(), "sessionId": item.session.id, - "room": item.room_name if item.timeslot.show_location else None, + "room": (item.timeslot.get_location() or None) if item.timeslot else None, "location": { "short": item.timeslot.location.floorplan.short, "name": item.timeslot.location.floorplan.name, } if (item.timeslot.show_location and item.timeslot.location and item.timeslot.location.floorplan) else {}, "acronym": item.acronym, - "duration": item.timeslot.duration.seconds, + "duration": item.timeslot.duration.total_seconds(), "name": item.session.name, + "slotId": item.timeslot.id, "slotName": item.timeslot.name, + "slotModified": item.timeslot.modified.isoformat(), "startDateTime": item.timeslot.time.isoformat(), "status": item.session.current_status, + "rescheduledTo": { + "startDateTime": resched_to.timeslot.time.isoformat(), + "duration": resched_to.timeslot.duration.total_seconds(), + } if resched_to is not None else {}, "type": item.session.type.slug, "purpose": item.session.purpose.slug, "isBoF": item.session.group_at_the_time().state_id == "bof", + "isProposed": item.session.group_at_the_time().state_id == "proposed", "filterKeywords": item.filter_keywords, "groupAcronym": item.session.group_at_the_time().acronym, "groupName": item.session.group_at_the_time().name, - "groupParent": { + "groupParent": ({ "acronym": item.session.group_parent_at_the_time().acronym - } if item.session.group_parent_at_the_time() else {}, + } if item.session.group_parent_at_the_time() else {}), "note": item.session.agenda_note, "remoteInstructions": item.session.remote_instructions, "flags": { "agenda": True if item.session.agenda() is not None else False, - "showAgenda": True if (item.session.agenda() is not None or item.session.remote_instructions or item.session.agenda_note) else False + "showAgenda": True if (item.session.agenda() is not None or item.session.remote_instructions) else False }, "agenda": { - "url": item.session.agenda().get_href() + "url": item.session.agenda().get_versionless_href() } if item.session.agenda() is not None else { "url": None }, @@ -1789,7 +2036,8 @@ def agenda_extract_schedule (item): # } } -def agenda_extract_floorplan (item): + +def agenda_extract_floorplan(item): try: item.image.width except FileNotFoundError: @@ -1802,10 +2050,11 @@ def agenda_extract_floorplan (item): "short": item.short, "width": item.image.width, "height": item.image.height, - "rooms": list(map(agenda_extract_room, item.room_set.all())) + "rooms": list(map(agenda_extract_room, item.room_set.all())), } -def agenda_extract_room (item): + +def agenda_extract_room(item): return { "id": item.id, "name": item.name, @@ -1817,7 +2066,8 @@ def agenda_extract_room (item): "bottom": item.bottom() } -def agenda_extract_recording (item): + +def agenda_extract_recording(item): return { "id": item.id, "name": item.name, @@ -1825,14 +2075,17 @@ def agenda_extract_recording (item): "url": item.external_url } -def agenda_extract_slide (item): + +def agenda_extract_slide(item): return { "id": item.id, "title": item.title, - "url": item.get_versionless_href(), - "ext": item.file_extension() + "rev": item.rev, + "url": item.get_href(), + "ext": item.file_extension(), } + def 
agenda_csv(schedule, filtered_assignments, utc=False): encoding = 'utf-8' response = HttpResponse(content_type=f"text/csv; charset={encoding}") @@ -1859,7 +2112,7 @@ def slides_field(item): write_row(headings) - tz = datetime.timezone.utc if utc else schedule.meeting.tz() + tz = datetime.UTC if utc else schedule.meeting.tz() for item in filtered_assignments: row = [] row.append(item.timeslot.time.astimezone(tz).strftime("%Y-%m-%d")) @@ -1927,8 +2180,10 @@ def agenda_by_type_ics(request,num=None,type=None): ).order_by('session__type__slug','timeslot__time') if type: assignments = assignments.filter(session__type__slug=type) - updated = meeting.updated() - return render(request,"meeting/agenda.ics",{"schedule":schedule,"updated":updated,"assignments":assignments},content_type="text/calendar") + + return render_icalendar(schedule, assignments) + + def session_draft_list(num, acronym): try: @@ -2048,6 +2303,246 @@ def ical_session_status(assignment): else: return "CONFIRMED" + +def render_icalendar_precomp(agenda_data): + ical_content = generate_agenda_ical_precomp(agenda_data) + return HttpResponse(ical_content, content_type="text/calendar") + + +def render_icalendar(schedule, assignments): + ical_content = generate_agenda_ical(schedule, assignments) + return HttpResponse(ical_content, content_type="text/calendar") + + +def generate_agenda_ical_precomp(agenda_data): + """Generate iCalendar from precomputed data using the icalendar library""" + + cal = Calendar() + cal.add("prodid", "-//IETF//datatracker.ietf.org ical agenda//EN") + cal.add("version", "2.0") + cal.add("method", "PUBLISH") + + meeting_data = agenda_data["meeting"] + for item in agenda_data["schedule"]: + event = Event() + + uid = f"ietf-{meeting_data["number"]}-{item["slotId"]}-{item["acronym"]}" + event.add("uid", uid) + + # add custom field with meeting's local TZ + event.add("x-meeting-tz", meeting_data["timezone"]) + + if item["name"]: + summary = item["name"] + else: + summary = f"{item["groupAcronym"]} - {item["groupName"]}" + + if item["note"]: + summary += f" ({item["note"]})" + + event.add("summary", summary) + + if item["room"]: + event.add("location", item["room"]) # room name + + if item["status"] == "canceled": + status = "CANCELLED" + elif item["status"] == "resched": + resched_to = item["rescheduledTo"] + if resched_to is None: + status = "RESCHEDULED" + else: + resched_start = datetime.datetime.fromisoformat( + resched_to["startDateTime"] + ) + dur = datetime.timedelta(seconds=resched_to["duration"]) + resched_end = resched_start + dur + formatted_start = resched_start.strftime("%A %H:%M").upper() + formatted_end = resched_end.strftime("%H:%M") + status = f"RESCHEDULED TO {formatted_start}-{formatted_end}" + else: + status = "CONFIRMED" + event.add("status", status) + + event.add("class", "PUBLIC") + + start_time = datetime.datetime.fromisoformat(item["startDateTime"]) + duration = datetime.timedelta(seconds=item["duration"]) + event.add("dtstart", start_time) + event.add("dtend", start_time + duration) + + # DTSTAMP: when the event was created or last modified (in UTC) + # n.b. 
timeslot.modified may not be an accurate measure of this + event.add("dtstamp", datetime.datetime.fromisoformat(item["slotModified"])) + + description_parts = [item["slotName"]] + + if item["note"]: + description_parts.append(f"Note: {item["note"]}") + + links = item["links"] + if links["onsiteTool"]: + description_parts.append(f"Onsite tool: {links["onsiteTool"]}") + + if links["videoStream"]: + description_parts.append(f"Meetecho: {links["videoStream"]}") + + if links["webex"]: + description_parts.append(f"Webex: {links["webex"]}") + + if item["remoteInstructions"]: + description_parts.append( + f"Remote instructions: {item["remoteInstructions"]}" + ) + + try: + materials_url = absurl( + "ietf.meeting.views.session_details", + num=meeting_data["number"], + acronym=item["acronym"], + ) + except NoReverseMatch: + pass + else: + description_parts.append(f"Session materials: {materials_url}") + event.add("url", materials_url) + + if meeting_data["number"].isdigit(): + try: + agenda_url = absurl("agenda", num=meeting_data["number"]) + except NoReverseMatch: + pass + else: + description_parts.append(f"See in schedule: {agenda_url}#row-{item["slug"]}") + + if item["agenda"] and item["agenda"]["url"]: + description_parts.append(f"Agenda {item["agenda"]["url"]}") + + # Join all description parts with 2 newlines + description = "\n\n".join(description_parts) + event.add("description", description) + + # Add event to calendar + cal.add_component(event) + + return cal.to_ical().decode("utf-8") + + +def generate_agenda_ical(schedule, assignments): + """Generate iCalendar using the icalendar library""" + + cal = Calendar() + cal.add("prodid", "-//IETF//datatracker.ietf.org ical agenda//EN") + cal.add("version", "2.0") + cal.add("method", "PUBLISH") + + for item in assignments: + event = Event() + + uid = f"ietf-{schedule.meeting.number}-{item.timeslot.pk}-{item.session.group.acronym}" + event.add("uid", uid) + + # add custom field with meeting's local TZ + event.add("x-meeting-tz", schedule.meeting.time_zone) + + if item.session.name: + summary = item.session.name + else: + group = item.session.group_at_the_time() + summary = f"{group.acronym} - {group.name}" + + if item.session.agenda_note: + summary += f" ({item.session.agenda_note})" + + event.add("summary", summary) + + if item.timeslot.show_location and item.timeslot.get_location(): + event.add("location", item.timeslot.get_location()) + + if item.session and hasattr(item.session, "current_status"): + status = ical_session_status(item) + else: + status = "" + event.add("status", status) + + event.add("class", "PUBLIC") + + event.add("dtstart", item.timeslot.utc_start_time()) + event.add("dtend", item.timeslot.utc_end_time()) + + # DTSTAMP: when the event was created or last modified (in UTC) + dtstamp = item.timeslot.modified.astimezone(pytz.UTC) + event.add("dtstamp", dtstamp) + + description_parts = [item.timeslot.name] + + if item.session.agenda_note: + description_parts.append(f"Note: {item.session.agenda_note}") + + if hasattr(item.session, "onsite_tool_url") and callable( + item.session.onsite_tool_url + ): + onsite_url = item.session.onsite_tool_url() + if onsite_url: + description_parts.append(f"Onsite tool: {onsite_url}") + + if hasattr(item.session, "video_stream_url") and callable( + item.session.video_stream_url + ): + video_url = item.session.video_stream_url() + if video_url: + description_parts.append(f"Meetecho: {video_url}") + + if ( + item.timeslot.location + and hasattr(item.timeslot.location, "webex_url") + and 
callable(item.timeslot.location.webex_url) + and item.timeslot.location.webex_url() is not None + ): + description_parts.append(f"Webex: {item.timeslot.location.webex_url()}") + + if item.session.remote_instructions: + description_parts.append( + f"Remote instructions: {item.session.remote_instructions}" + ) + + try: + materials_url = absurl( + "ietf.meeting.views.session_details", + num=schedule.meeting.number, + acronym=item.session.group.acronym, + ) + description_parts.append(f"Session materials: {materials_url}") + event.add("url", materials_url) + except: + pass + + if ( + hasattr(schedule.meeting, "get_number") + and schedule.meeting.get_number() is not None + ): + try: + agenda_url = absurl("agenda", num=schedule.meeting.number) + description_parts.append( + f"See in schedule: {agenda_url}#row-{item.slug()}" + ) + except: + pass + + agenda = item.session.agenda() + if agenda and hasattr(agenda, "get_versionless_href"): + agenda_url = agenda.get_versionless_href() + description_parts.append(f"{agenda.type} {agenda_url}") + + # Join all description parts with 2 newlines + description = "\n\n".join(description_parts) + event.add("description", description) + + # Add event to calendar + cal.add_component(event) + + return cal.to_ical().decode("utf-8") + def parse_agenda_filter_params(querydict): """Parse agenda filter parameters from a request""" if len(querydict) == 0: @@ -2067,10 +2562,66 @@ def parse_agenda_filter_params(querydict): def should_include_assignment(filter_params, assignment): """Decide whether to include an assignment""" - shown = len(set(filter_params['show']).intersection(assignment.filter_keywords)) > 0 - hidden = len(set(filter_params['hide']).intersection(assignment.filter_keywords)) > 0 + if hasattr(assignment, "filter_keywords"): + kw = assignment.filter_keywords + elif isinstance(assignment, dict): + kw = assignment.get("filterKeywords", []) + else: + raise ValueError("Unsupported assignment instance") + shown = len(set(filter_params['show']).intersection(kw)) > 0 + hidden = len(set(filter_params['hide']).intersection(kw)) > 0 return shown and not hidden + +def agenda_ical_ietf(meeting, filt_params, acronym=None, session_id=None): + agenda_data = generate_agenda_data(meeting.number, force_refresh=False) + if acronym: + agenda_data["schedule"] = [ + item + for item in agenda_data["schedule"] + if item["groupAcronym"] == acronym + ] + elif session_id: + agenda_data["schedule"] = [ + item + for item in agenda_data["schedule"] + if item["sessionId"] == session_id + ] + if filt_params is not None: + # Apply the filter + agenda_data["schedule"] = [ + item + for item in agenda_data["schedule"] + if should_include_assignment(filt_params, item) + ] + return render_icalendar_precomp(agenda_data) + + +def agenda_ical_interim(meeting, filt_params, acronym=None, session_id=None): + schedule = get_schedule(meeting) + + if schedule is None and acronym is None and session_id is None: + raise Http404 + + assignments = SchedTimeSessAssignment.objects.filter( + schedule__in=[schedule, schedule.base], + session__on_agenda=True, + ) + assignments = preprocess_assignments_for_agenda(assignments, meeting) + AgendaKeywordTagger(assignments=assignments).apply() + + if filt_params is not None: + # Apply the filter + assignments = [a for a in assignments if should_include_assignment(filt_params, a)] + + if acronym: + assignments = [ a for a in assignments if a.session.group_at_the_time().acronym == acronym ] + elif session_id: + assignments = [ a for a in assignments if a.session_id == 
int(session_id) ] + + return render_icalendar(schedule, assignments) + + def agenda_ical(request, num=None, acronym=None, session_id=None): """Agenda ical view @@ -2098,42 +2649,20 @@ def agenda_ical(request, num=None, acronym=None, session_id=None): raise Http404 else: meeting = get_meeting(num, type_in=None) # get requested meeting, whatever its type - schedule = get_schedule(meeting) - updated = meeting.updated() - - if schedule is None and acronym is None and session_id is None: - raise Http404 - assignments = SchedTimeSessAssignment.objects.filter( - schedule__in=[schedule, schedule.base], - session__on_agenda=True, - ) - assignments = preprocess_assignments_for_agenda(assignments, meeting) - AgendaKeywordTagger(assignments=assignments).apply() + if isinstance(session_id, str) and session_id.isdigit(): + session_id = int(session_id) try: filt_params = parse_agenda_filter_params(request.GET) except ValueError as e: return HttpResponseBadRequest(str(e)) - if filt_params is not None: - # Apply the filter - assignments = [a for a in assignments if should_include_assignment(filt_params, a)] - - if acronym: - assignments = [ a for a in assignments if a.session.group_at_the_time().acronym == acronym ] - elif session_id: - assignments = [ a for a in assignments if a.session_id == int(session_id) ] - - for a in assignments: - if a.session: - a.session.ical_status = ical_session_status(a) + if meeting.type_id == "ietf": + return agenda_ical_ietf(meeting, filt_params, acronym, session_id) + else: + return agenda_ical_interim(meeting, filt_params, acronym, session_id) - return render(request, "meeting/agenda.ics", { - "schedule": schedule, - "assignments": assignments, - "updated": updated - }, content_type="text/calendar") @cache_page(15 * 60) def agenda_json(request, num=None): @@ -2157,7 +2686,7 @@ def agenda_json(request, num=None): # time of the meeting assignments = preprocess_assignments_for_agenda(assignments, meeting, extra_prefetches=[ "session__materials__docevent_set", - "session__sessionpresentation_set", + "session__presentations", "timeslot__meeting" ]) for asgn in assignments: @@ -2427,12 +2956,28 @@ def session_details(request, num, acronym): session.cancelled = session.current_status in Session.CANCELED_STATUSES session.status = status_names.get(session.current_status, session.current_status) - session.filtered_artifacts = list(session.sessionpresentation_set.filter(document__type__slug__in=['agenda','minutes','bluesheets'])) - session.filtered_artifacts.sort(key=lambda d:['agenda','minutes','bluesheets'].index(d.document.type.slug)) - session.filtered_slides = session.sessionpresentation_set.filter(document__type__slug='slides').order_by('order') - session.filtered_drafts = session.sessionpresentation_set.filter(document__type__slug='draft') - session.filtered_chatlog_and_polls = session.sessionpresentation_set.filter(document__type__slug__in=('chatlog', 'polls')).order_by('document__type__slug') - # TODO FIXME Deleted materials shouldn't be in the sessionpresentation_set + if session.meeting.type_id == 'ietf' and not session.meeting.proceedings_final: + artifact_types = ['agenda','minutes','narrativeminutes'] + if Attended.objects.filter(session=session).exists(): + session.type_counter.update(['bluesheets']) + ota = session.official_timeslotassignment() + sess_time = ota and ota.timeslot.time + session.bluesheet_title = 'Attendance IETF%s: %s : %s' % (session.meeting.number, + session.group.acronym, + sess_time.strftime("%a %H:%M")) + else: + artifact_types = 
['agenda','minutes','narrativeminutes','bluesheets'] + session.filtered_artifacts = list(session.presentations.filter(document__type__slug__in=artifact_types)) + session.filtered_artifacts.sort(key=lambda d:artifact_types.index(d.document.type.slug)) + session.filtered_slides = session.presentations.filter(document__type__slug='slides').order_by('order') + session.filtered_drafts = session.presentations.filter(document__type__slug='draft') + + filtered_polls = session.presentations.filter(document__type__slug=('polls')) + filtered_chatlogs = session.presentations.filter(document__type__slug=('chatlog')) + session.filtered_chatlog_and_polls = chain(filtered_chatlogs, filtered_polls) + session.chatlog = filtered_chatlogs.first() + + # TODO FIXME Deleted materials shouldn't be in the presentations for qs in [session.filtered_artifacts,session.filtered_slides,session.filtered_drafts]: qs = [p for p in qs if p.document.get_state_slug(p.document.type_id)!='deleted'] session.type_counter.update([p.document.type.slug for p in qs]) @@ -2448,13 +2993,17 @@ def session_details(request, num, acronym): scheduled_sessions = [s for s in sessions if s.current_status == 'sched'] unscheduled_sessions = [s for s in sessions if s.current_status != 'sched'] - pending_suggestions = None - if request.user.is_authenticated: - if can_manage: - pending_suggestions = session.slidesubmission_set.filter(status__slug='pending') - else: - pending_suggestions = session.slidesubmission_set.filter(status__slug='pending', submitter=request.user.person) + # Start with all the pending suggestions for all the group's sessions + pending_suggestions = SlideSubmission.objects.filter(session__in=sessions, status__slug='pending') + if can_manage: + pass # keep the full set + elif hasattr(request.user, "person"): + pending_suggestions = pending_suggestions.filter(submitter=request.user.person) + else: + pending_suggestions = SlideSubmission.objects.none() + tsa = session.official_timeslotassignment() + future = tsa is not None and timezone.now() < tsa.timeslot.end_time() return render(request, "meeting/session_details.html", { 'scheduled_sessions':scheduled_sessions , 'unscheduled_sessions':unscheduled_sessions , @@ -2465,7 +3014,7 @@ def session_details(request, num, acronym): 'can_manage_materials' : can_manage, 'can_view_request': can_view_request, 'thisweek': datetime_today()-datetime.timedelta(days=7), - 'use_notes': meeting.uses_notes(), + 'future': future, }) class SessionDraftsForm(forms.Form): @@ -2490,7 +3039,7 @@ def add_session_drafts(request, session_id, num): if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"): raise Http404 - already_linked = [sp.document for sp in session.sessionpresentation_set.filter(document__type_id='draft')] + already_linked = [sp.document for sp in session.presentations.filter(document__type_id='draft')] session_number = None sessions = get_sessions(session.meeting.number,session.group.acronym) @@ -2501,7 +3050,7 @@ def add_session_drafts(request, session_id, num): form = SessionDraftsForm(request.POST,already_linked=already_linked) if form.is_valid(): for draft in form.cleaned_data['drafts']: - session.sessionpresentation_set.create(document=draft,rev=None) + session.presentations.create(document=draft,rev=None) c = DocEvent(type="added_comment", doc=draft, rev=draft.rev, by=request.user.person) c.desc = "Added to session: %s" % session c.save() @@ -2512,13 +3061,156 @@ def add_session_drafts(request, session_id, num): return render(request, 
"meeting/add_session_drafts.html", { 'session': session, 'session_number': session_number, - 'already_linked': session.sessionpresentation_set.filter(document__type_id='draft'), + 'already_linked': session.presentations.filter(document__type_id='draft'), 'form': form, }) +class SessionRecordingsForm(forms.Form): + title = forms.CharField(max_length=255) + url = forms.URLField(label="URL of the recording (YouTube only)") -def upload_session_bluesheets(request, session_id, num): - # num is redundant, but we're dragging it along an artifact of where we are in the current URL structure + def clean_url(self): + url = self.cleaned_data['url'] + parsed_url = urlparse(url) + if parsed_url.hostname not in YOUTUBE_DOMAINS: + raise forms.ValidationError("Must be a YouTube URL") + return url + + +def add_session_recordings(request, session_id, num): + # num is redundant, but we're dragging it along an artifact of where we are in the current URL structure + session = get_object_or_404(Session, pk=session_id) + if not session.can_manage_materials(request.user): + permission_denied( + request, "You don't have permission to manage recordings for this session." + ) + if session.is_material_submission_cutoff() and not has_role( + request.user, "Secretariat" + ): + raise Http404 + + session_number = None + official_timeslotassignment = session.official_timeslotassignment() + assertion("official_timeslotassignment is not None") + initial = { + "title": "Video recording of {acronym} for {timestamp}".format( + acronym=session.group.acronym, + timestamp=official_timeslotassignment.timeslot.utc_start_time().strftime( + "%Y-%m-%d %H:%M" + ), + ) + } + + # find session number if WG has more than one session at the meeting + sessions = get_sessions(session.meeting.number, session.group.acronym) + if len(sessions) > 1: + session_number = 1 + sessions.index(session) + + presentations = session.presentations.filter( + document__in=session.get_material("recording", only_one=False), + ).order_by("document__title", "document__external_url") + + if request.method == "POST": + pk_to_delete = request.POST.get("delete", None) + if pk_to_delete is not None: + session_presentation = get_object_or_404(presentations, pk=pk_to_delete) + try: + delete_recording(session_presentation) + except ValueError as err: + log(f"Error deleting recording from session {session.pk}: {err}") + messages.error( + request, + "Unable to delete this recording. 
Please contact the secretariat for assistance.", + ) + form = SessionRecordingsForm(initial=initial) + else: + form = SessionRecordingsForm(request.POST) + if form.is_valid(): + title = form.cleaned_data["title"] + url = form.cleaned_data["url"] + create_recording(session, url, title=title, user=request.user.person) + return redirect( + "ietf.meeting.views.session_details", + num=session.meeting.number, + acronym=session.group.acronym, + ) + else: + form = SessionRecordingsForm(initial=initial) + + return render( + request, + "meeting/add_session_recordings.html", + { + "session": session, + "session_number": session_number, + "already_linked": presentations, + "form": form, + }, + ) + + +def session_attendance(request, session_id, num): + """Session attendance view + + GET - retrieve the current session attendance or redirect to the published bluesheet if finalized + + POST - self-attest attendance for logged-in user; falls through to GET for AnonymousUser or invalid request + """ + # num is redundant, but we're dragging it along as an artifact of where we are in the current URL structure + session = get_object_or_404(Session, pk=session_id) + if session.meeting.type_id != "ietf" or session.meeting.proceedings_final: + bluesheets = session.presentations.filter( + document__type_id="bluesheets" + ) + if bluesheets: + bluesheet = bluesheets[0].document + return redirect(bluesheet.get_href(session.meeting)) + else: + raise Http404("Bluesheets not found") + + cor_cut_off_date = session.meeting.get_submission_correction_date() + today_utc = date_today(datetime.UTC) + was_there = False + can_add = False + if request.user.is_authenticated: + # use getattr() instead of request.user.person because it's a reverse OneToOne field + person = getattr(request.user, "person", None) + # Consider allowing self-declared attendance if we have a person and at least one Attended instance exists. + # The latter condition will be satisfied when Meetecho pushes their attendee records - assuming that at least + # one person will have accessed the meeting tool. This prevents people from self-declaring before they are + # marked as attending if they did log in to the meeting tool (except for a tiny window while records are + # being processed). + if person is not None and Attended.objects.filter(session=session).exists(): + was_there = Attended.objects.filter(session=session, person=person).exists() + can_add = ( + today_utc <= cor_cut_off_date + and Registration.objects.filter( + meeting=session.meeting, person=person + ).exists() + and not was_there + ) + if can_add and request.method == "POST": + session.attended_set.get_or_create( + person=person, defaults={"origin": "self declared"} + ) + can_add = False + was_there = True + + data = bluesheet_data(session) + return render( + request, + "meeting/attendance.html", + { + "session": session, + "data": data, + "can_add": can_add, + "was_there": was_there, + }, + ) + + +def upload_session_bluesheets(request, session_id, num): + # num is redundant, but we're dragging it along an artifact of where we are in the current URL structure session = get_object_or_404(Session,pk=session_id) if not session.can_manage_materials(request.user): @@ -2542,7 +3234,10 @@ def upload_session_bluesheets(request, session_id, num): ota = session.official_timeslotassignment() sess_time = ota and ota.timeslot.time if not sess_time: - return HttpResponseGone("Cannot receive uploads for an unscheduled session. 
Please check the session ID.", content_type="text/plain") + return HttpResponseGone( + "Cannot receive uploads for an unscheduled session. Please check the session ID.", + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) save_error = save_bluesheet(request, session, file, encoding=form.file_encoding[file.name]) @@ -2554,7 +3249,7 @@ def upload_session_bluesheets(request, session_id, num): else: form = UploadBlueSheetForm() - bluesheet_sp = session.sessionpresentation_set.filter(document__type='bluesheets').first() + bluesheet_sp = session.presentations.filter(document__type='bluesheets').first() return render(request, "meeting/upload_session_bluesheets.html", {'session': session, @@ -2564,48 +3259,6 @@ def upload_session_bluesheets(request, session_id, num): }) -def save_bluesheet(request, session, file, encoding='utf-8'): - bluesheet_sp = session.sessionpresentation_set.filter(document__type='bluesheets').first() - _, ext = os.path.splitext(file.name) - - if bluesheet_sp: - doc = bluesheet_sp.document - doc.rev = '%02d' % (int(doc.rev)+1) - bluesheet_sp.rev = doc.rev - bluesheet_sp.save() - else: - ota = session.official_timeslotassignment() - sess_time = ota and ota.timeslot.time - - if session.meeting.type_id=='ietf': - name = 'bluesheets-%s-%s-%s' % (session.meeting.number, - session.group.acronym, - sess_time.strftime("%Y%m%d%H%M")) - title = 'Bluesheets IETF%s: %s : %s' % (session.meeting.number, - session.group.acronym, - sess_time.strftime("%a %H:%M")) - else: - name = 'bluesheets-%s-%s' % (session.meeting.number, sess_time.strftime("%Y%m%d%H%M")) - title = 'Bluesheets %s: %s' % (session.meeting.number, sess_time.strftime("%a %H:%M")) - doc = Document.objects.create( - name = name, - type_id = 'bluesheets', - title = title, - group = session.group, - rev = '00', - ) - doc.states.add(State.objects.get(type_id='bluesheets',slug='active')) - DocAlias.objects.create(name=doc.name).docs.add(doc) - session.sessionpresentation_set.create(document=doc,rev='00') - filename = '%s-%s%s'% ( doc.name, doc.rev, ext) - doc.uploaded_filename = filename - e = NewRevisionDocEvent.objects.create(doc=doc, rev=doc.rev, by=request.user.person, type='new_revision', desc='New revision available: %s'%doc.rev) - save_error = handle_upload_file(file, filename, session.meeting, 'bluesheets', request=request, encoding=encoding) - if not save_error: - doc.save_with_history([e]) - return save_error - - def upload_session_minutes(request, session_id, num): # num is redundant, but we're dragging it along an artifact of where we are in the current URL structure session = get_object_or_404(Session,pk=session_id) @@ -2621,7 +3274,7 @@ def upload_session_minutes(request, session_id, num): if len(sessions) > 1: session_number = 1 + sessions.index(session) - minutes_sp = session.sessionpresentation_set.filter(document__type='minutes').first() + minutes_sp = session.presentations.filter(document__type='minutes').first() if request.method == 'POST': form = UploadMinutesForm(show_apply_to_all_checkbox,request.POST,request.FILES) @@ -2645,24 +3298,130 @@ def upload_session_minutes(request, session_id, num): except SessionNotScheduledError: return HttpResponseGone( "Cannot receive uploads for an unscheduled session. Please check the session ID.", - content_type="text/plain", + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", ) except SaveMaterialsError as err: form.add_error(None, str(err)) else: # no exception -- success! 
+ resolve_uploaded_material(meeting=session.meeting, doc=session.minutes()) messages.success(request, f'Successfully uploaded minutes as revision {session.minutes().rev}.') return redirect('ietf.meeting.views.session_details', num=num, acronym=session.group.acronym) else: form = UploadMinutesForm(show_apply_to_all_checkbox) + tsa = session.official_timeslotassignment() + future = tsa is not None and timezone.now() < tsa.timeslot.end_time() return render(request, "meeting/upload_session_minutes.html", {'session': session, 'session_number': session_number, 'minutes_sp' : minutes_sp, 'form': form, + 'future': future, }) +@role_required("Secretariat") +def upload_session_narrativeminutes(request, session_id, num): + # num is redundant, but we're dragging it along an artifact of where we are in the current URL structure + session = get_object_or_404(Session,pk=session_id) + if session.group.acronym != "iesg": + raise Http404() + + session_number = None + sessions = get_sessions(session.meeting.number,session.group.acronym) + show_apply_to_all_checkbox = len(sessions) > 1 if session.type_id == 'regular' else False + if len(sessions) > 1: + session_number = 1 + sessions.index(session) + + narrativeminutes_sp = session.presentations.filter(document__type='narrativeminutes').first() + + if request.method == 'POST': + form = UploadNarrativeMinutesForm(show_apply_to_all_checkbox,request.POST,request.FILES) + if form.is_valid(): + file = request.FILES['file'] + _, ext = os.path.splitext(file.name) + apply_to_all = session.type_id == 'regular' + if show_apply_to_all_checkbox: + apply_to_all = form.cleaned_data['apply_to_all'] + + # Set up the new revision + try: + save_session_minutes_revision( + session=session, + apply_to_all=apply_to_all, + file=file, + ext=ext, + encoding=form.file_encoding[file.name], + request=request, + narrative=True + ) + except SessionNotScheduledError: + return HttpResponseGone( + "Cannot receive uploads for an unscheduled session. Please check the session ID.", + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) + except SaveMaterialsError as err: + form.add_error(None, str(err)) + else: + # no exception -- success! 
+ resolve_uploaded_material(meeting=session.meeting, doc=session.narrative_minutes()) + messages.success(request, f'Successfully uploaded narrative minutes as revision {session.narrative_minutes().rev}.') + return redirect('ietf.meeting.views.session_details', num=num, acronym=session.group.acronym) + else: + form = UploadMinutesForm(show_apply_to_all_checkbox) + + return render(request, "meeting/upload_session_narrativeminutes.html", + {'session': session, + 'session_number': session_number, + 'minutes_sp' : narrativeminutes_sp, + 'form': form, + }) + +class UploadOrEnterAgendaForm(UploadAgendaForm): + ACTIONS = [ + ("upload", "Upload agenda"), + ("enter", "Enter agenda"), + ] + submission_method = forms.ChoiceField(choices=ACTIONS, widget=forms.RadioSelect) + + content = forms.CharField(widget=forms.Textarea, required=False, strip=False, label="Agenda text") + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.fields["file"].required=False + self.order_fields(["submission_method", "file", "content"]) + + def clean_content(self): + return self.cleaned_data["content"].replace("\r", "") + + def clean_file(self): + submission_method = self.cleaned_data.get("submission_method") + if submission_method == "upload": + if self.cleaned_data.get("file", None) is not None: + return super().clean_file() + return None + + def clean(self): + def require_field(f): + if not self.cleaned_data.get(f): + self.add_error(f, ValidationError("You must fill in this field.")) + + submission_method = self.cleaned_data.get("submission_method") + if submission_method == "upload": + require_field("file") + elif submission_method == "enter": + require_field("content") + + def get_file(self): + """Get content as a file-like object""" + if self.cleaned_data.get("submission_method") == "upload": + return self.cleaned_data["file"] + else: + return SimpleUploadedFile( + name="uploaded.md", + content=self.cleaned_data["content"].encode("utf-8"), + content_type="text/markdown;charset=utf-8", + ) def upload_session_agenda(request, session_id, num): # num is redundant, but we're dragging it along an artifact of where we are in the current URL structure @@ -2679,12 +3438,12 @@ def upload_session_agenda(request, session_id, num): if len(sessions) > 1: session_number = 1 + sessions.index(session) - agenda_sp = session.sessionpresentation_set.filter(document__type='agenda').first() + agenda_sp = session.presentations.filter(document__type='agenda').first() if request.method == 'POST': - form = UploadAgendaForm(show_apply_to_all_checkbox,request.POST,request.FILES) + form = UploadOrEnterAgendaForm(show_apply_to_all_checkbox,request.POST,request.FILES) if form.is_valid(): - file = request.FILES['file'] + file = form.get_file() _, ext = os.path.splitext(file.name) apply_to_all = session.type.slug == 'regular' if show_apply_to_all_checkbox: @@ -2698,7 +3457,10 @@ def upload_session_agenda(request, session_id, num): ota = session.official_timeslotassignment() sess_time = ota and ota.timeslot.time if not sess_time: - return HttpResponseGone("Cannot receive uploads for an unscheduled session. Please check the session ID.", content_type="text/plain") + return HttpResponseGone( + "Cannot receive uploads for an unscheduled session. 
Please check the session ID.", + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) if session.meeting.type_id=='ietf': name = 'agenda-%s-%s' % (session.meeting.number, session.group.acronym) @@ -2724,32 +3486,40 @@ def upload_session_agenda(request, session_id, num): group = session.group, rev = '00', ) - DocAlias.objects.create(name=doc.name).docs.add(doc) doc.states.add(State.objects.get(type_id='agenda',slug='active')) - if session.sessionpresentation_set.filter(document=doc).exists(): - sp = session.sessionpresentation_set.get(document=doc) + if session.presentations.filter(document=doc).exists(): + sp = session.presentations.get(document=doc) sp.rev = doc.rev sp.save() else: - session.sessionpresentation_set.create(document=doc,rev=doc.rev) + session.presentations.create(document=doc,rev=doc.rev) if apply_to_all: for other_session in sessions: if other_session != session: - other_session.sessionpresentation_set.filter(document__type='agenda').delete() - other_session.sessionpresentation_set.create(document=doc,rev=doc.rev) + other_session.presentations.filter(document__type='agenda').delete() + other_session.presentations.create(document=doc,rev=doc.rev) filename = '%s-%s%s'% ( doc.name, doc.rev, ext) doc.uploaded_filename = filename e = NewRevisionDocEvent.objects.create(doc=doc,by=request.user.person,type='new_revision',desc='New revision available: %s'%doc.rev,rev=doc.rev) # The way this function builds the filename it will never trigger the file delete in handle_file_upload. - save_error = handle_upload_file(file, filename, session.meeting, 'agenda', request=request, encoding=form.file_encoding[file.name]) + try: + encoding=form.file_encoding[file.name] + except AttributeError: + encoding=None + save_error = handle_upload_file(file, filename, session.meeting, 'agenda', request=request, encoding=encoding) if save_error: form.add_error(None, save_error) else: doc.save_with_history([e]) + resolve_uploaded_material(meeting=session.meeting, doc=doc) messages.success(request, f'Successfully uploaded agenda as revision {doc.rev}.') return redirect('ietf.meeting.views.session_details',num=num,acronym=session.group.acronym) else: - form = UploadAgendaForm(show_apply_to_all_checkbox, initial={'apply_to_all':session.type_id=='regular'}) + initial={'apply_to_all':session.type_id=='regular', 'submission_method':'upload'} + if agenda_sp: + doc = agenda_sp.document + initial['content'] = doc.text() + form = UploadOrEnterAgendaForm(show_apply_to_all_checkbox, initial=initial) return render(request, "meeting/upload_session_agenda.html", {'session': session, @@ -2759,299 +3529,495 @@ def upload_session_agenda(request, session_id, num): }) +@login_required def upload_session_slides(request, session_id, num, name=None): + """Upload new or replacement slides for a session + + If name is None or "", expects a new set of slides. Otherwise, replaces the named slides with a new rev. + """ # num is redundant, but we're dragging it along an artifact of where we are in the current URL structure - session = get_object_or_404(Session,pk=session_id) - if not session.can_manage_materials(request.user): - permission_denied(request, "You don't have permission to upload slides for this session.") - if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"): - permission_denied(request, "The materials cutoff for this session has passed. 
Contact the secretariat for further action.") + session = get_object_or_404(Session, pk=session_id) + can_manage = session.can_manage_materials(request.user) + if session.is_material_submission_cutoff() and not has_role( + request.user, "Secretariat" + ): + permission_denied( + request, + "The materials cutoff for this session has passed. Contact the secretariat for further action.", + ) session_number = None - sessions = get_sessions(session.meeting.number,session.group.acronym) - show_apply_to_all_checkbox = len(sessions) > 1 if session.type_id == 'regular' else False + sessions = get_sessions(session.meeting.number, session.group.acronym) + show_apply_to_all_checkbox = ( + len(sessions) > 1 if session.type_id == "regular" else False + ) if len(sessions) > 1: - session_number = 1 + sessions.index(session) + session_number = 1 + sessions.index(session) - slides = None - slides_sp = None + doc = None if name: - slides = Document.objects.filter(name=name).first() - if not (slides and slides.type_id=='slides'): - raise Http404 - slides_sp = session.sessionpresentation_set.filter(document=slides).first() - - if request.method == 'POST': - form = UploadSlidesForm(session, show_apply_to_all_checkbox,request.POST,request.FILES) + doc = get_object_or_404( + session.presentations, document__name=name, document__type_id="slides" + ).document + + if request.method == "POST": + form = UploadSlidesForm( + session, show_apply_to_all_checkbox, can_manage, request.POST, request.FILES + ) if form.is_valid(): - file = request.FILES['file'] + file = request.FILES["file"] _, ext = os.path.splitext(file.name) - apply_to_all = session.type_id == 'regular' + apply_to_all = session.type_id == "regular" if show_apply_to_all_checkbox: - apply_to_all = form.cleaned_data['apply_to_all'] - if slides_sp: - doc = slides_sp.document - doc.rev = '%02d' % (int(doc.rev)+1) - doc.title = form.cleaned_data['title'] - slides_sp.rev = doc.rev - slides_sp.save() + apply_to_all = form.cleaned_data["apply_to_all"] + if can_manage: + approved = form.cleaned_data["approved"] else: + approved = False + + # Propose slides if not auto-approved + if not approved: title = form.cleaned_data['title'] + submission = SlideSubmission.objects.create(session = session, title = title, filename = '', apply_to_all = apply_to_all, submitter=request.user.person) + if session.meeting.type_id=='ietf': name = 'slides-%s-%s' % (session.meeting.number, - session.group.acronym) + session.group.acronym) if not apply_to_all: name += '-%s' % (session.docname_token(),) else: name = 'slides-%s-%s' % (session.meeting.number, session.docname_token()) name = name + '-' + slugify(title).replace('_', '-')[:128] + filename = '%s-ss%d%s'% (name, submission.id, ext) + destination = io.open(os.path.join(settings.SLIDE_STAGING_PATH, filename),'wb+') + for chunk in file.chunks(): + destination.write(chunk) + destination.close() + file.seek(0) + store_file("staging", filename, file) + + submission.filename = filename + submission.save() + + (to, cc) = gather_address_lists('slides_proposed', group=session.group, proposer=request.user.person).as_strings() + msg_txt = render_to_string("meeting/slides_proposed.txt", { + "to": to, + "cc": cc, + "submission": submission, + "settings": settings, + }) + msg = infer_message(msg_txt) + msg.by = request.user.person + msg.save() + send_mail_message(request, msg) + messages.success(request, 'Successfully submitted proposed slides.') + return redirect('ietf.meeting.views.session_details',num=num,acronym=session.group.acronym) + + # 
Handle creation / update of the Document (but do not save yet) + if doc is not None: + # This is a revision - bump the version and update the title. + doc.rev = "%02d" % (int(doc.rev) + 1) + doc.title = form.cleaned_data["title"] + else: + # This is a new slide deck - create a new doc unless one exists with that name + title = form.cleaned_data["title"] + if session.meeting.type_id == "ietf": + name = "slides-%s-%s" % ( + session.meeting.number, + session.group.acronym, + ) + if not apply_to_all: + name += "-%s" % (session.docname_token(),) + else: + name = "slides-%s-%s" % ( + session.meeting.number, + session.docname_token(), + ) + name = name + "-" + slugify(title).replace("_", "-")[:128] if Document.objects.filter(name=name).exists(): - doc = Document.objects.get(name=name) - doc.rev = '%02d' % (int(doc.rev)+1) - doc.title = form.cleaned_data['title'] + doc = Document.objects.get(name=name) + doc.rev = "%02d" % (int(doc.rev) + 1) + doc.title = form.cleaned_data["title"] else: doc = Document.objects.create( - name = name, - type_id = 'slides', - title = title, - group = session.group, - rev = '00', - ) - DocAlias.objects.create(name=doc.name).docs.add(doc) - doc.states.add(State.objects.get(type_id='slides',slug='active')) - doc.states.add(State.objects.get(type_id='reuse_policy',slug='single')) - if session.sessionpresentation_set.filter(document=doc).exists(): - sp = session.sessionpresentation_set.get(document=doc) - sp.rev = doc.rev - sp.save() - else: - max_order = session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0 - session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1) - if apply_to_all: - for other_session in sessions: - if other_session != session and not other_session.sessionpresentation_set.filter(document=doc).exists(): - max_order = other_session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0 - other_session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1) - filename = '%s-%s%s'% ( doc.name, doc.rev, ext) + name=name, + type_id="slides", + title=title, + group=session.group, + rev="00", + ) + doc.states.add(State.objects.get(type_id="slides", slug="active")) + doc.states.add(State.objects.get(type_id="reuse_policy", slug="single")) + + # Now handle creation / update of the SessionPresentation(s) + sessions_to_apply = sessions if apply_to_all else [session] + added_presentations = [] + revised_presentations = [] + for sess in sessions_to_apply: + sp = sess.presentations.filter(document=doc).first() + if sp is not None: + sp.rev = doc.rev + sp.save() + revised_presentations.append(sp) + else: + max_order = ( + sess.presentations.filter(document__type="slides").aggregate( + Max("order") + )["order__max"] + or 0 + ) + sp = sess.presentations.create( + document=doc, rev=doc.rev, order=max_order + 1 + ) + added_presentations.append(sp) + + # Now handle the uploaded file + filename = "%s-%s%s" % (doc.name, doc.rev, ext) doc.uploaded_filename = filename - e = NewRevisionDocEvent.objects.create(doc=doc,by=request.user.person,type='new_revision',desc='New revision available: %s'%doc.rev,rev=doc.rev) + e = NewRevisionDocEvent.objects.create( + doc=doc, + by=request.user.person, + type="new_revision", + desc="New revision available: %s" % doc.rev, + rev=doc.rev, + ) # The way this function builds the filename it will never trigger the file delete in handle_file_upload. 
- save_error = handle_upload_file(file, filename, session.meeting, 'slides', request=request, encoding=form.file_encoding[file.name]) + save_error = handle_upload_file( + file, + filename, + session.meeting, + "slides", + request=request, + encoding=form.file_encoding[file.name], + ) if save_error: form.add_error(None, save_error) else: doc.save_with_history([e]) post_process(doc) + resolve_uploaded_material(meeting=session.meeting, doc=doc) + + # Send MeetEcho updates even if we had a problem saving - that will keep it in sync with the + # SessionPresentation, which was already saved regardless of problems saving the file. + if hasattr(settings, "MEETECHO_API_CONFIG"): + sm = SlidesManager(api_config=settings.MEETECHO_API_CONFIG) + for sp in added_presentations: + try: + sm.add(session=sp.session, slides=doc, order=sp.order) + except MeetechoAPIError as err: + log(f"Error in SlidesManager.add(): {err}") + for sp in revised_presentations: + try: + sm.revise(session=sp.session, slides=doc) + except MeetechoAPIError as err: + log(f"Error in SlidesManager.revise(): {err}") + + if not save_error: messages.success( request, - f'Successfully uploaded slides as revision {doc.rev} of {doc.name}.') - return redirect('ietf.meeting.views.session_details',num=num,acronym=session.group.acronym) - else: + f"Successfully uploaded slides as revision {doc.rev} of {doc.name}.", + ) + return redirect( + "ietf.meeting.views.session_details", + num=num, + acronym=session.group.acronym, + ) + else: initial = {} - if slides: - initial = {'title':slides.title} - form = UploadSlidesForm(session, show_apply_to_all_checkbox, initial=initial) - - return render(request, "meeting/upload_session_slides.html", - {'session': session, - 'session_number': session_number, - 'slides_sp' : slides_sp, - 'form': form, - }) -@login_required -def propose_session_slides(request, session_id, num): - session = get_object_or_404(Session,pk=session_id) - if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"): - permission_denied(request, "The materials cutoff for this session has passed. 
Contact the secretariat for further action.") - - session_number = None - sessions = get_sessions(session.meeting.number,session.group.acronym) - show_apply_to_all_checkbox = len(sessions) > 1 if session.type_id == 'regular' else False - if len(sessions) > 1: - session_number = 1 + sessions.index(session) - - - if request.method == 'POST': - form = UploadSlidesForm(session, show_apply_to_all_checkbox,request.POST,request.FILES) - if form.is_valid(): - file = request.FILES['file'] - _, ext = os.path.splitext(file.name) - apply_to_all = session.type_id == 'regular' - if show_apply_to_all_checkbox: - apply_to_all = form.cleaned_data['apply_to_all'] - title = form.cleaned_data['title'] + if doc is not None: + initial = {"title": doc.title} + form = UploadSlidesForm(session, show_apply_to_all_checkbox, can_manage, initial=initial) - submission = SlideSubmission.objects.create(session = session, title = title, filename = '', apply_to_all = apply_to_all, submitter=request.user.person) - - if session.meeting.type_id=='ietf': - name = 'slides-%s-%s' % (session.meeting.number, - session.group.acronym) - if not apply_to_all: - name += '-%s' % (session.docname_token(),) - else: - name = 'slides-%s-%s' % (session.meeting.number, session.docname_token()) - name = name + '-' + slugify(title).replace('_', '-')[:128] - filename = '%s-ss%d%s'% (name, submission.id, ext) - destination = io.open(os.path.join(settings.SLIDE_STAGING_PATH, filename),'wb+') - for chunk in file.chunks(): - destination.write(chunk) - destination.close() - - submission.filename = filename - submission.save() - - (to, cc) = gather_address_lists('slides_proposed', group=session.group, proposer=request.user.person).as_strings() - msg_txt = render_to_string("meeting/slides_proposed.txt", { - "to": to, - "cc": cc, - "submission": submission, - "settings": settings, - }) - msg = infer_message(msg_txt) - msg.by = request.user.person - msg.save() - send_mail_message(request, msg) - messages.success(request, 'Successfully submitted proposed slides.') - return redirect('ietf.meeting.views.session_details',num=num,acronym=session.group.acronym) - else: - initial = {} - form = UploadSlidesForm(session, show_apply_to_all_checkbox, initial=initial) + return render( + request, + "meeting/upload_session_slides.html", + { + "session": session, + "session_number": session_number, + "slides_sp": session.presentations.filter(document=doc).first() if doc else None, + "manage": session.can_manage_materials(request.user), + "form": form, + }, + ) - return render(request, "meeting/propose_session_slides.html", - {'session': session, - 'session_number': session_number, - 'form': form, - }) def remove_sessionpresentation(request, session_id, num, name): - sp = get_object_or_404(SessionPresentation,session_id=session_id,document__name=name) + sp = get_object_or_404( + SessionPresentation, session_id=session_id, document__name=name + ) session = sp.session if not session.can_manage_materials(request.user): - permission_denied(request, "You don't have permission to manage materials for this session.") - if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"): - permission_denied(request, "The materials cutoff for this session has passed. 
Contact the secretariat for further action.") - if request.method == 'POST': - session.sessionpresentation_set.filter(pk=sp.pk).delete() - c = DocEvent(type="added_comment", doc=sp.document, rev=sp.document.rev, by=request.user.person) + permission_denied( + request, "You don't have permission to manage materials for this session." + ) + if session.is_material_submission_cutoff() and not has_role( + request.user, "Secretariat" + ): + permission_denied( + request, + "The materials cutoff for this session has passed. Contact the secretariat for further action.", + ) + if request.method == "POST": + session.presentations.filter(pk=sp.pk).delete() + c = DocEvent( + type="added_comment", + doc=sp.document, + rev=sp.document.rev, + by=request.user.person, + ) c.desc = "Removed from session: %s" % (session) c.save() - messages.success(request, f'Successfully removed {name}.') - return redirect('ietf.meeting.views.session_details', num=session.meeting.number, acronym=session.group.acronym) + messages.success(request, f"Successfully removed {name}.") + if sp.document.type_id == "slides" and hasattr(settings, "MEETECHO_API_CONFIG"): + sm = SlidesManager(api_config=settings.MEETECHO_API_CONFIG) + try: + sm.delete(session=session, slides=sp.document) + except MeetechoAPIError as err: + log(f"Error in SlidesManager.delete(): {err}") + + return redirect( + "ietf.meeting.views.session_details", + num=session.meeting.number, + acronym=session.group.acronym, + ) + + return render(request, "meeting/remove_sessionpresentation.html", {"sp": sp}) - return render(request,'meeting/remove_sessionpresentation.html', {'sp': sp }) def ajax_add_slides_to_session(request, session_id, num): - session = get_object_or_404(Session,pk=session_id) + session = get_object_or_404(Session, pk=session_id) if not session.can_manage_materials(request.user): - permission_denied(request, "You don't have permission to upload slides for this session.") - if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"): - permission_denied(request, "The materials cutoff for this session has passed. Contact the secretariat for further action.") + permission_denied( + request, "You don't have permission to upload slides for this session." + ) + if session.is_material_submission_cutoff() and not has_role( + request.user, "Secretariat" + ): + permission_denied( + request, + "The materials cutoff for this session has passed. 
Contact the secretariat for further action.", + ) - if request.method != 'POST' or not request.POST: - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'No data submitted or not POST' }),content_type='application/json') + if request.method != "POST" or not request.POST: + return HttpResponse( + json.dumps({"success": False, "error": "No data submitted or not POST"}), + content_type="application/json", + ) - order_str = request.POST.get('order', None) + order_str = request.POST.get("order", None) try: order = int(order_str) except (ValueError, TypeError): - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied order is not valid' }),content_type='application/json') - if order < 1 or order > session.sessionpresentation_set.filter(document__type_id='slides').count() + 1 : - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied order is not valid' }),content_type='application/json') + return HttpResponse( + json.dumps({"success": False, "error": "Supplied order is not valid"}), + content_type="application/json", + ) + if ( + order < 1 + or order > session.presentations.filter(document__type_id="slides").count() + 1 + ): + return HttpResponse( + json.dumps({"success": False, "error": "Supplied order is not valid"}), + content_type="application/json", + ) - name = request.POST.get('name', None) + name = request.POST.get("name", None) doc = Document.objects.filter(name=name).first() if not doc: - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied name is not valid' }),content_type='application/json') + return HttpResponse( + json.dumps({"success": False, "error": "Supplied name is not valid"}), + content_type="application/json", + ) - if not session.sessionpresentation_set.filter(document=doc).exists(): + if not session.presentations.filter(document=doc).exists(): condition_slide_order(session) - session.sessionpresentation_set.filter(document__type_id='slides', order__gte=order).update(order=F('order')+1) - session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=order) - DocEvent.objects.create(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person, desc="Added to session: %s" % session) + session.presentations.filter( + document__type_id="slides", order__gte=order + ).update(order=F("order") + 1) + session.presentations.create(document=doc, rev=doc.rev, order=order) + DocEvent.objects.create( + type="added_comment", + doc=doc, + rev=doc.rev, + by=request.user.person, + desc="Added to session: %s" % session, + ) - return HttpResponse(json.dumps({'success':True}), content_type='application/json') + # Notify Meetecho of new slides if the API is configured + if hasattr(settings, "MEETECHO_API_CONFIG"): + sm = SlidesManager(api_config=settings.MEETECHO_API_CONFIG) + try: + sm.add(session=session, slides=doc, order=order) + except MeetechoAPIError as err: + log(f"Error in SlidesManager.add(): {err}") + + return HttpResponse(json.dumps({"success": True}), content_type="application/json") def ajax_remove_slides_from_session(request, session_id, num): - session = get_object_or_404(Session,pk=session_id) + session = get_object_or_404(Session, pk=session_id) if not session.can_manage_materials(request.user): - permission_denied(request, "You don't have permission to upload slides for this session.") - if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"): - permission_denied(request, "The materials cutoff for this session has passed. 
Contact the secretariat for further action.") + permission_denied( + request, "You don't have permission to upload slides for this session." + ) + if session.is_material_submission_cutoff() and not has_role( + request.user, "Secretariat" + ): + permission_denied( + request, + "The materials cutoff for this session has passed. Contact the secretariat for further action.", + ) - if request.method != 'POST' or not request.POST: - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'No data submitted or not POST' }),content_type='application/json') + if request.method != "POST" or not request.POST: + return HttpResponse( + json.dumps({"success": False, "error": "No data submitted or not POST"}), + content_type="application/json", + ) - oldIndex_str = request.POST.get('oldIndex', None) + oldIndex_str = request.POST.get("oldIndex", None) try: oldIndex = int(oldIndex_str) except (ValueError, TypeError): - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json') - if oldIndex < 1 or oldIndex > session.sessionpresentation_set.filter(document__type_id='slides').count() : - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json') + return HttpResponse( + json.dumps({"success": False, "error": "Supplied index is not valid"}), + content_type="application/json", + ) + if ( + oldIndex < 1 + or oldIndex > session.presentations.filter(document__type_id="slides").count() + ): + return HttpResponse( + json.dumps({"success": False, "error": "Supplied index is not valid"}), + content_type="application/json", + ) - name = request.POST.get('name', None) + name = request.POST.get("name", None) doc = Document.objects.filter(name=name).first() if not doc: - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied name is not valid' }),content_type='application/json') + return HttpResponse( + json.dumps({"success": False, "error": "Supplied name is not valid"}), + content_type="application/json", + ) condition_slide_order(session) - affected_presentations = session.sessionpresentation_set.filter(document=doc).first() + affected_presentations = session.presentations.filter(document=doc).first() if affected_presentations: if affected_presentations.order == oldIndex: affected_presentations.delete() - session.sessionpresentation_set.filter(document__type_id='slides', order__gt=oldIndex).update(order=F('order')-1) - DocEvent.objects.create(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person, desc="Removed from session: %s" % session) - return HttpResponse(json.dumps({'success':True}), content_type='application/json') + session.presentations.filter( + document__type_id="slides", order__gt=oldIndex + ).update(order=F("order") - 1) + DocEvent.objects.create( + type="added_comment", + doc=doc, + rev=doc.rev, + by=request.user.person, + desc="Removed from session: %s" % session, + ) + # Notify Meetecho of removed slides if the API is configured + if hasattr(settings, "MEETECHO_API_CONFIG"): + sm = SlidesManager(api_config=settings.MEETECHO_API_CONFIG) + try: + sm.delete(session=session, slides=doc) + except MeetechoAPIError as err: + log(f"Error in SlidesManager.delete(): {err}") + # Report success + return HttpResponse( + json.dumps({"success": True}), content_type="application/json" + ) else: - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Name does not match index' }),content_type='application/json') + return HttpResponse( + 
json.dumps({"success": False, "error": "Name does not match index"}), + content_type="application/json", + ) else: - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'SessionPresentation not found' }),content_type='application/json') + return HttpResponse( + json.dumps({"success": False, "error": "SessionPresentation not found"}), + content_type="application/json", + ) def ajax_reorder_slides_in_session(request, session_id, num): - session = get_object_or_404(Session,pk=session_id) + session = get_object_or_404(Session, pk=session_id) if not session.can_manage_materials(request.user): - permission_denied(request, "You don't have permission to upload slides for this session.") - if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"): - permission_denied(request, "The materials cutoff for this session has passed. Contact the secretariat for further action.") + permission_denied( + request, "You don't have permission to upload slides for this session." + ) + if session.is_material_submission_cutoff() and not has_role( + request.user, "Secretariat" + ): + permission_denied( + request, + "The materials cutoff for this session has passed. Contact the secretariat for further action.", + ) - if request.method != 'POST' or not request.POST: - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'No data submitted or not POST' }),content_type='application/json') + if request.method != "POST" or not request.POST: + return HttpResponse( + json.dumps({"success": False, "error": "No data submitted or not POST"}), + content_type="application/json", + ) - num_slides_in_session = session.sessionpresentation_set.filter(document__type_id='slides').count() - oldIndex_str = request.POST.get('oldIndex', None) + session_slides = session.presentations.filter(document__type_id="slides") + num_slides_in_session = session_slides.count() + oldIndex_str = request.POST.get("oldIndex", None) try: oldIndex = int(oldIndex_str) except (ValueError, TypeError): - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json') - if oldIndex < 1 or oldIndex > num_slides_in_session : - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json') + return HttpResponse( + json.dumps({"success": False, "error": "Supplied index is not valid"}), + content_type="application/json", + ) + if oldIndex < 1 or oldIndex > num_slides_in_session: + return HttpResponse( + json.dumps({"success": False, "error": "Supplied index is not valid"}), + content_type="application/json", + ) - newIndex_str = request.POST.get('newIndex', None) + newIndex_str = request.POST.get("newIndex", None) try: newIndex = int(newIndex_str) except (ValueError, TypeError): - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json') - if newIndex < 1 or newIndex > num_slides_in_session : - return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json') + return HttpResponse( + json.dumps({"success": False, "error": "Supplied index is not valid"}), + content_type="application/json", + ) + if newIndex < 1 or newIndex > num_slides_in_session: + return HttpResponse( + json.dumps({"success": False, "error": "Supplied index is not valid"}), + content_type="application/json", + ) if newIndex == oldIndex: - return HttpResponse(json.dumps({ 'success' : False, 
'error' : 'Supplied index is not valid' }),content_type='application/json') + return HttpResponse( + json.dumps({"success": False, "error": "Supplied index is not valid"}), + content_type="application/json", + ) condition_slide_order(session) - sp = session.sessionpresentation_set.get(order=oldIndex) + sp = session_slides.get(order=oldIndex) if oldIndex < newIndex: - session.sessionpresentation_set.filter(order__gt=oldIndex, order__lte=newIndex).update(order=F('order')-1) + session_slides.filter(order__gt=oldIndex, order__lte=newIndex).update( + order=F("order") - 1 + ) else: - session.sessionpresentation_set.filter(order__gte=newIndex, order__lt=oldIndex).update(order=F('order')+1) + session_slides.filter(order__gte=newIndex, order__lt=oldIndex).update( + order=F("order") + 1 + ) sp.order = newIndex sp.save() - return HttpResponse(json.dumps({'success':True}), content_type='application/json') + # Update slide order with Meetecho if the API is configured + if hasattr(settings, "MEETECHO_API_CONFIG"): + sm = SlidesManager(api_config=settings.MEETECHO_API_CONFIG) + try: + sm.send_update(session) + except MeetechoAPIError as err: + log(f"Error in SlidesManager.send_update(): {err}") + + return HttpResponse(json.dumps({"success": True}), content_type="application/json") @role_required('Secretariat') @@ -3641,21 +4607,137 @@ def upcoming_ical(request): else: ietfs = [] - meeting_vtz = {meeting.vtimezone() for meeting in meetings} - meeting_vtz.discard(None) - - # icalendar response file should have '\r\n' line endings per RFC5545 - response = render_to_string('meeting/upcoming.ics', { - 'vtimezones': ''.join(sorted(meeting_vtz)), - 'assignments': assignments, - 'ietfs': ietfs, - }, request=request) - response = re.sub("\r(?!\n)|(? 1: - for mat, timestamp in material_times.items(): - result.append({'material': mat, 'time': timestamp}) - return result - - entry = { - 'group': group, - 'name': sess_name, - 'session': session, - 'canceled': all_canceled, - 'has_materials': s.sessionpresentation_set.exists(), - 'agendas': _format_materials((s, s.agenda()) for s in ss), - 'minutes': _format_materials((s, s.minutes()) for s in ss), - 'bluesheets': _format_materials((s, s.bluesheets()) for s in ss), - 'recordings': _format_materials((s, s.recordings()) for s in ss), - 'slides': _format_materials((s, s.slides()) for s in ss), - 'drafts': _format_materials((s, s.drafts()) for s in ss), - 'last_update': session.last_update if hasattr(session, 'last_update') else None - } - if is_meeting: - meeting_groups.append(entry) - else: - not_meeting_groups.append(entry) - return meeting_groups, not_meeting_groups - def proceedings(request, num=None): - meeting = get_meeting(num) - + # Early proceedings were hosted on www.ietf.org rather than the datatracker if meeting.proceedings_format_version == 1: return HttpResponseRedirect(settings.PROCEEDINGS_V1_BASE_URL.format(meeting=meeting)) @@ -3759,85 +4769,23 @@ def proceedings(request, num=None): kwargs['num'] = num return redirect('ietf.meeting.views.materials', **kwargs) - begin_date = meeting.get_submission_start_date() - cut_off_date = meeting.get_submission_cut_off_date() - cor_cut_off_date = meeting.get_submission_correction_date() - today_utc = date_today(datetime.timezone.utc) - - schedule = get_schedule(meeting, None) - sessions = ( - meeting.session_set.with_current_status() - .filter(Q(timeslotassignments__schedule__in=[schedule, schedule.base if schedule else None]) - | Q(current_status='notmeet')) - .select_related() - .order_by('-current_status') - ) - - 
plenaries, _ = organize_proceedings_sessions( - sessions.filter(name__icontains='plenary') - .exclude(current_status='notmeet') - ) - irtf_meeting, irtf_not_meeting = organize_proceedings_sessions( - sessions.filter(group__parent__acronym = 'irtf').order_by('group__acronym') - ) - # per Colin (datatracker #5010) - don't report not meeting rags - irtf_not_meeting = [item for item in irtf_not_meeting if item["group"].type_id != "rag"] - irtf = {"meeting_groups":irtf_meeting, "not_meeting_groups":irtf_not_meeting} - - training, _ = organize_proceedings_sessions( - sessions.filter(group__acronym__in=['edu','iaoc'], type_id__in=['regular', 'other',]) - .exclude(current_status='notmeet') - ) - iab, _ = organize_proceedings_sessions( - sessions.filter(group__parent__acronym = 'iab') - .exclude(current_status='notmeet') - ) - editorial, _ = organize_proceedings_sessions( - sessions.filter(group__acronym__in=['rsab','rswg']) - .exclude(current_status='notmeet') - ) - - ietf = sessions.filter(group__parent__type__slug = 'area').exclude(group__acronym='edu').order_by('group__parent__acronym', 'group__acronym') - ietf_areas = [] - for area, area_sessions in itertools.groupby( - ietf, - key=lambda s: s.group.parent - ): - meeting_groups, not_meeting_groups = organize_proceedings_sessions(area_sessions) - ietf_areas.append((area, meeting_groups, not_meeting_groups)) - - cache_version = Document.objects.filter(session__meeting__number=meeting.number).aggregate(Max('time'))["time__max"] with timezone.override(meeting.tz()): - return render(request, "meeting/proceedings.html", { + return render(request, "meeting/proceedings_wrapper.html", { 'meeting': meeting, - 'plenaries': plenaries, - 'training': training, - 'irtf': irtf, - 'iab': iab, - 'editorial': editorial, - 'ietf_areas': ietf_areas, - 'cut_off_date': cut_off_date, - 'cor_cut_off_date': cor_cut_off_date, - 'submission_started': today_utc > begin_date, - 'cache_version': cache_version, 'attendance': meeting.get_attendance(), - 'meetinghost_logo': { - 'max_height': settings.MEETINGHOST_LOGO_MAX_DISPLAY_HEIGHT, - 'max_width': settings.MEETINGHOST_LOGO_MAX_DISPLAY_WIDTH, - } + 'proceedings_content': generate_proceedings_content(meeting), }) @role_required('Secretariat') def finalize_proceedings(request, num=None): meeting = get_meeting(num) - if (meeting.number.isdigit() and int(meeting.number) <= 64) or not meeting.schedule or not meeting.schedule.assignments.exists() or meeting.proceedings_final: raise Http404 if request.method=='POST': - finalize(meeting) + finalize(request, meeting) return HttpResponseRedirect(reverse('ietf.meeting.views.proceedings',kwargs={'num':meeting.number})) return render(request, "meeting/finalize.html", {'meeting':meeting,}) @@ -3861,18 +4809,52 @@ def proceedings_attendees(request, num=None): if meeting.proceedings_format_version == 1: return HttpResponseRedirect(f'{settings.PROCEEDINGS_V1_BASE_URL.format(meeting=meeting)}/attendee.html') - checked_in, attended = participants_for_meeting(meeting) - regs = list(MeetingRegistration.objects.filter(meeting__number=num, reg_type='onsite', checkedin=True)) + template = None + registrations = None + + stats = None + chart_data = None + + if int(meeting.number) >= 118: + checked_in, attended = participants_for_meeting(meeting) + regs = list(Registration.objects.onsite().filter(meeting__number=num, checkedin=True)) + onsite_count = len(regs) + regs += [ + reg + for reg in Registration.objects.remote().filter(meeting__number=num).select_related('person') + if reg.person.pk in 
attended and reg.person.pk not in checked_in + ] + remote_count = len(regs) - onsite_count + + registrations = sorted(regs, key=lambda x: (x.last_name, x.first_name)) - for mr in MeetingRegistration.objects.filter(meeting__number=num, reg_type='remote').select_related('person'): - if mr.person.pk in attended and mr.person.pk not in checked_in: - regs.append(mr) + country_codes = [r.country_code for r in registrations if r.country_code] + stats = { + 'total': onsite_count + remote_count, + 'onsite': onsite_count, + 'remote': remote_count, + } - meeting_registrations = sorted(regs, key=lambda x: (x.last_name, x.first_name)) + code_to_name = dict(CountryName.objects.values_list('slug', 'name')) + country_counts = Counter(code_to_name.get(c, c) for c in country_codes).most_common() + + chart_data = { + 'type': [['Onsite', onsite_count], ['Remote', remote_count]], + 'countries': country_counts, + } + else: + overview_template = "/meeting/proceedings/%s/attendees.html" % meeting.number + try: + template = render_to_string(overview_template, {}) + except TemplateDoesNotExist: + raise Http404 return render(request, "meeting/proceedings_attendees.html", { 'meeting': meeting, - 'meeting_registrations': meeting_registrations, + 'registrations': registrations, + 'template': template, + 'stats': stats, + 'chart_data': chart_data, }) def proceedings_overview(request, num=None): @@ -3910,6 +4892,55 @@ class OldUploadRedirect(RedirectView): def get_redirect_url(self, **kwargs): return reverse_lazy('ietf.meeting.views.session_details',kwargs=self.kwargs) + +@require_api_key +@role_required("Recording Manager") +@csrf_exempt +def api_set_meetecho_recording_name(request): + """Set name for meetecho recording + + parameters: + apikey: the poster's personal API key + session_id: id of the session to update + name: the name to use for the recording at meetecho player + """ + def err(code, text): + return HttpResponse( + text, + status=code, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) + + if request.method != "POST": + return HttpResponseNotAllowed( + content="Method not allowed", + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + permitted_methods=('POST',), + ) + + session_id = request.POST.get('session_id', None) + if session_id is None: + return err(400, 'Missing session_id parameter') + name = request.POST.get('name', None) + if name is None: + return err(400, 'Missing name parameter') + + try: + session = Session.objects.get(pk=session_id) + except Session.DoesNotExist: + return err(400, f"Session not found with session_id '{session_id}'") + except ValueError: + return err(400, "Invalid session_id: {session_id}") + + session.meetecho_recording_name = name + session.save() + + return HttpResponse( + "Done", + status=200, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) + @require_api_key @role_required('Recording Manager') @csrf_exempt @@ -3922,11 +4953,17 @@ def api_set_session_video_url(request): url: The recording url (on YouTube, or whatever) """ def err(code, text): - return HttpResponse(text, status=code, content_type='text/plain') + return HttpResponse( + text, + status=code, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) if request.method != 'POST': return HttpResponseNotAllowed( - content="Method not allowed", content_type="text/plain", permitted_methods=('POST',) + content="Method not allowed", + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + permitted_methods=('POST',), ) # Temporary: fall 
back to deprecated interface if we have old-style parameters. @@ -3965,7 +5002,11 @@ def err(code, text): time = session.official_timeslotassignment().timeslot.time title = 'Video recording for %s on %s at %s' % (session.group.acronym, time.date(), time.time()) create_recording(session, incoming_url, title=title, user=request.user.person) - return HttpResponse("Done", status=200, content_type='text/plain') + return HttpResponse( + "Done", + status=200, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) def deprecated_api_set_session_video_url(request): @@ -3974,7 +5015,11 @@ def deprecated_api_set_session_video_url(request): Uses meeting/group/item to identify session. """ def err(code, text): - return HttpResponse(text, status=code, content_type='text/plain') + return HttpResponse( + text, + status=code, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) if request.method == 'POST': # parameters: # apikey: the poster's personal API key @@ -4028,46 +5073,136 @@ def err(code, text): else: return err(405, "Method not allowed") - return HttpResponse("Done", status=200, content_type='text/plain') + return HttpResponse( + "Done", + status=200, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) + @require_api_key @role_required('Recording Manager') # TODO : Rework how Meetecho interacts via APIs. There may be better paths to pursue than Personal API keys as they are currently defined. @csrf_exempt def api_add_session_attendees(request): + """Upload attendees for one or more sessions + + parameters: + apikey: the poster's personal API key + attended: json blob with + { + "session_id": session pk, + "attendees": [ + {"user_id": user-pk-1, "join_time": "2024-02-21T18:00:00Z"}, + {"user_id": user-pk-2, "join_time": "2024-02-21T18:00:01Z"}, + {"user_id": user-pk-3, "join_time": "2024-02-21T18:00:02Z"}, + ... 
+ ] + } + """ + json_validator = jsonschema.Draft202012Validator( + schema={ + "type": "object", + "properties": { + "session_id": {"type": "integer"}, + "attendees": { + # Allow either old or new format until after IETF 119 + "anyOf": [ + {"type": "array", "items": {"type": "integer"}}, # old: array of user PKs + { + # new: array of user_id / join_time objects + "type": "array", + "items": { + "type": "object", + "properties": { + "user_id": {"type": "integer", }, + "join_time": {"type": "string", "format": "date-time"} + }, + "required": ["user_id", "join_time"], + }, + }, + ], + } + }, + "required": ["session_id", "attendees"], + }, + format_checker=jsonschema.Draft202012Validator.FORMAT_CHECKER, # format-checks disabled by default + ) def err(code, text): - return HttpResponse(text, status=code, content_type='text/plain') + return HttpResponse( + text, + status=code, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) - if request.method != 'POST': + if request.method != "POST": return err(405, "Method not allowed") - attended_post = request.POST.get('attended') + attended_post = request.POST.get("attended") if not attended_post: return err(400, "Missing attended parameter") + + # Validate the request payload try: - attended = json.loads(attended_post) - except json.decoder.JSONDecodeError: - return err(400, "Malformed post") - if not ( 'session_id' in attended and type(attended['session_id']) is int ): - return err(400, "Malformed post") - session_id = attended['session_id'] - if not ( 'attendees' in attended and type(attended['attendees']) is list and all([type(el) is int for el in attended['attendees']]) ): + payload = json.loads(attended_post) + json_validator.validate(payload) + except (json.decoder.JSONDecodeError, jsonschema.exceptions.ValidationError): return err(400, "Malformed post") + + session_id = payload["session_id"] session = Session.objects.filter(pk=session_id).first() if not session: return err(400, "Invalid session") - users = User.objects.filter(pk__in=attended['attendees']) - if users.count() != len(attended['attendees']): - return err(400, "Invalid attendee") - for user in users: - session.attended_set.get_or_create(person=user.person) - return HttpResponse("Done", status=200, content_type='text/plain') + + attendees = payload["attendees"] + if len(attendees) > 0: + # Check whether we have old or new format + if type(attendees[0]) == int: + # it's the old format + users = User.objects.filter(pk__in=attendees) + if users.count() != len(payload["attendees"]): + return err(400, "Invalid attendee") + for user in users: + session.attended_set.get_or_create(person=user.person) + else: + # it's the new format + join_time_by_pk = { + att["user_id"]: datetime.datetime.fromisoformat( + att["join_time"].replace("Z", "+00:00") # Z not understood until py311 + ) + for att in attendees + } + persons = list(Person.objects.filter(user__pk__in=join_time_by_pk)) + if len(persons) != len(join_time_by_pk): + return err(400, "Invalid attendee") + to_create = [ + Attended(session=session, person=person, time=join_time_by_pk[person.user_id]) + for person in persons + ] + # Create in bulk, ignoring any that already exist + Attended.objects.bulk_create(to_create, ignore_conflicts=True) + + if session.meeting.type_id == "interim": + save_error = generate_bluesheet(request, session) + if save_error: + return err(400, save_error) + + return HttpResponse( + "Done", + status=200, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) + @require_api_key 
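# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, not part of the patch): how a client
# such as the Meetecho recording system might call api_add_session_attendees
# above with the new-format payload described in its docstring. The endpoint
# URL, API key, and the session/user primary keys are placeholders, and the
# `requests` package is assumed to be available on the client side.
import json
import requests

payload = {
    "session_id": 12345,  # placeholder: pk of the Session being reported on
    "attendees": [
        # new format: one object per participant, join_time as an ISO 8601 date-time
        {"user_id": 67890, "join_time": "2024-02-21T18:00:00Z"},
        {"user_id": 67891, "join_time": "2024-02-21T18:03:15Z"},
    ],
}
response = requests.post(
    "https://datatracker.example.org/api/meeting/session/attendees",  # placeholder URL
    data={"apikey": "PERSONAL-API-KEY", "attended": json.dumps(payload)},
)
# Per the schema's anyOf above, a bare list of user pks (the old format) is still
# accepted until after IETF 119. On success the view answers with plain-text
# "Done" (HTTP 200); malformed JSON, an unknown session, or an unknown user
# yields a 400 with a short error string.
print(response.status_code, response.text)
# ---------------------------------------------------------------------------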
@role_required('Recording Manager') @csrf_exempt def api_upload_chatlog(request): def err(code, text): - return HttpResponse(text, status=code, content_type='text/plain') + return HttpResponse( + text, + status=code, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) if request.method != 'POST': return err(405, "Method not allowed") apidata_post = request.POST.get('apidata') @@ -4085,7 +5220,7 @@ def err(code, text): session = Session.objects.filter(pk=session_id).first() if not session: return err(400, "Invalid session") - chatlog_sp = session.sessionpresentation_set.filter(document__type='chatlog').first() + chatlog_sp = session.presentations.filter(document__type='chatlog').first() if chatlog_sp: doc = chatlog_sp.document doc.rev = f"{(int(doc.rev)+1):02d}" @@ -4100,14 +5235,23 @@ def err(code, text): write_doc_for_session(session, 'chatlog', filename, json.dumps(apidata['chatlog'])) e = NewRevisionDocEvent.objects.create(doc=doc, rev=doc.rev, by=request.user.person, type='new_revision', desc='New revision available: %s'%doc.rev) doc.save_with_history([e]) - return HttpResponse("Done", status=200, content_type='text/plain') + resolve_uploaded_material(meeting=session.meeting, doc=doc) + return HttpResponse( + "Done", + status=200, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) @require_api_key @role_required('Recording Manager') @csrf_exempt def api_upload_polls(request): def err(code, text): - return HttpResponse(text, status=code, content_type='text/plain') + return HttpResponse( + text, + status=code, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) if request.method != 'POST': return err(405, "Method not allowed") apidata_post = request.POST.get('apidata') @@ -4125,7 +5269,7 @@ def err(code, text): session = Session.objects.filter(pk=session_id).first() if not session: return err(400, "Invalid session") - polls_sp = session.sessionpresentation_set.filter(document__type='polls').first() + polls_sp = session.presentations.filter(document__type='polls').first() if polls_sp: doc = polls_sp.document doc.rev = f"{(int(doc.rev)+1):02d}" @@ -4140,7 +5284,12 @@ def err(code, text): write_doc_for_session(session, 'polls', filename, json.dumps(apidata['polls'])) e = NewRevisionDocEvent.objects.create(doc=doc, rev=doc.rev, by=request.user.person, type='new_revision', desc='New revision available: %s'%doc.rev) doc.save_with_history([e]) - return HttpResponse("Done", status=200, content_type='text/plain') + resolve_uploaded_material(meeting=session.meeting, doc=doc) + return HttpResponse( + "Done", + status=200, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) @require_api_key @role_required('Recording Manager', 'Secretariat') @@ -4155,18 +5304,19 @@ def api_upload_bluesheet(request): [{'name': 'Name', 'affiliation': 'Organization', }, ...] """ def err(code, text): - return HttpResponse(text, status=code, content_type='text/plain') + return HttpResponse( + text, + status=code, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) if request.method != 'POST': return HttpResponseNotAllowed( - content="Method not allowed", content_type="text/plain", permitted_methods=('POST',) + content="Method not allowed", + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + permitted_methods=('POST',), ) - # Temporary: fall back to deprecated interface if we have old-style parameters. - # Do away with this once meetecho is using the new pk-based interface. 
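# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, not part of the patch): the shape of
# the form fields a client would POST to api_upload_bluesheet above, now that a
# session is identified by pk alone (the meeting/group/item fallback is dropped
# just below). Field names follow the docstring; the attendee data and API key
# are invented placeholders.
import json

form_fields = {
    "apikey": "PERSONAL-API-KEY",  # placeholder personal API key
    "session_id": "12345",         # placeholder Session pk, sent as a form value
    "bluesheet": json.dumps([
        {"name": "Jane Q. Participant", "affiliation": "Example Corp"},
        {"name": "J. Random Engineer", "affiliation": "Example University"},
    ]),
}
# The view parses the "bluesheet" blob and hands the result to save_bluesheet();
# any problem comes back as HTTP 400 with a plain-text reason, success as "Done".
# ---------------------------------------------------------------------------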
- if any(k in request.POST for k in ['meeting', 'group', 'item']): - return deprecated_api_upload_bluesheet(request) - session_id = request.POST.get('session_id', None) if session_id is None: return err(400, 'Missing session_id parameter') @@ -4199,67 +5349,11 @@ def err(code, text): save_err = save_bluesheet(request, session, file) if save_err: return err(400, save_err) - - return HttpResponse("Done", status=200, content_type='text/plain') - - -def deprecated_api_upload_bluesheet(request): - def err(code, text): - return HttpResponse(text, status=code, content_type='text/plain') - if request.method == 'POST': - # parameters: - # apikey: the poster's personal API key - # meeting: number as string, i.e., '101', or 'interim-2018-quic-02' - # group: acronym or special, i.e., 'quic' or 'plenary' - # item: '1', '2', '3' (the group's first, second, third etc. - # session during the week) - # bluesheet: json blob with - # [{'name': 'Name', 'affiliation': 'Organization', }, ...] - for item in ['meeting', 'group', 'item', 'bluesheet',]: - value = request.POST.get(item) - if not value: - return err(400, "Missing %s parameter" % item) - number = request.POST.get('meeting') - sessions = Session.objects.filter(meeting__number=number) - if not sessions.exists(): - return err(400, "No sessions found for meeting '%s'" % (number, )) - acronym = request.POST.get('group') - sessions = sessions.filter(group__acronym=acronym) - if not sessions.exists(): - return err(400, "No sessions found in meeting '%s' for group '%s'" % (number, acronym)) - session_times = [ (s.official_timeslotassignment().timeslot.time, s.id, s) for s in sessions if s.official_timeslotassignment() ] - session_times.sort() - item = request.POST.get('item') - if not item.isdigit(): - return err(400, "Expected a numeric value for 'item', found '%s'" % (item, )) - n = int(item)-1 # change 1-based to 0-based - try: - time, __, session = session_times[n] - except IndexError: - return err(400, "No item '%s' found in list of sessions for group" % (item, )) - bjson = request.POST.get('bluesheet') - try: - data = json.loads(bjson) - except json.decoder.JSONDecodeError: - return err(400, "Invalid json value: '%s'" % (bjson, )) - - text = render_to_string('meeting/bluesheet.txt', { - 'data': data, - 'session': session, - }) - - fd, name = tempfile.mkstemp(suffix=".txt", text=True) - os.close(fd) - with open(name, "w") as file: - file.write(text) - with open(name, "br") as file: - save_err = save_bluesheet(request, session, file) - if save_err: - return err(400, save_err) - else: - return err(405, "Method not allowed") - - return HttpResponse("Done", status=200, content_type='text/plain') + return HttpResponse( + "Done", + status=200, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) def important_dates(request, num=None, output_format=None): @@ -4285,11 +5379,8 @@ def important_dates(request, num=None, output_format=None): if output_format == 'ics': preprocess_meeting_important_dates(meetings) - ics = render_to_string('meeting/important_dates.ics', { - 'meetings': meetings, - }, request=request) - # icalendar response file should have '\r\n' line endings per RFC5545 - response = HttpResponse(re.sub("\r(?!\n)|(? 0: + messages.success( + request, + f"Notified Meetecho about slides for {','.join(str(s) for s in updated)}", + ) + elif sm.slides_notify_time is not None: + messages.warning( + request, + "No sessions were eligible for Meetecho slides update. 
Updates are " + f"only sent within {sm.slides_notify_time} before or after the session.", + ) + else: + messages.warning( + request, + "No sessions were eligible for Meetecho slides update. Updates are " + "currently disabled.", + ) + return redirect( + "ietf.meeting.views.session_details", num=meeting.number, acronym=acronym + ) + + def import_session_minutes(request, session_id, num): """Import session minutes from the ietf.notes.org site @@ -4657,11 +5821,12 @@ def import_session_minutes(request, session_id, num): except SessionNotScheduledError: return HttpResponseGone( "Cannot import minutes for an unscheduled session. Please check the session ID.", - content_type="text/plain", + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", ) except SaveMaterialsError as err: form.add_error(None, str(err)) else: + resolve_uploaded_material(meeting=session.meeting, doc=session.minutes()) messages.success(request, f'Successfully imported minutes as revision {session.minutes().rev}.') return redirect('ietf.meeting.views.session_details', num=num, acronym=session.group.acronym) else: @@ -4697,3 +5862,4 @@ def import_session_minutes(request, session_id, num): 'contents_unchanged': not contents_changed, }, ) + diff --git a/ietf/meeting/views_proceedings.py b/ietf/meeting/views_proceedings.py index 87b7ffea35..639efa1da4 100644 --- a/ietf/meeting/views_proceedings.py +++ b/ietf/meeting/views_proceedings.py @@ -8,13 +8,13 @@ import debug # pyflakes:ignore from ietf.doc.utils import add_state_change_event -from ietf.doc.models import DocAlias, DocEvent, Document, NewRevisionDocEvent, State +from ietf.doc.models import DocEvent, Document, NewRevisionDocEvent, State from ietf.ietfauth.utils import role_required from ietf.meeting.forms import FileUploadForm from ietf.meeting.models import Meeting, MeetingHost from ietf.meeting.helpers import get_meeting from ietf.name.models import ProceedingsMaterialTypeName -from ietf.meeting.utils import handle_upload_file +from ietf.meeting.utils import handle_upload_file, resolve_uploaded_material from ietf.utils.text import xslugify class UploadProceedingsMaterialForm(FileUploadForm): @@ -98,10 +98,6 @@ def save_proceedings_material_doc(meeting, material_type, title, request, file=N ) created = True - # do this even if we did not create the document, just to be sure the alias exists - alias, _ = DocAlias.objects.get_or_create(name=doc.name) - alias.docs.add(doc) - if file: if not created: doc.rev = '{:02}'.format(int(doc.rev) + 1) @@ -154,7 +150,7 @@ def save_proceedings_material_doc(meeting, material_type, title, request, file=N if events: doc.save_with_history(events) - + resolve_uploaded_material(meeting, doc) return doc diff --git a/ietf/secr/sreq/views.py b/ietf/meeting/views_session_request.py similarity index 80% rename from ietf/secr/sreq/views.py rename to ietf/meeting/views_session_request.py index eb93168e1c..a1ef74f1b8 100644 --- a/ietf/secr/sreq/views.py +++ b/ietf/meeting/views_session_request.py @@ -1,29 +1,26 @@ -# Copyright The IETF Trust 2013-2022, All Rights Reserved +# Copyright The IETF Trust 2007-2025, All Rights Reserved # -*- coding: utf-8 -*- - import datetime import inflect from collections import defaultdict, OrderedDict from django.conf import settings from django.contrib import messages +from django.core.exceptions import ObjectDoesNotExist from django.db.models import Q from django.shortcuts import render, get_object_or_404, redirect from django.http import Http404 -import debug # pyflakes:ignore - from ietf.group.models 
import Group, GroupFeatures
 from ietf.ietfauth.utils import has_role, role_required
-from ietf.meeting.models import Meeting, Session, Constraint, ResourceAssociation, SchedulingEvent
 from ietf.meeting.helpers import get_meeting
+from ietf.meeting.models import Session, Meeting, Constraint, ResourceAssociation, SchedulingEvent
 from ietf.meeting.utils import add_event_info_to_session_qs
-from ietf.name.models import SessionStatusName, ConstraintName
-from ietf.secr.sreq.forms import (SessionForm, ToolStatusForm, allowed_conflicting_groups,
+from ietf.meeting.forms import (SessionRequestStatusForm, SessionRequestForm, allowed_conflicting_groups,
     JOINT_FOR_SESSION_CHOICES)
+from ietf.name.models import SessionStatusName, ConstraintName
 from ietf.secr.utils.decorators import check_permissions
-from ietf.secr.utils.group import get_my_groups
 from ietf.utils.mail import send_mail
 from ietf.mailtrigger.utils import gather_address_lists
@@ -31,12 +28,25 @@
 # Globals
 # -------------------------------------------------
 # TODO: This needs to be replaced with something that pays attention to groupfeatures
-AUTHORIZED_ROLES=('WG Chair','WG Secretary','RG Chair','IAB Group Chair','Area Director','Secretariat','Team Chair','IRTF Chair','Program Chair','Program Lead','Program Secretary', 'EDWG Chair')
+AUTHORIZED_ROLES = (
+    'WG Chair',
+    'WG Secretary',
+    'RG Chair',
+    'IAB Group Chair',
+    'Area Director',
+    'Secretariat',
+    'Team Chair',
+    'IRTF Chair',
+    'Program Chair',
+    'Program Lead',
+    'Program Secretary',
+    'EDWG Chair')
 # -------------------------------------------------
 # Helper Functions
 # -------------------------------------------------
+
 def check_app_locked(meeting=None):
     '''
     This function returns True if the application is locked to non-secretariat users.
@@ -45,6 +55,54 @@ def check_app_locked(meeting=None):
         meeting = get_meeting(days=14)
     return bool(meeting.session_request_lock_message)
+
+def get_lock_message(meeting=None):
+    '''
+    Returns the message to display to non-secretariat users when the tool is locked.
+    '''
+    if not meeting:
+        meeting = get_meeting(days=14)
+    return meeting.session_request_lock_message
+
+
+def get_my_groups(user, conclude=False):
+    '''
+    Takes a Django user object (from request).
+    Returns a list of groups the user has access to. Rules are as follows:
+    secretariat - has access to all groups
+    area director - has access to all groups in their area
+    wg chair or secretary - has access to their own group
+    chair of irtf has access to all irtf groups
+
+    If user=None then all groups are returned.
+    conclude=True means include concluded groups. Need this to upload materials for groups
+    after they've been concluded. It happens.
+    '''
+    my_groups = set()
+    states = ['bof', 'proposed', 'active']
+    if conclude:
+        states.extend(['conclude', 'bof-conc'])
+
+    all_groups = Group.objects.filter(type__features__has_meetings=True, state__in=states).order_by('acronym')
+    if user is None or has_role(user, 'Secretariat'):
+        return all_groups
+
+    try:
+        person = user.person
+    except ObjectDoesNotExist:
+        return list()
+
+    for group in all_groups:
+        if group.role_set.filter(person=person, name__in=('chair', 'secr', 'ad')):
+            my_groups.add(group)
+            continue
+        if group.parent and group.parent.role_set.filter(person=person, name__in=('ad', 'chair')):
+            my_groups.add(group)
+            continue
+
+    return list(my_groups)
+
+
 def get_initial_session(sessions, prune_conflicts=False):
     '''
     This function takes a queryset of sessions ordered by 'id' for consistency.
It returns @@ -97,13 +155,43 @@ def valid_conflict(conflict): initial['joint_for_session_display'] = dict(JOINT_FOR_SESSION_CHOICES)[initial['joint_for_session']] return initial -def get_lock_message(meeting=None): + +def inbound_session_conflicts_as_string(group, meeting): ''' - Returns the message to display to non-secretariat users when the tool is locked. + Takes a Group object and Meeting object and returns a string of other groups which have + a conflict with this one ''' - if not meeting: - meeting = get_meeting(days=14) - return meeting.session_request_lock_message + constraints = group.constraint_target_set.filter(meeting=meeting, name__is_group_conflict=True) + group_set = set(constraints.values_list('source__acronym', flat=True)) # set to de-dupe + group_list = sorted(group_set) # give a consistent order + return ', '.join(group_list) + + +def get_outbound_conflicts(form: SessionRequestForm): + """extract wg conflict constraint data from a SessionForm""" + outbound_conflicts = [] + for conflictname, cfield_id in form.wg_constraint_field_ids(): + conflict_groups = form.cleaned_data[cfield_id] + if len(conflict_groups) > 0: + outbound_conflicts.append(dict(name=conflictname, groups=conflict_groups)) + return outbound_conflicts + + +def save_conflicts(group, meeting, conflicts, name): + ''' + This function takes a Group, Meeting a string which is a list of Groups acronyms (conflicts), + and the constraint name (conflict|conflic2|conflic3) and creates Constraint records + ''' + constraint_name = ConstraintName.objects.get(slug=name) + acronyms = conflicts.replace(',',' ').split() + for acronym in acronyms: + target = Group.objects.get(acronym=acronym) + + constraint = Constraint(source=group, + target=target, + meeting=meeting, + name=constraint_name) + constraint.save() def get_requester_text(person, group): @@ -129,22 +217,6 @@ def get_requester_text(person, group): ) -def save_conflicts(group, meeting, conflicts, name): - ''' - This function takes a Group, Meeting a string which is a list of Groups acronyms (conflicts), - and the constraint name (conflict|conflic2|conflic3) and creates Constraint records - ''' - constraint_name = ConstraintName.objects.get(slug=name) - acronyms = conflicts.replace(',',' ').split() - for acronym in acronyms: - target = Group.objects.get(acronym=acronym) - - constraint = Constraint(source=group, - target=target, - meeting=meeting, - name=constraint_name) - constraint.save() - def send_notification(group, meeting, login, sreq_data, session_data, action): ''' This function generates email notifications for various session request activities. @@ -152,10 +224,10 @@ def send_notification(group, meeting, login, sreq_data, session_data, action): session_data is an array of data from individual session subforms action argument is a string [new|update]. 
''' - (to_email, cc_list) = gather_address_lists('session_requested',group=group,person=login) + (to_email, cc_list) = gather_address_lists('session_requested', group=group, person=login) from_email = (settings.SESSION_REQUEST_FROM_EMAIL) subject = '%s - New Meeting Session Request for IETF %s' % (group.acronym, meeting.number) - template = 'sreq/session_request_notification.txt' + template = 'meeting/session_request_notification.txt' # send email context = {} @@ -164,7 +236,7 @@ def send_notification(group, meeting, login, sreq_data, session_data, action): context['meeting'] = meeting context['login'] = login context['header'] = 'A new' - context['requester'] = get_requester_text(login,group) + context['requester'] = get_requester_text(login, group) # update overrides if action == 'update': @@ -174,10 +246,10 @@ def send_notification(group, meeting, login, sreq_data, session_data, action): # if third session requested approval is required # change headers TO=ADs, CC=session-request, submitter and cochairs if len(session_data) > 2: - (to_email, cc_list) = gather_address_lists('session_requested_long',group=group,person=login) + (to_email, cc_list) = gather_address_lists('session_requested_long', group=group, person=login) subject = '%s - Request for meeting session approval for IETF %s' % (group.acronym, meeting.number) - template = 'sreq/session_approval_notification.txt' - #status_text = 'the %s Directors for approval' % group.parent + template = 'meeting/session_approval_notification.txt' + # status_text = 'the %s Directors for approval' % group.parent context['session_lengths'] = [sd['requested_duration'] for sd in session_data] @@ -189,103 +261,188 @@ def send_notification(group, meeting, login, sreq_data, session_data, action): context, cc=cc_list) -def inbound_session_conflicts_as_string(group, meeting): - ''' - Takes a Group object and Meeting object and returns a string of other groups which have - a conflict with this one - ''' - constraints = group.constraint_target_set.filter(meeting=meeting, name__is_group_conflict=True) - group_set = set(constraints.values_list('source__acronym', flat=True)) # set to de-dupe - group_list = sorted(group_set) # give a consistent order - return ', '.join(group_list) + +def session_changed(session): + latest_event = SchedulingEvent.objects.filter(session=session).order_by('-time', '-id').first() + + if latest_event and latest_event.status_id == "schedw" and session.meeting.schedule is not None: + # send an email to iesg-secretariat to alert to change + pass + + +def status_slug_for_new_session(session, session_number): + if session.group.features.acts_like_wg and session_number == 2: + return 'apprw' + return 'schedw' # ------------------------------------------------- # View Functions # ------------------------------------------------- -@check_permissions -def approve(request, acronym): + + +@role_required(*AUTHORIZED_ROLES) +def list_view(request): ''' - This view approves the third session. For use by ADs or Secretariat. + Display list of groups the user has access to. 
''' meeting = get_meeting(days=14) - group = get_object_or_404(Group, acronym=acronym) - session = add_event_info_to_session_qs(Session.objects.filter(meeting=meeting, group=group)).filter(current_status='apprw').first() - if session is None: - raise Http404 + # check for locked flag + is_locked = check_app_locked() + if is_locked and not has_role(request.user, 'Secretariat'): + message = get_lock_message() + return render(request, 'meeting/session_request_locked.html', { + 'message': message, + 'meeting': meeting}) - if has_role(request.user,'Secretariat') or group.parent.role_set.filter(name='ad',person=request.user.person): - SchedulingEvent.objects.create( - session=session, - status=SessionStatusName.objects.get(slug='appr'), - by=request.user.person, - ) - session_changed(session) + scheduled_groups = [] + unscheduled_groups = [] - messages.success(request, 'Third session approved') - return redirect('ietf.secr.sreq.views.view', acronym=acronym) - else: - # if an unauthorized user gets here return error - messages.error(request, 'Not authorized to approve the third session') - return redirect('ietf.secr.sreq.views.view', acronym=acronym) + group_types = GroupFeatures.objects.filter(has_meetings=True).values_list('type', flat=True) -@check_permissions -def cancel(request, acronym): - ''' - This view cancels a session request and sends a notification. - To cancel, or withdraw the request set status = deleted. - "canceled" status is used by the secretariat. + my_groups = [g for g in get_my_groups(request.user, conclude=True) if g.type_id in group_types] - NOTE: this function can also be called after a session has been - scheduled during the period when the session request tool is - reopened. In this case be sure to clear the timeslot assignment as well. + sessions_by_group = defaultdict(list) + for s in add_event_info_to_session_qs(Session.objects.filter(meeting=meeting, group__in=my_groups)).filter(current_status__in=['schedw', 'apprw', 'appr', 'sched']): + sessions_by_group[s.group_id].append(s) + + for group in my_groups: + group.meeting_sessions = sessions_by_group.get(group.pk, []) + + if group.pk in sessions_by_group: + # include even if concluded as we need to to see that the + # sessions are there + scheduled_groups.append(group) + else: + if group.state_id not in ['conclude', 'bof-conc']: + # too late for unscheduled if concluded + unscheduled_groups.append(group) + + # warn if there are no associated groups + if not scheduled_groups and not unscheduled_groups: + messages.warning(request, 'The account %s is not associated with any groups. 
If you have multiple Datatracker accounts you may try another or report a problem to %s' % (request.user, settings.SECRETARIAT_ACTION_EMAIL)) + + # add session status messages for use in template + for group in scheduled_groups: + if not group.features.acts_like_wg or (len(group.meeting_sessions) < 3): + group.status_message = group.meeting_sessions[0].current_status + else: + group.status_message = 'First two sessions: %s, Third session: %s' % (group.meeting_sessions[0].current_status, group.meeting_sessions[2].current_status) + + # add not meeting indicators for use in template + for group in unscheduled_groups: + if any(s.current_status == 'notmeet' for s in group.meeting_sessions): + group.not_meeting = True + + return render(request, 'meeting/session_request_list.html', { + 'is_locked': is_locked, + 'meeting': meeting, + 'scheduled_groups': scheduled_groups, + 'unscheduled_groups': unscheduled_groups}, + ) + + +@role_required('Secretariat') +def status(request): + ''' + This view handles locking and unlocking of the session request tool to the public. ''' meeting = get_meeting(days=14) - group = get_object_or_404(Group, acronym=acronym) - sessions = Session.objects.filter(meeting=meeting,group=group).order_by('id') - login = request.user.person + is_locked = check_app_locked(meeting=meeting) - # delete conflicts - Constraint.objects.filter(meeting=meeting,source=group).delete() + if request.method == 'POST': + button_text = request.POST.get('submit', '') + if button_text == 'Back': + return redirect('ietf.meeting.views_session_request.list_view') - # mark sessions as deleted - for session in sessions: - SchedulingEvent.objects.create( - session=session, - status=SessionStatusName.objects.get(slug='deleted'), - by=request.user.person, - ) - session_changed(session) + form = SessionRequestStatusForm(request.POST) - # clear schedule assignments if already scheduled - session.timeslotassignments.all().delete() + if button_text == 'Lock': + if form.is_valid(): + meeting.session_request_lock_message = form.cleaned_data['message'] + meeting.save() + messages.success(request, 'Session Request Tool is now Locked') + return redirect('ietf.meeting.views_session_request.list_view') - # send notifitcation - (to_email, cc_list) = gather_address_lists('session_request_cancelled',group=group,person=login) - from_email = (settings.SESSION_REQUEST_FROM_EMAIL) - subject = '%s - Cancelling a meeting request for IETF %s' % (group.acronym, meeting.number) - send_mail(request, to_email, from_email, subject, 'sreq/session_cancel_notification.txt', - {'requester':get_requester_text(login,group), - 'meeting':meeting}, cc=cc_list) + elif button_text == 'Unlock': + meeting.session_request_lock_message = '' + meeting.save() + messages.success(request, 'Session Request Tool is now Unlocked') + return redirect('ietf.meeting.views_session_request.list_view') - messages.success(request, 'The %s Session Request has been cancelled' % group.acronym) - return redirect('ietf.secr.sreq.views.main') + else: + if is_locked: + message = get_lock_message() + initial = {'message': message} + form = SessionRequestStatusForm(initial=initial) + else: + form = SessionRequestStatusForm() + return render(request, 'meeting/session_request_status.html', { + 'is_locked': is_locked, + 'form': form}, + ) -def status_slug_for_new_session(session, session_number): - if session.group.features.acts_like_wg and session_number == 2: - return 'apprw' - return 'schedw' +@check_permissions +def new_request(request, acronym): + ''' + This view 
gathers details for a new session request. The user proceeds to confirm() + to create the request. + ''' + group = get_object_or_404(Group, acronym=acronym) + if len(group.features.session_purposes) == 0: + raise Http404(f'Cannot request sessions for group "{acronym}"') + meeting = get_meeting(days=14) + session_conflicts = dict(inbound=inbound_session_conflicts_as_string(group, meeting)) -def get_outbound_conflicts(form: SessionForm): - """extract wg conflict constraint data from a SessionForm""" - outbound_conflicts = [] - for conflictname, cfield_id in form.wg_constraint_field_ids(): - conflict_groups = form.cleaned_data[cfield_id] - if len(conflict_groups) > 0: - outbound_conflicts.append(dict(name=conflictname, groups=conflict_groups)) - return outbound_conflicts + # check if app is locked + is_locked = check_app_locked() + if is_locked and not has_role(request.user, 'Secretariat'): + messages.warning(request, "The Session Request Tool is closed") + return redirect('ietf.meeting.views_session_request.list_view') + + if request.method == 'POST': + button_text = request.POST.get('submit', '') + if button_text == 'Cancel': + return redirect('ietf.meeting.views_session_request.list_view') + + form = SessionRequestForm(group, meeting, request.POST, notifications_optional=has_role(request.user, "Secretariat")) + if form.is_valid(): + return confirm(request, acronym) + + # the "previous" querystring causes the form to be returned + # pre-populated with data from last meeeting's session request + elif request.method == 'GET' and 'previous' in request.GET: + latest_session = add_event_info_to_session_qs(Session.objects.filter(meeting__type_id='ietf', group=group)).exclude(current_status__in=['notmeet', 'deleted', 'canceled',]).order_by('-meeting__date').first() + if latest_session: + previous_meeting = Meeting.objects.get(number=latest_session.meeting.number) + previous_sessions = add_event_info_to_session_qs(Session.objects.filter(meeting=previous_meeting, group=group)).exclude(current_status__in=['notmeet', 'deleted']).order_by('id') + if not previous_sessions: + messages.warning(request, 'This group did not meet at %s' % previous_meeting) + return redirect('ietf.meeting.views_session_request.new_request', acronym=acronym) + else: + messages.info(request, 'Fetched session info from %s' % previous_meeting) + else: + messages.warning(request, 'Did not find any previous meeting') + return redirect('ietf.meeting.views_session_request.new_request', acronym=acronym) + + initial = get_initial_session(previous_sessions, prune_conflicts=True) + if 'resources' in initial: + initial['resources'] = [x.pk for x in initial['resources']] + form = SessionRequestForm(group, meeting, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) + + else: + initial = {} + form = SessionRequestForm(group, meeting, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) + + return render(request, 'meeting/session_request_form.html', { + 'meeting': meeting, + 'form': form, + 'group': group, + 'is_create': True, + 'session_conflicts': session_conflicts}, + ) @role_required(*AUTHORIZED_ROLES) @@ -295,11 +452,11 @@ def confirm(request, acronym): to confirm for submission. 
''' # FIXME: this should be using form.is_valid/form.cleaned_data - invalid input will make it crash - group = get_object_or_404(Group,acronym=acronym) + group = get_object_or_404(Group, acronym=acronym) if len(group.features.session_purposes) == 0: raise Http404(f'Cannot request sessions for group "{acronym}"') meeting = get_meeting(days=14) - form = SessionForm(group, meeting, request.POST, hidden=True, notifications_optional=has_role(request.user, "Secretariat")) + form = SessionRequestForm(group, meeting, request.POST, hidden=True, notifications_optional=has_role(request.user, "Secretariat")) form.is_valid() login = request.user.person @@ -307,8 +464,8 @@ def confirm(request, acronym): # check if request already exists for this group if add_event_info_to_session_qs(Session.objects.filter(group=group, meeting=meeting)).filter(Q(current_status__isnull=True) | ~Q(current_status__in=['deleted', 'notmeet'])): messages.warning(request, 'Sessions for working group %s have already been requested once.' % group.acronym) - return redirect('ietf.secr.sreq.views.main') - + return redirect('ietf.meeting.views_session_request.list_view') + session_data = form.data.copy() # use cleaned_data for the 'bethere' field so we get the Person instances session_data['bethere'] = form.cleaned_data['bethere'] if 'bethere' in form.cleaned_data else [] @@ -318,7 +475,7 @@ def confirm(request, acronym): session_data['joint_for_session_display'] = dict(JOINT_FOR_SESSION_CHOICES)[session_data['joint_for_session']] if form.cleaned_data.get('timeranges'): session_data['timeranges_display'] = [t.desc for t in form.cleaned_data['timeranges']] - session_data['resources'] = [ ResourceAssociation.objects.get(pk=pk) for pk in request.POST.getlist('resources') ] + session_data['resources'] = [ResourceAssociation.objects.get(pk=pk) for pk in request.POST.getlist('resources')] # extract wg conflict constraint data for the view / notifications outbound_conflicts = get_outbound_conflicts(form) @@ -326,7 +483,7 @@ def confirm(request, acronym): button_text = request.POST.get('submit', '') if button_text == 'Cancel': messages.success(request, 'Session Request has been cancelled') - return redirect('ietf.secr.sreq.views.main') + return redirect('ietf.meeting.views_session_request.list_view') if request.method == 'POST' and button_text == 'Submit': # delete any existing session records with status = canceled or notmeet @@ -344,10 +501,10 @@ def confirm(request, acronym): if 'resources' in form.data: new_session.resources.set(session_data['resources']) jfs = form.data.get('joint_for_session', '-1') - if not jfs: # jfs might be '' + if not jfs: # jfs might be '' jfs = '-1' if int(jfs) == count + 1: # count is zero-indexed - groups_split = form.cleaned_data.get('joint_with_groups').replace(',',' ').split() + groups_split = form.cleaned_data.get('joint_with_groups').replace(',', ' ').split() joint = Group.objects.filter(acronym__in=groups_split) new_session.joint_with_groups.set(joint) new_session.save() @@ -388,36 +545,105 @@ def confirm(request, acronym): 'new', ) - status_text = 'IETF Agenda to be scheduled' - messages.success(request, 'Your request has been sent to %s' % status_text) - return redirect('ietf.secr.sreq.views.main') + status_text = 'IETF Agenda to be scheduled' + messages.success(request, 'Your request has been sent to %s' % status_text) + return redirect('ietf.meeting.views_session_request.list_view') + + # POST from request submission + session_conflicts = dict( + outbound=outbound_conflicts, # each is a dict with 
name and groups as keys + inbound=inbound_session_conflicts_as_string(group, meeting), + ) + if form.cleaned_data.get('third_session'): + messages.warning(request, 'Note: Your request for a third session must be approved by an area director before being submitted to agenda@ietf.org. Click "Submit" below to email an approval request to the area directors') + + return render(request, 'meeting/session_request_confirm.html', { + 'form': form, + 'session': session_data, + 'group': group, + 'meeting': meeting, + 'session_conflicts': session_conflicts}, + ) + + +@role_required(*AUTHORIZED_ROLES) +def view_request(request, acronym, num=None): + ''' + This view displays the session request info + ''' + meeting = get_meeting(num, days=14) + group = get_object_or_404(Group, acronym=acronym) + query = Session.objects.filter(meeting=meeting, group=group) + status_is_null = Q(current_status__isnull=True) + status_allowed = ~Q(current_status__in=("canceled", "notmeet", "deleted")) + sessions = ( + add_event_info_to_session_qs(query) + .filter(status_is_null | status_allowed) + .order_by("id") + ) + + # check if app is locked + is_locked = check_app_locked() + if is_locked: + messages.warning(request, "The Session Request Tool is closed") + + # if there are no session requests yet, redirect to new session request page + if not sessions: + if is_locked: + return redirect('ietf.meeting.views_session_request.list_view') + else: + return redirect('ietf.meeting.views_session_request.new_request', acronym=acronym) + + activities = [{ + 'act_date': e.time.strftime('%b %d, %Y'), + 'act_time': e.time.strftime('%H:%M:%S'), + 'activity': e.status.name, + 'act_by': e.by, + } for e in sessions[0].schedulingevent_set.select_related('status', 'by')] + + # gather outbound conflicts + outbound_dict = OrderedDict() + for obc in group.constraint_source_set.filter(meeting=meeting, name__is_group_conflict=True): + if obc.name.slug not in outbound_dict: + outbound_dict[obc.name.slug] = [] + outbound_dict[obc.name.slug].append(obc.target.acronym) - # POST from request submission session_conflicts = dict( - outbound=outbound_conflicts, # each is a dict with name and groups as keys inbound=inbound_session_conflicts_as_string(group, meeting), + outbound=[dict(name=ConstraintName.objects.get(slug=slug), groups=' '.join(groups)) + for slug, groups in outbound_dict.items()], ) - return render(request, 'sreq/confirm.html', { - 'form': form, - 'session': session_data, + + show_approve_button = False + + # if sessions include a 3rd session waiting approval and the user is a secretariat or AD of the group + # display approve button + if any(s.current_status == 'apprw' for s in sessions): + if has_role(request.user, 'Secretariat') or group.parent.role_set.filter(name='ad', person=request.user.person): + show_approve_button = True + + # build session dictionary (like querydict from new session request form) for use in template + session = get_initial_session(sessions) + + return render(request, 'meeting/session_request_view.html', { + 'can_edit': (not is_locked) or has_role(request.user, 'Secretariat'), + 'can_cancel': (not is_locked) or has_role(request.user, 'Secretariat'), + 'session': session, # legacy processed data + 'sessions': sessions, # actual session instances + 'activities': activities, + 'meeting': meeting, 'group': group, - 'session_conflicts': session_conflicts}, + 'session_conflicts': session_conflicts, + 'show_approve_button': show_approve_button}, ) - -def session_changed(session): - latest_event = 
SchedulingEvent.objects.filter(session=session).order_by('-time', '-id').first() - - if latest_event and latest_event.status_id == "schedw" and session.meeting.schedule != None: - # send an email to iesg-secretariat to alert to change - pass @check_permissions -def edit(request, acronym, num=None): +def edit_request(request, acronym, num=None): ''' This view allows the user to edit details of the session request ''' - meeting = get_meeting(num,days=14) + meeting = get_meeting(num, days=14) group = get_object_or_404(Group, acronym=acronym) if len(group.features.session_purposes) == 0: raise Http404(f'Cannot request sessions for group "{acronym}"') @@ -443,15 +669,15 @@ def edit(request, acronym, num=None): login = request.user.person first_session = Session() - if(len(sessions) > 0): + if (len(sessions) > 0): first_session = sessions[0] if request.method == 'POST': button_text = request.POST.get('submit', '') if button_text == 'Cancel': - return redirect('ietf.secr.sreq.views.view', acronym=acronym) + return redirect('ietf.meeting.views_session_request.view_request', acronym=acronym) - form = SessionForm(group, meeting, request.POST, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) + form = SessionRequestForm(group, meeting, request.POST, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) if form.is_valid(): if form.has_changed(): changed_session_forms = [sf for sf in form.session_forms.forms_to_keep if sf.has_changed()] @@ -513,11 +739,11 @@ def edit(request, acronym, num=None): if 'resources' in form.changed_data: new_resource_ids = form.cleaned_data['resources'] - new_resources = [ ResourceAssociation.objects.get(pk=a) - for a in new_resource_ids] + new_resources = [ResourceAssociation.objects.get(pk=a) + for a in new_resource_ids] first_session.resources = new_resources - if 'bethere' in form.changed_data and set(form.cleaned_data['bethere'])!=set(initial['bethere']): + if 'bethere' in form.changed_data and set(form.cleaned_data['bethere']) != set(initial['bethere']): first_session.constraints().filter(name='bethere').delete() bethere_cn = ConstraintName.objects.get(slug='bethere') for p in form.cleaned_data['bethere']: @@ -539,7 +765,7 @@ def edit(request, acronym, num=None): # deprecated # log activity - #add_session_activity(group,'Session Request was updated',meeting,user) + # add_session_activity(group,'Session Request was updated',meeting,user) # send notification if form.cleaned_data.get("send_notifications"): @@ -556,7 +782,7 @@ def edit(request, acronym, num=None): ) messages.success(request, 'Session Request updated') - return redirect('ietf.secr.sreq.views.view', acronym=acronym) + return redirect('ietf.meeting.views_session_request.view_request', acronym=acronym) else: # method is not POST # gather outbound conflicts for initial value @@ -567,142 +793,46 @@ def edit(request, acronym, num=None): initial['constraint_{}'.format(slug)] = ' '.join(groups) if not sessions: - return redirect('ietf.secr.sreq.views.new', acronym=acronym) - form = SessionForm(group, meeting, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) + return redirect('ietf.meeting.views_session_request.new_request', acronym=acronym) + form = SessionRequestForm(group, meeting, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) - return render(request, 'sreq/edit.html', { - 'is_locked': is_locked and not has_role(request.user,'Secretariat'), + return render(request, 
'meeting/session_request_form.html', { + 'is_locked': is_locked and not has_role(request.user, 'Secretariat'), 'meeting': meeting, 'form': form, 'group': group, + 'is_create': False, 'session_conflicts': session_conflicts}, ) -@role_required(*AUTHORIZED_ROLES) -def main(request): - ''' - Display list of groups the user has access to. - - Template variables - form: a select box populated with unscheduled groups - meeting: the current meeting - scheduled_sessions: - ''' - # check for locked flag - is_locked = check_app_locked() - - if is_locked and not has_role(request.user,'Secretariat'): - message = get_lock_message() - return render(request, 'sreq/locked.html', { - 'message': message}, - ) - - meeting = get_meeting(days=14) - - scheduled_groups = [] - unscheduled_groups = [] - - group_types = GroupFeatures.objects.filter(has_meetings=True).values_list('type', flat=True) - - my_groups = [g for g in get_my_groups(request.user, conclude=True) if g.type_id in group_types] - - sessions_by_group = defaultdict(list) - for s in add_event_info_to_session_qs(Session.objects.filter(meeting=meeting, group__in=my_groups)).filter(current_status__in=['schedw', 'apprw', 'appr', 'sched']): - sessions_by_group[s.group_id].append(s) - - for group in my_groups: - group.meeting_sessions = sessions_by_group.get(group.pk, []) - - if group.pk in sessions_by_group: - # include even if concluded as we need to to see that the - # sessions are there - scheduled_groups.append(group) - else: - if group.state_id not in ['conclude', 'bof-conc']: - # too late for unscheduled if concluded - unscheduled_groups.append(group) - - # warn if there are no associated groups - if not scheduled_groups and not unscheduled_groups: - messages.warning(request, 'The account %s is not associated with any groups. If you have multiple Datatracker accounts you may try another or report a problem to %s' % (request.user, settings.SECRETARIAT_ACTION_EMAIL)) - - # add session status messages for use in template - for group in scheduled_groups: - if not group.features.acts_like_wg or (len(group.meeting_sessions) < 3): - group.status_message = group.meeting_sessions[0].current_status - else: - group.status_message = 'First two sessions: %s, Third session: %s' % (group.meeting_sessions[0].current_status, group.meeting_sessions[2].current_status) - - # add not meeting indicators for use in template - for group in unscheduled_groups: - if any(s.current_status == 'notmeet' for s in group.meeting_sessions): - group.not_meeting = True - - return render(request, 'sreq/main.html', { - 'is_locked': is_locked, - 'meeting': meeting, - 'scheduled_groups': scheduled_groups, - 'unscheduled_groups': unscheduled_groups}, - ) @check_permissions -def new(request, acronym): +def approve_request(request, acronym): ''' - This view gathers details for a new session request. The user proceeds to confirm() - to create the request. + This view approves the third session. For use by ADs or Secretariat. 
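+
+    A rough sketch of the permission rule enforced below (hypothetical helper,
+    not part of this module; it simply restates the check made in the view body):
+
+        def may_approve_third_session(user, group):
+            # Secretariat may always approve; otherwise the user must hold an
+            # 'ad' role in the group's parent area.
+            return has_role(user, 'Secretariat') or group.parent.role_set.filter(
+                name='ad', person=user.person
+            ).exists()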
''' - group = get_object_or_404(Group, acronym=acronym) - if len(group.features.session_purposes) == 0: - raise Http404(f'Cannot request sessions for group "{acronym}"') meeting = get_meeting(days=14) - session_conflicts = dict(inbound=inbound_session_conflicts_as_string(group, meeting)) - - # check if app is locked - is_locked = check_app_locked() - if is_locked and not has_role(request.user,'Secretariat'): - messages.warning(request, "The Session Request Tool is closed") - return redirect('ietf.secr.sreq.views.main') - - if request.method == 'POST': - button_text = request.POST.get('submit', '') - if button_text == 'Cancel': - return redirect('ietf.secr.sreq.views.main') - - form = SessionForm(group, meeting, request.POST, notifications_optional=has_role(request.user, "Secretariat")) - if form.is_valid(): - return confirm(request, acronym) + group = get_object_or_404(Group, acronym=acronym) - # the "previous" querystring causes the form to be returned - # pre-populated with data from last meeeting's session request - elif request.method == 'GET' and 'previous' in request.GET: - latest_session = add_event_info_to_session_qs(Session.objects.filter(meeting__type_id='ietf', group=group)).exclude(current_status__in=['notmeet', 'deleted', 'canceled',]).order_by('-meeting__date').first() - if latest_session: - previous_meeting = Meeting.objects.get(number=latest_session.meeting.number) - previous_sessions = add_event_info_to_session_qs(Session.objects.filter(meeting=previous_meeting, group=group)).exclude(current_status__in=['notmeet', 'deleted']).order_by('id') - if not previous_sessions: - messages.warning(request, 'This group did not meet at %s' % previous_meeting) - return redirect('ietf.secr.sreq.views.new', acronym=acronym) - else: - messages.info(request, 'Fetched session info from %s' % previous_meeting) - else: - messages.warning(request, 'Did not find any previous meeting') - return redirect('ietf.secr.sreq.views.new', acronym=acronym) + session = add_event_info_to_session_qs(Session.objects.filter(meeting=meeting, group=group)).filter(current_status='apprw').first() + if session is None: + raise Http404 - initial = get_initial_session(previous_sessions, prune_conflicts=True) - if 'resources' in initial: - initial['resources'] = [x.pk for x in initial['resources']] - form = SessionForm(group, meeting, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) + if has_role(request.user, 'Secretariat') or group.parent.role_set.filter(name='ad', person=request.user.person): + SchedulingEvent.objects.create( + session=session, + status=SessionStatusName.objects.get(slug='appr'), + by=request.user.person, + ) + session_changed(session) + messages.success(request, 'Third session approved') + return redirect('ietf.meeting.views_session_request.view_request', acronym=acronym) else: - initial={} - form = SessionForm(group, meeting, initial=initial, notifications_optional=has_role(request.user, "Secretariat")) + # if an unauthorized user gets here return error + messages.error(request, 'Not authorized to approve the third session') + return redirect('ietf.meeting.views_session_request.view_request', acronym=acronym) - return render(request, 'sreq/new.html', { - 'meeting': meeting, - 'form': form, - 'group': group, - 'session_conflicts': session_conflicts}, - ) @check_permissions def no_session(request, acronym): @@ -722,7 +852,7 @@ def no_session(request, acronym): # skip if state is already notmeet if add_event_info_to_session_qs(Session.objects.filter(group=group, 
meeting=meeting)).filter(current_status='notmeet'): messages.info(request, 'The group %s is already marked as not meeting' % group.acronym) - return redirect('ietf.secr.sreq.views.main') + return redirect('ietf.meeting.views_session_request.list_view') session = Session.objects.create( group=group, @@ -740,125 +870,62 @@ def no_session(request, acronym): session_changed(session) # send notification - (to_email, cc_list) = gather_address_lists('session_request_not_meeting',group=group,person=login) + (to_email, cc_list) = gather_address_lists('session_request_not_meeting', group=group, person=login) from_email = (settings.SESSION_REQUEST_FROM_EMAIL) subject = '%s - Not having a session at IETF %s' % (group.acronym, meeting.number) - send_mail(request, to_email, from_email, subject, 'sreq/not_meeting_notification.txt', - {'login':login, - 'group':group, - 'meeting':meeting}, cc=cc_list) + send_mail(request, to_email, from_email, subject, 'meeting/session_not_meeting_notification.txt', + {'login': login, + 'group': group, + 'meeting': meeting}, cc=cc_list) # deprecated? # log activity - #text = 'A message was sent to notify not having a session at IETF %d' % meeting.meeting_num - #add_session_activity(group,text,meeting,request.person) + # text = 'A message was sent to notify not having a session at IETF %d' % meeting.meeting_num + # add_session_activity(group,text,meeting,request.person) # redirect messages.success(request, 'A message was sent to notify not having a session at IETF %s' % meeting.number) - return redirect('ietf.secr.sreq.views.main') - -@role_required('Secretariat') -def tool_status(request): - ''' - This view handles locking and unlocking of the tool to the public. - ''' - meeting = get_meeting(days=14) - is_locked = check_app_locked(meeting=meeting) - - if request.method == 'POST': - button_text = request.POST.get('submit', '') - if button_text == 'Back': - return redirect('ietf.secr.sreq.views.main') - - form = ToolStatusForm(request.POST) - - if button_text == 'Lock': - if form.is_valid(): - meeting.session_request_lock_message = form.cleaned_data['message'] - meeting.save() - messages.success(request, 'Session Request Tool is now Locked') - return redirect('ietf.secr.sreq.views.main') - - elif button_text == 'Unlock': - meeting.session_request_lock_message = '' - meeting.save() - messages.success(request, 'Session Request Tool is now Unlocked') - return redirect('ietf.secr.sreq.views.main') - - else: - if is_locked: - message = get_lock_message() - initial = {'message': message} - form = ToolStatusForm(initial=initial) - else: - form = ToolStatusForm() + return redirect('ietf.meeting.views_session_request.list_view') - return render(request, 'sreq/tool_status.html', { - 'is_locked': is_locked, - 'form': form}, - ) -@role_required(*AUTHORIZED_ROLES) -def view(request, acronym, num = None): +@check_permissions +def cancel_request(request, acronym): ''' - This view displays the session request info + This view cancels a session request and sends a notification. + To cancel, or withdraw the request set status = deleted. + "canceled" status is used by the secretariat. + + NOTE: this function can also be called after a session has been + scheduled during the period when the session request tool is + reopened. In this case be sure to clear the timeslot assignment as well. 
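+
+    The timeslot clean-up referred to above amounts to a single ORM call
+    (shown here only for emphasis; the same statement appears in the body below):
+
+        session.timeslotassignments.all().delete()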
''' - meeting = get_meeting(num,days=14) + meeting = get_meeting(days=14) group = get_object_or_404(Group, acronym=acronym) - sessions = add_event_info_to_session_qs(Session.objects.filter(meeting=meeting, group=group)).filter(Q(current_status__isnull=True) | ~Q(current_status__in=('canceled','notmeet','deleted'))).order_by('id') - - # check if app is locked - is_locked = check_app_locked() - if is_locked: - messages.warning(request, "The Session Request Tool is closed") - - # if there are no session requests yet, redirect to new session request page - if not sessions: - if is_locked: - return redirect('ietf.secr.sreq.views.main') - else: - return redirect('ietf.secr.sreq.views.new', acronym=acronym) - - activities = [{ - 'act_date': e.time.strftime('%b %d, %Y'), - 'act_time': e.time.strftime('%H:%M:%S'), - 'activity': e.status.name, - 'act_by': e.by, - } for e in sessions[0].schedulingevent_set.select_related('status', 'by')] - - # gather outbound conflicts - outbound_dict = OrderedDict() - for obc in group.constraint_source_set.filter(meeting=meeting, name__is_group_conflict=True): - if obc.name.slug not in outbound_dict: - outbound_dict[obc.name.slug] = [] - outbound_dict[obc.name.slug].append(obc.target.acronym) - - session_conflicts = dict( - inbound=inbound_session_conflicts_as_string(group, meeting), - outbound=[dict(name=ConstraintName.objects.get(slug=slug), groups=' '.join(groups)) - for slug, groups in outbound_dict.items()], - ) + sessions = Session.objects.filter(meeting=meeting, group=group).order_by('id') + login = request.user.person - show_approve_button = False + # delete conflicts + Constraint.objects.filter(meeting=meeting, source=group).delete() - # if sessions include a 3rd session waiting approval and the user is a secretariat or AD of the group - # display approve button - if any(s.current_status == 'apprw' for s in sessions): - if has_role(request.user,'Secretariat') or group.parent.role_set.filter(name='ad',person=request.user.person): - show_approve_button = True + # mark sessions as deleted + for session in sessions: + SchedulingEvent.objects.create( + session=session, + status=SessionStatusName.objects.get(slug='deleted'), + by=request.user.person, + ) + session_changed(session) - # build session dictionary (like querydict from new session request form) for use in template - session = get_initial_session(sessions) + # clear schedule assignments if already scheduled + session.timeslotassignments.all().delete() - return render(request, 'sreq/view.html', { - 'can_edit': (not is_locked) or has_role(request.user, 'Secretariat'), - 'can_cancel': (not is_locked) or has_role(request.user, 'Secretariat'), - 'session': session, # legacy processed data - 'sessions': sessions, # actual session instances - 'activities': activities, - 'meeting': meeting, - 'group': group, - 'session_conflicts': session_conflicts, - 'show_approve_button': show_approve_button}, - ) + # send notification + (to_email, cc_list) = gather_address_lists('session_request_cancelled', group=group, person=login) + from_email = (settings.SESSION_REQUEST_FROM_EMAIL) + subject = '%s - Cancelling a meeting request for IETF %s' % (group.acronym, meeting.number) + send_mail(request, to_email, from_email, subject, 'meeting/session_cancel_notification.txt', + {'requester': get_requester_text(login, group), + 'meeting': meeting}, cc=cc_list) + messages.success(request, 'The %s Session Request has been cancelled' % group.acronym) + return redirect('ietf.meeting.views_session_request.list_view') diff --git 
a/ietf/message/admin.py b/ietf/message/admin.py index c2564c04b9..6a876cdc70 100644 --- a/ietf/message/admin.py +++ b/ietf/message/admin.py @@ -1,32 +1,104 @@ -from django.contrib import admin +# Copyright The IETF Trust 2012-2025, All Rights Reserved +from django.contrib import admin, messages +from django.db.models import QuerySet +from rangefilter.filters import DateRangeQuickSelectListFilterBuilder from ietf.message.models import Message, MessageAttachment, SendQueue, AnnouncementFrom +from ietf.message.tasks import retry_send_messages_by_pk_task + + +class MessageSentStatusListFilter(admin.SimpleListFilter): + """Filter Messages by whether or not they were sent""" + + title = "status" + parameter_name = "status" + + def lookups(self, request, model_admin): + return [ + ("sent", "Sent"), + ("unsent", "Not sent"), + ] + + def queryset(self, request, queryset): + if self.value() == "unsent": + return queryset.filter(sent__isnull=True) + elif self.value() == "sent": + return queryset.filter(sent__isnull=False) + class MessageAdmin(admin.ModelAdmin): - list_display = ["subject", "by", "time", "groups"] + list_display = ["sent_status", "display_subject", "by", "time", "groups"] + list_display_links = ["display_subject"] search_fields = ["subject", "body"] raw_id_fields = ["by", "related_groups", "related_docs"] + list_filter = [ + MessageSentStatusListFilter, + ("time", DateRangeQuickSelectListFilterBuilder()), + ] ordering = ["-time"] + actions = ["retry_send"] + + @admin.display(description="Subject", empty_value="(no subject)") + def display_subject(self, instance): + return instance.subject or None # None triggers the empty_value def groups(self, instance): return ", ".join(g.acronym for g in instance.related_groups.all()) + + @admin.display(description="Sent", boolean=True) + def sent_status(self, instance): + return instance.sent is not None + + @admin.action(description="Send selected messages if unsent") + def retry_send(self, request, queryset: QuerySet[Message]): + try: + retry_send_messages_by_pk_task.delay( + message_pks=list(queryset.values_list("pk", flat=True)), + resend=False, + ) + except Exception as err: + self.message_user( + request, + f"Error: {repr(err)}", + messages.ERROR, + ) + else: + self.message_user(request, "Messages queued for delivery", messages.SUCCESS) + + admin.site.register(Message, MessageAdmin) + class MessageAttachmentAdmin(admin.ModelAdmin): - list_display = ['id', 'message', 'filename', 'removed',] - raw_id_fields = ['message'] + list_display = [ + "id", + "message", + "filename", + "removed", + ] + raw_id_fields = ["message"] + + admin.site.register(MessageAttachment, MessageAttachmentAdmin) + class SendQueueAdmin(admin.ModelAdmin): list_display = ["time", "by", "message", "send_at", "sent_at"] list_filter = ["time", "send_at", "sent_at"] search_fields = ["message__body"] raw_id_fields = ["by", "message"] ordering = ["-time"] + + admin.site.register(SendQueue, SendQueueAdmin) + class AnnouncementFromAdmin(admin.ModelAdmin): - list_display = ['name', 'group', 'address', ] -admin.site.register(AnnouncementFrom, AnnouncementFromAdmin) + list_display = [ + "name", + "group", + "address", + ] +admin.site.register(AnnouncementFrom, AnnouncementFromAdmin) diff --git a/ietf/message/factories.py b/ietf/message/factories.py new file mode 100644 index 0000000000..72781512e4 --- /dev/null +++ b/ietf/message/factories.py @@ -0,0 +1,27 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +import factory + +from ietf.person.models import Person +from .models 
import Message, SendQueue + + +class MessageFactory(factory.django.DjangoModelFactory): + class Meta: + model = Message + + by = factory.LazyFunction(lambda: Person.objects.get(name="(System)")) + subject = factory.Faker("sentence") + to = factory.Faker("email") + frm = factory.Faker("email") + cc = factory.Faker("email") + bcc = factory.Faker("email") + body = factory.Faker("paragraph") + content_type = "text/plain" + + +class SendQueueFactory(factory.django.DjangoModelFactory): + class Meta: + model = SendQueue + + by = factory.LazyFunction(lambda: Person.objects.get(name="(System)")) + message = factory.SubFactory(MessageFactory) diff --git a/ietf/message/tasks.py b/ietf/message/tasks.py new file mode 100644 index 0000000000..1fdff7bea4 --- /dev/null +++ b/ietf/message/tasks.py @@ -0,0 +1,47 @@ +# Copyright The IETF Trust 2024 All Rights Reserved +# +# Celery task definitions +# +from celery import shared_task +from smtplib import SMTPException + +from ietf.message.utils import send_scheduled_message_from_send_queue, retry_send_messages +from ietf.message.models import SendQueue, Message +from ietf.utils import log +from ietf.utils.mail import log_smtp_exception, send_error_email + + +@shared_task +def send_scheduled_mail_task(): + """Send scheduled email + + This is equivalent to `ietf/bin/send-scheduled-mail all`, which was the only form used in the cron job. + """ + needs_sending = SendQueue.objects.filter(sent_at=None).select_related("message") + for s in needs_sending: + try: + send_scheduled_message_from_send_queue(s) + log.log('Sent scheduled message %s "%s"' % (s.id, s.message.subject)) + except SMTPException as e: + log_smtp_exception(e) + send_error_email(e) + + +@shared_task +def retry_send_messages_by_pk_task(message_pks: list, resend=False): + """Task to retry sending Messages by PK + + Sends Messages whose PK is included in the list. + Only previously unsent messages are sent unless `resend` is true. 
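+
+    A minimal invocation sketch (assuming the task is dispatched through Celery,
+    as the admin action in ietf/message/admin.py does; the PKs are illustrative):
+
+        retry_send_messages_by_pk_task.delay(message_pks=[17, 42], resend=False)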
+ """ + log.log( + "retry_send_messages_by_pk_task: " + "retrying send of Message PKs [{}] (resend={})".format( + ", ".join(str(pk) for pk in message_pks), + resend, + ) + ) + retry_send_messages( + messages=Message.objects.filter(pk__in=message_pks), + resend=resend, + ) diff --git a/ietf/message/tests.py b/ietf/message/tests.py index a027df4473..e1bad9a1e6 100644 --- a/ietf/message/tests.py +++ b/ietf/message/tests.py @@ -1,8 +1,9 @@ # Copyright The IETF Trust 2013-2020, All Rights Reserved # -*- coding: utf-8 -*- - - import datetime +from unittest import mock + +from smtplib import SMTPException from django.urls import reverse as urlreverse from django.utils import timezone @@ -10,8 +11,10 @@ import debug # pyflakes:ignore from ietf.group.factories import GroupFactory +from ietf.message.factories import MessageFactory, SendQueueFactory from ietf.message.models import Message, SendQueue -from ietf.message.utils import send_scheduled_message_from_send_queue +from ietf.message.tasks import send_scheduled_mail_task, retry_send_messages_by_pk_task +from ietf.message.utils import send_scheduled_message_from_send_queue, retry_send_messages from ietf.person.models import Person from ietf.utils.mail import outbox, send_mail_text, send_mail_message, get_payload_text from ietf.utils.test_utils import TestCase @@ -128,3 +131,75 @@ def test_send_mime_announcement(self): self.assertTrue("This is a test" in outbox[-1]["Subject"]) self.assertTrue("--NextPart" in outbox[-1].as_string()) self.assertTrue(SendQueue.objects.get(id=q.id).sent_at) + + +class UtilsTests(TestCase): + @mock.patch("ietf.message.utils.send_mail_message") + def test_retry_send_messages(self, mock_send_mail_message): + sent_message = MessageFactory(sent=timezone.now()) + unsent_messages = MessageFactory.create_batch(2, sent=None) + + # Send the sent message and one of the unsent messages + retry_send_messages( + Message.objects.filter(pk__in=[ + sent_message.pk, + unsent_messages[0].pk, + ]), + resend=False, + ) + self.assertEqual(mock_send_mail_message.call_count, 1) + self.assertEqual( + mock_send_mail_message.call_args.args[1], + unsent_messages[0], + ) + + mock_send_mail_message.reset_mock() + # Once again, send the sent message and one of the unsent messages + # (we can use the same one because our mock prevented it from having + # its status updated to sent) + retry_send_messages( + Message.objects.filter(pk__in=[ + sent_message.pk, + unsent_messages[0].pk, + ]), + resend=True, + ) + self.assertEqual(mock_send_mail_message.call_count, 2) + self.assertCountEqual( + [call_args.args[1] for call_args in mock_send_mail_message.call_args_list], + [sent_message, unsent_messages[0]], + ) + + +class TaskTests(TestCase): + @mock.patch("ietf.message.tasks.log_smtp_exception") + @mock.patch("ietf.message.tasks.send_scheduled_message_from_send_queue") + def test_send_scheduled_mail_task(self, mock_send_message, mock_log_smtp_exception): + not_yet_sent = SendQueueFactory() + SendQueueFactory(sent_at=timezone.now()) # already sent + send_scheduled_mail_task() + self.assertEqual(mock_send_message.call_count, 1) + self.assertEqual(mock_send_message.call_args[0], (not_yet_sent,)) + self.assertFalse(mock_log_smtp_exception.called) + + mock_send_message.reset_mock() + mock_send_message.side_effect = SMTPException + send_scheduled_mail_task() + self.assertEqual(mock_send_message.call_count, 1) + self.assertEqual(mock_send_message.call_args[0], (not_yet_sent,)) + self.assertTrue(mock_log_smtp_exception.called) + + 
@mock.patch("ietf.message.tasks.retry_send_messages") + def test_retry_send_messages_by_pk_task(self, mock_retry_send): + msgs = MessageFactory.create_batch(3) + MessageFactory() # an extra message that won't be resent + + retry_send_messages_by_pk_task([msg.pk for msg in msgs], resend=False) + called_with_messages = mock_retry_send.call_args.kwargs["messages"] + self.assertCountEqual(msgs, called_with_messages) + self.assertFalse(mock_retry_send.call_args.kwargs["resend"]) + + retry_send_messages_by_pk_task([msg.pk for msg in msgs], resend=True) + called_with_messages = mock_retry_send.call_args.kwargs["messages"] + self.assertCountEqual(msgs, called_with_messages) + self.assertTrue(mock_retry_send.call_args.kwargs["resend"]) diff --git a/ietf/message/utils.py b/ietf/message/utils.py index 2601eccab8..74448ca7c9 100644 --- a/ietf/message/utils.py +++ b/ietf/message/utils.py @@ -1,13 +1,17 @@ # Copyright The IETF Trust 2012-2020, All Rights Reserved # -*- coding: utf-8 -*- +import email +import email.utils +import re +import smtplib -import re, email - +from django.db.models import QuerySet from django.utils import timezone from django.utils.encoding import force_str -from ietf.utils.mail import send_mail_text, send_mail_mime +from ietf.utils import log +from ietf.utils.mail import send_mail_text, send_mail_mime, send_mail_message from ietf.message.models import Message first_dot_on_line_re = re.compile(r'^\.', re.MULTILINE) @@ -58,3 +62,29 @@ def send_scheduled_message_from_send_queue(queue_item): queue_item.message.sent = queue_item.sent_at queue_item.message.save() + + +def retry_send_messages(messages: QuerySet[Message], resend=False): + """Attempt delivery of Messages""" + if not resend: + # only include sent messages on explicit request + for already_sent in messages.filter(sent__isnull=False): + assert already_sent.sent is not None # appease mypy type checking + log.log( + f"retry_send_messages: skipping {already_sent.pk} " + f"(already sent {already_sent.sent.isoformat(timespec='milliseconds')})" + ) + messages = messages.filter(sent__isnull=True) + for msg in messages: + to = ",".join(a[1] for a in email.utils.getaddresses([msg.to])) + try: + send_mail_message(None, msg) + log.log( + f'retry_send_messages: ' + f'sent {msg.pk} {msg.frm} -> {to} "{msg.subject.strip()}"' + ) + except smtplib.SMTPException as e: + log.log( + f'retry_send_messages: ' + f'Failure {e}: {msg.pk} {msg.frm} -> {to} "{msg.subject.strip()}"' + ) diff --git a/ietf/message/views.py b/ietf/message/views.py index e4cca63017..355dcdd8d2 100644 --- a/ietf/message/views.py +++ b/ietf/message/views.py @@ -1,3 +1,4 @@ +# Copyright The IETF Trust 2013-2025, All Rights Reserved from django.shortcuts import render, get_object_or_404 from ietf.message.models import Message diff --git a/ietf/middleware.py b/ietf/middleware.py index 48146abf5e..fa2e8efd0c 100644 --- a/ietf/middleware.py +++ b/ietf/middleware.py @@ -8,6 +8,7 @@ from django.http import HttpResponsePermanentRedirect from ietf.utils.log import log, exc_parts from ietf.utils.mail import log_smtp_exception +from opentelemetry.propagate import inject import re import smtplib import unicodedata @@ -17,45 +18,61 @@ def sql_log_middleware(get_response): def sql_log(request): response = get_response(request) for q in connection.queries: - if re.match('(update|insert)', q['sql'], re.IGNORECASE): - log(q['sql']) + if re.match("(update|insert)", q["sql"], re.IGNORECASE): + log(q["sql"]) return response + return sql_log + class SMTPExceptionMiddleware(object): def 
__init__(self, get_response): self.get_response = get_response + def __call__(self, request): return self.get_response(request) + def process_exception(self, request, exception): if isinstance(exception, smtplib.SMTPException): (extype, value, tb) = log_smtp_exception(exception) - return render(request, 'email_failed.html', - {'exception': extype, 'args': value, 'traceback': "".join(tb)} ) + return render( + request, + "email_failed.html", + {"exception": extype, "args": value, "traceback": "".join(tb)}, + ) return None + class Utf8ExceptionMiddleware(object): def __init__(self, get_response): self.get_response = get_response + def __call__(self, request): return self.get_response(request) + def process_exception(self, request, exception): if isinstance(exception, OperationalError): extype, e, tb = exc_parts() if e.args[0] == 1366: log("Database 4-byte utf8 exception: %s: %s" % (extype, e)) - return render(request, 'utf8_4byte_failed.html', - {'exception': extype, 'args': e.args, 'traceback': "".join(tb)} ) + return render( + request, + "utf8_4byte_failed.html", + {"exception": extype, "args": e.args, "traceback": "".join(tb)}, + ) return None + def redirect_trailing_period_middleware(get_response): def redirect_trailing_period(request): response = get_response(request) if response.status_code == 404 and request.path.endswith("."): return HttpResponsePermanentRedirect(request.path.rstrip(".")) return response + return redirect_trailing_period + def unicode_nfkc_normalization_middleware(get_response): def unicode_nfkc_normalization(request): """Do Unicode NFKC normalization to turn ligatures into individual characters. @@ -65,9 +82,30 @@ def unicode_nfkc_normalization(request): There are probably other elements of a request which may need this normalization too, but let's put that in as it comes up, rather than guess ahead. 
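+
+        For example (illustrative, using the stdlib directly; U+FB01 is the
+        "fi" ligature):
+
+            >>> unicodedata.normalize("NFKC", "\ufb01le")
+            'file'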
""" - request.META["PATH_INFO"] = unicodedata.normalize('NFKC', request.META["PATH_INFO"]) - request.path_info = unicodedata.normalize('NFKC', request.path_info) + request.META["PATH_INFO"] = unicodedata.normalize( + "NFKC", request.META["PATH_INFO"] + ) + request.path_info = unicodedata.normalize("NFKC", request.path_info) response = get_response(request) return response + return unicode_nfkc_normalization - + + +def is_authenticated_header_middleware(get_response): + """Middleware to add an is-authenticated header to the response""" + def add_header(request): + response = get_response(request) + response["X-Datatracker-Is-Authenticated"] = "yes" if request.user.is_authenticated else "no" + return response + + return add_header + +def add_otel_traceparent_header(get_response): + """Middleware to add the OpenTelemetry traceparent id header to the response""" + def add_header(request): + response = get_response(request) + inject(response) + return response + + return add_header diff --git a/ietf/name/admin.py b/ietf/name/admin.py index 2458da37d9..b89d6d141c 100644 --- a/ietf/name/admin.py +++ b/ietf/name/admin.py @@ -3,6 +3,7 @@ from ietf.name.models import ( AgendaTypeName, + AttendanceTypeName, BallotPositionName, ConstraintName, ContinentName, @@ -29,6 +30,7 @@ LiaisonStatementTagName, MeetingTypeName, NomineePositionStateName, + RegistrationTicketTypeName, ReviewRequestStateName, ReviewResultName, ReviewTypeName, @@ -55,6 +57,7 @@ from ietf.stats.models import CountryAlias +from ietf.utils.admin import SaferTabularInline class NameAdmin(admin.ModelAdmin): @@ -84,7 +87,7 @@ class GroupTypeNameAdmin(NameAdmin): admin.site.register(GroupTypeName, GroupTypeNameAdmin) -class CountryAliasInline(admin.TabularInline): +class CountryAliasInline(SaferTabularInline): model = CountryAlias extra = 1 @@ -137,6 +140,7 @@ class ProceedingsMaterialTypeNameAdmin(NameAdmin): admin.site.register(AgendaFilterTypeName, NameAdmin) admin.site.register(AgendaTypeName, NameAdmin) admin.site.register(AppealArtifactTypeName, NameAdmin) +admin.site.register(AttendanceTypeName, NameAdmin) admin.site.register(BallotPositionName, NameAdmin) admin.site.register(ConstraintName, NameAdmin) admin.site.register(ContinentName, NameAdmin) @@ -158,6 +162,7 @@ class ProceedingsMaterialTypeNameAdmin(NameAdmin): admin.site.register(LiaisonStatementTagName, NameAdmin) admin.site.register(MeetingTypeName, NameAdmin) admin.site.register(NomineePositionStateName, NameAdmin) +admin.site.register(RegistrationTicketTypeName, NameAdmin) admin.site.register(ReviewRequestStateName, NameAdmin) admin.site.register(ReviewAssignmentStateName, NameAdmin) admin.site.register(ReviewResultName, NameAdmin) diff --git a/ietf/name/fixtures/names.json b/ietf/name/fixtures/names.json index 4dfe1574a2..64e26e503a 100644 --- a/ietf/name/fixtures/names.json +++ b/ietf/name/fixtures/names.json @@ -312,7 +312,7 @@ "order": 42, "slug": "watching", "type": "draft-iesg", - "used": true + "used": false }, "model": "doc.state", "pk": 11 @@ -650,7 +650,7 @@ }, { "fields": { - "desc": "
    4.2.1. Call for Adoption by WG Issued\r\n\r\n The \"Call for Adoption by WG Issued\" state should be used to indicate when an I-D is being considered for adoption by an IETF WG. An I-D that is in this state is actively being considered for adoption and has not yet achieved consensus, preference, or selection in the WG.\r\n\r\n This state may be used to describe an I-D that someone has asked a WG to consider for adoption, if the WG Chair has agreed with the request. This state may also be used to identify an I-D that a WG Chair asked an author to write specifically for consideration as a candidate WG item [WGDTSPEC], and/or an I-D that is listed as a 'candidate draft' in the WG's charter.\r\n\r\n Under normal conditions, it should not be possible for an I-D to be in the \"Call for Adoption by WG Issued\" state in more than one working group at the same time. This said, it is not uncommon for authors to \"shop\" their I-Ds to more than one WG at a time, with the hope of getting their documents adopted somewhere.\r\n\r\n After this state is implemented in the Datatracker, an I-D that is in the \"Call for Adoption by WG Issued\" state will not be able to be \"shopped\" to any other WG without the consent of the WG Chairs and the responsible ADs impacted by the shopping.\r\n\r\n Note that Figure 1 includes an arc leading from this state to outside of the WG state machine. This illustrates that some I-Ds that are considered do not get adopted as WG drafts. An I-D that is not adopted as a WG draft will transition out of the WG state machine and revert back to having no stream-specific state; however, the status change history log of the I-D will record that the I-D was previously in the \"Call for Adoption by WG Issued\" state.", + "desc": "A call for adoption of the individual submission document has been issued by the Working Group (WG) chairs. This call is still running but the WG has not yet reached consensus for adoption.", "name": "Call For Adoption By WG Issued", "next_states": [ 36, @@ -666,7 +666,7 @@ }, { "fields": { - "desc": "4.2.2. Adopted by a WG\r\n\r\n The \"Adopted by a WG\" state describes an individual submission I-D that an IETF WG has agreed to adopt as one of its WG drafts.\r\n\r\n WG Chairs who use this state will be able to clearly indicate when their WGs adopt individual submission I-Ds. This will facilitate the Datatracker's ability to correctly capture \"Replaces\" information for WG drafts and correct \"Replaced by\" information for individual submission I-Ds that have been replaced by WG drafts.\r\n\r\n This state is needed because the Datatracker uses the filename of an I-D as a key to search its database for status information about the I-D, and because the filename of a WG I-D is supposed to be different from the filename of an individual submission I-D. The filename of an individual submission I-D will typically be formatted as 'draft-author-wgname-topic-nn'.\r\n\r\n The filename of a WG document is supposed to be formatted as 'draft- ietf-wgname-topic-nn'.\r\n\r\n An individual I-D that is adopted by a WG may take weeks or months to be resubmitted by the author as a new (version-00) WG draft. 
If the \"Adopted by a WG\" state is not used, the Datatracker has no way to determine that an I-D has been adopted until a new version of the I-D is submitted to the WG by the author and until the I-D is approved for posting by a WG Chair.", + "desc": "The individual submission document has been adopted by the Working Group (WG), but a WG document replacing this document with the typical naming convention of 'draft- ietf-wgname-topic-nn' has not yet been submitted.", "name": "Adopted by a WG", "next_states": [ 38 @@ -681,7 +681,7 @@ }, { "fields": { - "desc": "4.2.3. Adopted for WG Info Only\r\n\r\n The \"Adopted for WG Info Only\" state describes a document that contains useful information for the WG that adopted it, but the document is not intended to be published as an RFC. The WG will not actively develop the contents of the I-D or progress it for publication as an RFC. The only purpose of the I-D is to provide information for internal use by the WG.", + "desc": "The document is adopted by the Working Group (WG) for its internal use. The WG has decided that it will not pursue publication of it as an RFC.", "name": "Adopted for WG Info Only", "next_states": [], "order": 3, @@ -694,7 +694,7 @@ }, { "fields": { - "desc": "4.2.4. WG Document\r\n\r\n The \"WG Document\" state describes an I-D that has been adopted by an IETF WG and is being actively developed.\r\n\r\n A WG Chair may transition an I-D into the \"WG Document\" state at any time as long as the I-D is not being considered or developed in any other WG.\r\n\r\n Alternatively, WG Chairs may rely upon new functionality to be added to the Datatracker to automatically move version-00 drafts into the \"WG Document\" state as described in Section 4.1.\r\n\r\n Under normal conditions, it should not be possible for an I-D to be in the \"WG Document\" state in more than one WG at a time. This said, I-Ds may be transferred from one WG to another with the consent of the WG Chairs and the responsible ADs.", + "desc": "The document has been adopted by the Working Group (WG) and is under development. A document can only be adopted by one WG at a time. However, a document may be transferred between WGs.", "name": "WG Document", "next_states": [ 39, @@ -712,7 +712,7 @@ }, { "fields": { - "desc": "4.2.5. Parked WG Document\r\n\r\n A \"Parked WG Document\" is an I-D that has lost its author or editor, is waiting for another document to be written or for a review to be completed, or cannot be progressed by the working group for some other reason.\r\n\r\n Some of the annotation tags described in Section 4.3 may be used in conjunction with this state to indicate why an I-D has been parked, and/or what may need to happen for the I-D to be un-parked.\r\n\r\n Parking a WG draft will not prevent it from expiring; however, this state can be used to indicate why the I-D has stopped progressing in the WG.\r\n\r\n A \"Parked WG Document\" that is not expired may be transferred from one WG to another with the consent of the WG Chairs and the responsible ADs.", + "desc": "The Working Group (WG) document is in a temporary state where it will not be actively developed. The reason for the pause is explained via a datatracker comments section.", "name": "Parked WG Document", "next_states": [ 38 @@ -727,7 +727,7 @@ }, { "fields": { - "desc": "4.2.6. Dead WG Document\r\n\r\n A \"Dead WG Document\" is an I-D that has been abandoned. Note that 'Dead' is not always a final state for a WG I-D. 
If consensus is subsequently achieved, a \"Dead WG Document\" may be resurrected. A \"Dead WG Document\" that is not resurrected will eventually expire.\r\n\r\n Note that an I-D that is declared to be \"Dead\" in one WG and that is not expired may be transferred to a non-dead state in another WG with the consent of the WG Chairs and the responsible ADs.", + "desc": "The Working Group (WG) document has been abandoned by the WG. No further development is planned in this WG. A decision to resume work on this document and move it out of this state is possible.", "name": "Dead WG Document", "next_states": [ 38 @@ -742,7 +742,7 @@ }, { "fields": { - "desc": "4.2.7. In WG Last Call\r\n\r\n A document \"In WG Last Call\" is an I-D for which a WG Last Call (WGLC) has been issued and is in progress.\r\n\r\n Note that conducting a WGLC is an optional part of the IETF WG process, per Section 7.4 of RFC 2418 [RFC2418].\r\n\r\n If a WG Chair decides to conduct a WGLC on an I-D, the \"In WG Last Call\" state can be used to track the progress of the WGLC. The Chair may configure the Datatracker to send a WGLC message to one or more mailing lists when the Chair moves the I-D into this state. The WG Chair may also be able to select a different set of mailing lists for a different document undergoing a WGLC; some documents may deserve coordination with other WGs.\r\n\r\n A WG I-D in this state should remain \"In WG Last Call\" until the WG Chair moves it to another state. The WG Chair may configure the Datatracker to send an e-mail after a specified period of time to remind or 'nudge' the Chair to conclude the WGLC and to determine the next state for the document.\r\n\r\n It is possible for one WGLC to lead into another WGLC for the same document. For example, an I-D that completed a WGLC as an \"Informational\" document may need another WGLC if a decision is taken to convert the I-D into a Standards Track document.", + "desc": "The Working Group (WG) document is currently subject to an active WG Last Call (WGLC) review per Section 7.4 of RFC2418.", "name": "In WG Last Call", "next_states": [ 38, @@ -759,7 +759,7 @@ }, { "fields": { - "desc": "4.2.8. Waiting for WG Chair Go-Ahead\r\n\r\n A WG Chair may wish to place an I-D that receives a lot of comments during a WGLC into the \"Waiting for WG Chair Go-Ahead\" state. This state describes an I-D that has undergone a WGLC; however, the Chair is not yet ready to call consensus on the document.\r\n\r\n If comments from the WGLC need to be responded to, or a revision to the I-D is needed, the Chair may place an I-D into this state until all of the WGLC comments are adequately addressed and the (possibly revised) document is in the I-D repository.", + "desc": "The Working Group (WG) document has completed Working Group Last Call (WGLC), but the WG chair(s) are not yet ready to call consensus on the document. The reasons for this may include comments from the WGLC need to be responded to, or a revision to the document is needed", "name": "Waiting for WG Chair Go-Ahead", "next_states": [ 41, @@ -775,7 +775,7 @@ }, { "fields": { - "desc": "4.2.9. WG Consensus: Waiting for Writeup\r\n\r\n A document in the \"WG Consensus: Waiting for Writeup\" state has essentially completed its development within the working group, and is nearly ready to be sent to the IESG for publication. The last thing to be done is the preparation of a protocol writeup by a Document Shepherd. 
The IESG requires that a document shepherd writeup be completed before publication of the I-D is requested. The IETF document shepherding process and the role of a WG Document Shepherd is described in RFC 4858 [RFC4858]\r\n\r\n A WG Chair may call consensus on an I-D without a formal WGLC and transition an I-D that was in the \"WG Document\" state directly into this state.\r\n\r\n The name of this state includes the words \"Waiting for Writeup\" because a good document shepherd writeup takes time to prepare.", + "desc": "The Working Group (WG) document has consensus to proceed to publication. However, the document is waiting for a document shepherd write-up per RFC4858.", "name": "WG Consensus: Waiting for Write-Up", "next_states": [ 44 @@ -790,7 +790,7 @@ }, { "fields": { - "desc": "4.2.10. Submitted to IESG for Publication\r\n\r\n This state describes a WG document that has been submitted to the IESG for publication and that has not been sent back to the working group for revision.\r\n\r\n An I-D in this state may be under review by the IESG, it may have been approved and be in the RFC Editor's queue, or it may have been published as an RFC. Other possibilities exist too. The document may be \"Dead\" (in the IESG state machine) or in a \"Do Not Publish\" state.", + "desc": "The Working Group (WG) document has left the WG and been submitted to the Internet Engineering Steering Group (IESG) for evaluation and publication. See the “IESG State” or “RFC Editor State” for further details on the state of the document.", "name": "Submitted to IESG for Publication", "next_states": [ 38 @@ -2020,7 +2020,7 @@ }, { "fields": { - "desc": "The document has been marked as a candidate for WG adoption by the WG Chair. This state can be used before a call for adoption is issued (and the document is put in the \"Call For Adoption By WG Issued\" state), to indicate that the document is in the queue for a call for adoption, even if none has been issued yet.", + "desc": "The individual submission document has been marked by the Working Group (WG) chairs as a candidate for adoption by the WG, but no adoption call has been started.", "name": "Candidate for WG Adoption", "next_states": [ 35 @@ -2152,7 +2152,7 @@ }, { "fields": { - "desc": "In some areas, it can be desirable to wait for multiple interoperable implementations before progressing a draft to be an RFC, and in some WGs this is required. This state should be entered after WG Last Call has completed.", + "desc": "The progression of this Working Group (WG) document towards publication is paused as it awaits implementation. The process governing the approach to implementations is WG-specific.", "name": "Waiting for Implementation", "next_states": [], "order": 8, @@ -2165,7 +2165,7 @@ }, { "fields": { - "desc": "Held by WG, see document history for details.", + "desc": "Held by Working Group (WG) chairs for administrative reasons. 
See document history for details.", "name": "Held by WG", "next_states": [], "order": 9, @@ -2565,6 +2565,97 @@ "model": "doc.state", "pk": 176 }, + { + "fields": { + "desc": "", + "name": "Published", + "next_states": [], + "order": 1, + "slug": "published", + "type": "rfc", + "used": true + }, + "model": "doc.state", + "pk": 177 + }, + { + "fields": { + "desc": "", + "name": "Active", + "next_states": [], + "order": 0, + "slug": "active", + "type": "narrativeminutes", + "used": true + }, + "model": "doc.state", + "pk": 178 + }, + { + "fields": { + "desc": "", + "name": "Deleted", + "next_states": [], + "order": 1, + "slug": "deleted", + "type": "narrativeminutes", + "used": true + }, + "model": "doc.state", + "pk": 179 + }, + { + "fields": { + "desc": "The editorial stream processing of this document is complete and it has been sent to the RFC Editor for publication. The document may be in the RFC Editor's queue, or it may have been published as an RFC; this state doesn't distinguish between different states occurring after the document has left the RSAB.", + "name": "Sent to the RFC Editor", + "next_states": [], + "order": 10, + "slug": "rfc-edit", + "type": "draft-stream-editorial", + "used": true + }, + "model": "doc.state", + "pk": 180 + }, + { + "fields": { + "desc": "The BOF request is spam", + "name": "Spam", + "next_states": [], + "order": 5, + "slug": "spam", + "type": "bofreq", + "used": true + }, + "model": "doc.state", + "pk": 181 + }, + { + "fields": { + "desc": "The statement has been marked historic", + "name": "Historic", + "next_states": [], + "order": 0, + "slug": "historic", + "type": "statement", + "used": false + }, + "model": "doc.state", + "pk": 182 + }, + { + "fields": { + "desc": "The statement is no longer active", + "name": "Inactive", + "next_states": [], + "order": 0, + "slug": "inactive", + "type": "statement", + "used": true + }, + "model": "doc.state", + "pk": 183 + }, { "fields": { "label": "State" @@ -2572,6 +2663,13 @@ "model": "doc.statetype", "pk": "agenda" }, + { + "fields": { + "label": "bcp state" + }, + "model": "doc.statetype", + "pk": "bcp" + }, { "fields": { "label": "State" @@ -2614,13 +2712,6 @@ "model": "doc.statetype", "pk": "draft" }, - { - "fields": { - "label": "IANA state" - }, - "model": "doc.statetype", - "pk": "draft-iana" - }, { "fields": { "label": "IANA Action state" @@ -2691,6 +2782,13 @@ "model": "doc.statetype", "pk": "draft-stream-ise" }, + { + "fields": { + "label": "fyi state" + }, + "model": "doc.statetype", + "pk": "fyi" + }, { "fields": { "label": "State" @@ -2712,6 +2810,13 @@ "model": "doc.statetype", "pk": "minutes" }, + { + "fields": { + "label": "State" + }, + "model": "doc.statetype", + "pk": "narrativeminutes" + }, { "fields": { "label": "State" @@ -2747,6 +2852,13 @@ "model": "doc.statetype", "pk": "review" }, + { + "fields": { + "label": "State" + }, + "model": "doc.statetype", + "pk": "rfc" + }, { "fields": { "label": "Shepherd's Writeup State" @@ -2775,11 +2887,20 @@ "model": "doc.statetype", "pk": "statement" }, + { + "fields": { + "label": "std state" + }, + "model": "doc.statetype", + "pk": "std" + }, { "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "special", "agenda_type": "ietf", "create_wiki": true, @@ -2787,10 +2908,24 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"matman\",\n \"ad\",\n 
\"chair\",\n \"lead\",\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\",\n \"lead\",\n \"delegate\"\n]", + "default_used_roles": [ + "matman", + "ad", + "chair", + "lead", + "delegate" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair", + "lead", + "delegate" + ], "has_chartering_process": false, "has_default_chat": true, "has_documents": false, @@ -2800,15 +2935,29 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"lead\",\n \"delegate\",\n \"matman\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "lead", + "delegate", + "matman" + ], "need_parent": false, "parent_types": [ "ietf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"lead\",\n \"delegate\",\n \"matman\"\n]", - "session_purposes": "[\n \"presentation\"\n]", + "role_order": [ + "chair", + "lead", + "delegate", + "matman" + ], + "session_purposes": [ + "presentation" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -2818,7 +2967,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "heading", "agenda_type": "ietf", "create_wiki": false, @@ -2826,10 +2977,19 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"member\",\n \"chair\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\"\n]", + "default_used_roles": [ + "member", + "chair" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -2839,13 +2999,22 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair" + ], "need_parent": false, "parent_types": [], "req_subm_approval": false, - "role_order": "[\n \"chair\"\n]", - "session_purposes": "[\n \"closed_meeting\",\n \"officehours\"\n]", + "role_order": [ + "chair" + ], + "session_purposes": [ + "closed_meeting", + "officehours" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -2855,7 +3024,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": true, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": true, @@ -2863,10 +3034,26 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"ad\",\n \"chair\",\n \"secr\",\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\",\n \"Area Director\"\n]", - "groupman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "ad", + "chair", + "secr", + "delegate" + ], + "docman_roles": [ + "chair", + "delegate", + "secr" + ], + "groupman_authroles": [ + "Secretariat", + "Area Director" + ], + "groupman_roles": [ + "ad", + "chair", + "delegate" + ], 
"has_chartering_process": false, "has_default_chat": false, "has_documents": true, @@ -2876,16 +3063,28 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "ad", + "chair", + "delegate", + "secr" + ], "need_parent": false, "parent_types": [ "area", "ietf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"regular\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -2895,7 +3094,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"ad\"\n]", + "admin_roles": [ + "ad" + ], "agenda_filter_type": "heading", "agenda_type": "ietf", "create_wiki": true, @@ -2903,10 +3104,22 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"ad\",\n \"liaison_contact\",\n \"liaison_cc_contact\"\n]", - "docman_roles": "[\n \"ad\",\n \"delegate\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"ad\"\n]", + "default_used_roles": [ + "ad", + "liaison_contact", + "liaison_cc_contact" + ], + "docman_roles": [ + "ad", + "delegate", + "secr" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "ad" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -2916,15 +3129,27 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "ad", + "chair", + "delegate", + "secr" + ], "need_parent": true, "parent_types": [ "ietf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"regular\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "regular" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -2934,7 +3159,10 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\",\n \"secr\"\n]", + "admin_roles": [ + "chair", + "secr" + ], "agenda_filter_type": "special", "agenda_type": "ad", "create_wiki": true, @@ -2942,10 +3170,25 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"ad\",\n \"chair\",\n \"reviewer\",\n \"secr\",\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"ad\",\n \"secr\",\n \"delegate\",\n \"chair\"\n]", + "default_used_roles": [ + "ad", + "chair", + "reviewer", + "secr", + "delegate" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "ad", + "secr", + "delegate", + "chair" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -2955,15 +3198,31 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "ad", + "chair", + 
"delegate", + "secr" + ], "need_parent": true, "parent_types": [ "area" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"open_meeting\",\n \"presentation\",\n \"regular\",\n \"social\",\n \"tutorial\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "open_meeting", + "presentation", + "regular", + "social", + "tutorial" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -2973,7 +3232,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": false, @@ -2981,10 +3242,19 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"chair\",\n \"member\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\"\n]", + "default_used_roles": [ + "chair", + "member" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair" + ], "has_chartering_process": false, "has_default_chat": true, "has_documents": false, @@ -2994,13 +3264,23 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair" + ], "need_parent": false, "parent_types": [], "req_subm_approval": false, - "role_order": "[\n \"chair\",\n \"member\"\n]", - "session_purposes": "[\n \"officehours\",\n \"regular\"\n]", + "role_order": [ + "chair", + "member" + ], + "session_purposes": [ + "officehours", + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3010,7 +3290,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": true, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": false, @@ -3018,10 +3300,18 @@ "customize_workflow": true, "default_parent": "", "default_tab": "ietf.group.views.group_documents", - "default_used_roles": "[\n \"chair\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\"\n]", + "default_used_roles": [ + "chair" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair" + ], "has_chartering_process": false, "has_default_chat": true, "has_documents": true, @@ -3031,15 +3321,23 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair" + ], "need_parent": false, "parent_types": [ "rfcedtyp" ], "req_subm_approval": true, - "role_order": "[\n \"chair\"\n]", - "session_purposes": "[\n \"regular\"\n]", + "role_order": [ + "chair" + ], + "session_purposes": [ + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3049,7 +3347,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": false, @@ -3057,10 +3357,16 @@ "customize_workflow": false, "default_parent": "", 
"default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"chair\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[]", + "default_used_roles": [ + "chair" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3070,15 +3376,26 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "delegate" + ], "need_parent": false, "parent_types": [ "ietf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"closed_meeting\",\n \"regular\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "closed_meeting", + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3088,7 +3405,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"lead\"\n]", + "admin_roles": [ + "lead" + ], "agenda_filter_type": "none", "agenda_type": "ad", "create_wiki": false, @@ -3096,10 +3415,27 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"member\",\n \"chair\",\n \"lead\",\n \"delegate\"\n]", - "docman_roles": "[\n \"lead\",\n \"chair\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\",\n \"IAB\"\n]", - "groupman_roles": "[\n \"lead\",\n \"chair\",\n \"secr\",\n \"delegate\"\n]", + "default_used_roles": [ + "member", + "chair", + "lead", + "delegate" + ], + "docman_roles": [ + "lead", + "chair", + "secr" + ], + "groupman_authroles": [ + "Secretariat", + "IAB" + ], + "groupman_roles": [ + "lead", + "chair", + "secr", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": true, @@ -3109,15 +3445,29 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"lead\",\n \"chair\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "lead", + "chair", + "secr" + ], "need_parent": false, "parent_types": [ "ietf" ], "req_subm_approval": false, - "role_order": "[\n \"lead\",\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"closed_meeting\",\n \"officehours\",\n \"open_meeting\"\n]", + "role_order": [ + "lead", + "chair", + "secr" + ], + "session_purposes": [ + "closed_meeting", + "officehours", + "open_meeting" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3127,7 +3477,71 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], + "agenda_filter_type": "none", + "agenda_type": "ietf", + "create_wiki": false, + "custom_group_roles": false, + "customize_workflow": false, + "default_parent": "iab", + "default_tab": "ietf.group.views.group_about", + "default_used_roles": [], + "docman_roles": [ + "ad", + "chair", + "delegate", + "secr" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "ad", + "chair" + ], + "has_chartering_process": false, + "has_default_chat": true, + "has_documents": true, + "has_meetings": true, + "has_milestones": false, + "has_nonsession_materials": false, + 
"has_reviews": false, + "has_session_materials": true, + "is_schedulable": false, + "material_types": [ + "slides" + ], + "matman_roles": [ + "ad", + "chair", + "delegate", + "secr" + ], + "need_parent": true, + "parent_types": [ + "ietf" + ], + "req_subm_approval": false, + "role_order": [ + "chair", + "secr", + "member" + ], + "session_purposes": "[\"regular\"]", + "show_on_agenda": false + }, + "model": "group.groupfeatures", + "pk": "iabworkshop" + }, + { + "fields": { + "about_page": "ietf.group.views.group_about", + "acts_like_wg": false, + "admin_roles": [ + "chair" + ], "agenda_filter_type": "none", "agenda_type": "ietf", "create_wiki": false, @@ -3135,10 +3549,18 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"auth\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\"\n]", + "default_used_roles": [ + "auth" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3148,13 +3570,21 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair" + ], "need_parent": false, "parent_types": [], "req_subm_approval": false, - "role_order": "[\n \"chair\"\n]", - "session_purposes": "[\n \"officehours\"\n]", + "role_order": [ + "chair" + ], + "session_purposes": [ + "officehours" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3164,7 +3594,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "none", "agenda_type": "ad", "create_wiki": false, @@ -3172,10 +3604,19 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "delegate" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3185,13 +3626,24 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "\"[]\"", - "matman_roles": "[\n \"chair\",\n \"delegate\",\n \"member\"\n]", + "material_types": "[]", + "matman_roles": [ + "chair", + "delegate", + "member" + ], "need_parent": false, "parent_types": [], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"delegate\",\n \"member\"\n]", - "session_purposes": "[\n \"closed_meeting\",\n \"open_meeting\"\n]", + "role_order": [ + "chair", + "delegate", + "member" + ], + "session_purposes": [ + "closed_meeting", + "open_meeting" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3201,7 +3653,10 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\",\n \"lead\"\n]", + "admin_roles": [ + "chair", + "lead" + ], "agenda_filter_type": "heading", "agenda_type": "ietf", "create_wiki": false, @@ -3209,10 +3664,26 @@ 
"customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"ad\",\n \"member\",\n \"comdir\",\n \"delegate\",\n \"execdir\",\n \"recman\",\n \"secr\",\n \"trac-editor\",\n \"trac-admin\",\n \"chair\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "ad", + "member", + "comdir", + "delegate", + "execdir", + "recman", + "secr", + "chair" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3222,15 +3693,29 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "delegate" + ], "need_parent": false, "parent_types": [ "ietf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"admin\",\n \"plenary\",\n \"presentation\",\n \"social\",\n \"officehours\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "admin", + "plenary", + "presentation", + "social", + "officehours" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3240,7 +3725,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "none", "agenda_type": "ad", "create_wiki": false, @@ -3248,10 +3735,16 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"ad\"\n]", - "docman_roles": "[\n \"auth\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[]", + "default_used_roles": [ + "ad" + ], + "docman_roles": [ + "auth" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3261,15 +3754,20 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[]", + "material_types": [ + "slides" + ], + "matman_roles": [], "need_parent": true, "parent_types": [ "area" ], "req_subm_approval": false, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3279,7 +3777,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "heading", "agenda_type": "ietf", "create_wiki": false, @@ -3287,10 +3787,20 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"member\",\n \"atlarge\",\n \"chair\",\n \"delegate\"\n]", - "docman_roles": "[]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "member", + "atlarge", + "chair", + "delegate" + ], + "docman_roles": [], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": 
false, "has_default_chat": false, "has_documents": false, @@ -3300,15 +3810,24 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "delegate", + "secr" + ], "need_parent": false, "parent_types": [ "irtf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3318,7 +3837,10 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\",\n \"lead\"\n]", + "admin_roles": [ + "chair", + "lead" + ], "agenda_filter_type": "heading", "agenda_type": "ietf", "create_wiki": false, @@ -3326,10 +3848,20 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"chair\",\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "chair", + "delegate" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": true, @@ -3339,13 +3871,24 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "delegate" + ], "need_parent": false, "parent_types": [], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"delegate\"\n]", - "session_purposes": "[\n \"officehours\"\n]", + "role_order": [ + "chair", + "delegate" + ], + "session_purposes": [ + "officehours", + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3355,7 +3898,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "none", "agenda_type": null, "create_wiki": false, @@ -3363,10 +3908,17 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"chair\",\n \"ceo\"\n]", - "docman_roles": "[]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\"\n]", + "default_used_roles": [ + "chair", + "ceo" + ], + "docman_roles": [], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3376,15 +3928,27 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "secr" + ], "need_parent": false, "parent_types": [ "isoc" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"officehours\",\n \"open_meeting\",\n \"presentation\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "officehours", + "open_meeting", + "presentation" + ], "show_on_agenda": false }, 
"model": "group.groupfeatures", @@ -3394,7 +3958,10 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\",\n \"advisor\"\n]", + "admin_roles": [ + "chair", + "advisor" + ], "agenda_filter_type": "none", "agenda_type": "side", "create_wiki": true, @@ -3402,10 +3969,23 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"member\",\n \"advisor\",\n \"liaison\",\n \"chair\",\n \"techadv\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\",\n \"advisor\"\n]", + "default_used_roles": [ + "member", + "advisor", + "liaison", + "chair", + "techadv" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair", + "advisor" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3415,15 +3995,26 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair" + ], "need_parent": false, "parent_types": [ "area" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"member\",\n \"advisor\"\n]", - "session_purposes": "[\n \"closed_meeting\",\n \"officehours\"\n]", + "role_order": [ + "chair", + "member", + "advisor" + ], + "session_purposes": [ + "closed_meeting", + "officehours" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3433,7 +4024,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"lead\"\n]", + "admin_roles": [ + "lead" + ], "agenda_filter_type": "normal", "agenda_type": "ad", "create_wiki": false, @@ -3441,10 +4034,27 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"member\",\n \"chair\",\n \"lead\",\n \"delegate\"\n]", - "docman_roles": "[\n \"lead\",\n \"chair\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\",\n \"IAB\"\n]", - "groupman_roles": "[\n \"lead\",\n \"chair\",\n \"secr\",\n \"delegate\"\n]", + "default_used_roles": [ + "member", + "chair", + "lead", + "delegate" + ], + "docman_roles": [ + "lead", + "chair", + "secr" + ], + "groupman_authroles": [ + "Secretariat", + "IAB" + ], + "groupman_roles": [ + "lead", + "chair", + "secr", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": true, @@ -3454,15 +4064,28 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"lead\",\n \"chair\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "lead", + "chair", + "secr" + ], "need_parent": false, "parent_types": [ "ietf" ], "req_subm_approval": false, - "role_order": "[\n \"lead\",\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"regular\",\n \"tutorial\"\n]", + "role_order": [ + "lead", + "chair", + "secr" + ], + "session_purposes": [ + "regular", + "tutorial" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3472,7 +4095,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": true, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": true, @@ 
-3480,10 +4105,24 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"chair\",\n \"secr\",\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\",\n \"IRTF Chair\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "chair", + "secr", + "delegate" + ], + "docman_roles": [ + "chair", + "delegate", + "secr" + ], + "groupman_authroles": [ + "Secretariat", + "IRTF Chair" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": true, @@ -3493,15 +4132,26 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "delegate", + "secr" + ], "need_parent": false, "parent_types": [ "irtf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"regular\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3511,7 +4161,10 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\",\n \"secr\"\n]", + "admin_roles": [ + "chair", + "secr" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": true, @@ -3519,10 +4172,24 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.review_requests", - "default_used_roles": "[\n \"ad\",\n \"chair\",\n \"reviewer\",\n \"secr\",\n \"delegate\"\n]", - "docman_roles": "[\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"ad\",\n \"secr\",\n \"delegate\"\n]", + "default_used_roles": [ + "ad", + "chair", + "reviewer", + "secr", + "delegate" + ], + "docman_roles": [ + "secr" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "ad", + "secr", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3532,15 +4199,26 @@ "has_reviews": true, "has_session_materials": true, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"ad\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "ad", + "secr" + ], "need_parent": true, "parent_types": [ "area" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"open_meeting\",\n \"social\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "open_meeting", + "social" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3550,7 +4228,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": false, @@ -3558,10 +4238,19 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"auth\",\n \"chair\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[\n \"chair\"\n]", + "default_used_roles": [ + "auth", + "chair" + ], + "docman_roles": [ + "chair" + ], + 
"groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [ + "chair" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3571,13 +4260,23 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair" + ], "need_parent": false, "parent_types": [], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\"\n]", - "session_purposes": "[\n \"officehours\",\n \"regular\"\n]", + "role_order": [ + "chair", + "secr" + ], + "session_purposes": [ + "officehours", + "regular" + ], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3587,7 +4286,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": true, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": true, @@ -3595,10 +4296,25 @@ "customize_workflow": true, "default_parent": "irtf", "default_tab": "ietf.group.views.group_documents", - "default_used_roles": "[\n \"chair\",\n \"techadv\",\n \"secr\",\n \"delegate\"\n]", - "docman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\",\n \"IRTF Chair\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "chair", + "techadv", + "secr", + "delegate" + ], + "docman_roles": [ + "chair", + "delegate", + "secr" + ], + "groupman_authroles": [ + "Secretariat", + "IRTF Chair" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": true, "has_default_chat": true, "has_documents": true, @@ -3608,15 +4324,27 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "delegate", + "secr" + ], "need_parent": true, "parent_types": [ "irtf" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", - "session_purposes": "[\n \"regular\"\n]", + "role_order": [ + "chair", + "delegate", + "secr" + ], + "session_purposes": [ + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3626,7 +4354,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "none", "agenda_type": null, "create_wiki": false, @@ -3634,10 +4364,23 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"liaiman\",\n \"ceo\",\n \"coord\",\n \"auth\",\n \"chair\",\n \"liaison_contact\",\n \"liaison_cc_contact\"\n]", - "docman_roles": "[\n \"liaiman\",\n \"matman\"\n]", - "groupman_authroles": "[\n \"Secretariat\"\n]", - "groupman_roles": "[]", + "default_used_roles": [ + "liaiman", + "ceo", + "coord", + "auth", + "chair", + "liaison_contact", + "liaison_cc_contact" + ], + "docman_roles": [ + "liaiman", + "matman" + ], + "groupman_authroles": [ + "Secretariat" + ], + "groupman_roles": [], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3647,16 +4390,20 @@ "has_reviews": false, "has_session_materials": false, "is_schedulable": false, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[]", + 
"material_types": [ + "slides" + ], + "matman_roles": [], "need_parent": false, "parent_types": [ "area", "sdo" ], "req_subm_approval": true, - "role_order": "[\n \"liaiman\"\n]", - "session_purposes": "[]", + "role_order": [ + "liaiman" + ], + "session_purposes": [], "show_on_agenda": false }, "model": "group.groupfeatures", @@ -3666,7 +4413,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": false, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "special", "agenda_type": "ietf", "create_wiki": true, @@ -3674,10 +4423,28 @@ "customize_workflow": false, "default_parent": "", "default_tab": "ietf.group.views.group_about", - "default_used_roles": "[\n \"ad\",\n \"member\",\n \"delegate\",\n \"secr\",\n \"liaison\",\n \"atlarge\",\n \"chair\",\n \"matman\",\n \"techadv\"\n]", - "docman_roles": "[\n \"chair\"\n]", - "groupman_authroles": "[\n \"Secretariat\",\n \"Area Director\"\n]", - "groupman_roles": "[\n \"chair\",\n \"delegate\"\n]", + "default_used_roles": [ + "ad", + "member", + "delegate", + "secr", + "liaison", + "atlarge", + "chair", + "matman", + "techadv" + ], + "docman_roles": [ + "chair" + ], + "groupman_authroles": [ + "Secretariat", + "Area Director" + ], + "groupman_roles": [ + "chair", + "delegate" + ], "has_chartering_process": false, "has_default_chat": false, "has_documents": false, @@ -3687,15 +4454,30 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"chair\",\n \"matman\"\n]", + "material_types": [ + "slides" + ], + "matman_roles": [ + "chair", + "matman" + ], "need_parent": false, "parent_types": [ "area" ], "req_subm_approval": false, - "role_order": "[\n \"chair\",\n \"member\",\n \"matman\"\n]", - "session_purposes": "[\n \"coding\",\n \"presentation\",\n \"social\",\n \"tutorial\"\n]", + "role_order": [ + "chair", + "member", + "matman" + ], + "session_purposes": [ + "coding", + "open_meeting", + "presentation", + "social", + "tutorial" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -3705,7 +4487,9 @@ "fields": { "about_page": "ietf.group.views.group_about", "acts_like_wg": true, - "admin_roles": "[\n \"chair\"\n]", + "admin_roles": [ + "chair" + ], "agenda_filter_type": "normal", "agenda_type": "ietf", "create_wiki": true, @@ -3713,10 +4497,32 @@ "customize_workflow": true, "default_parent": "", "default_tab": "ietf.group.views.group_documents", - "default_used_roles": "[\n \"ad\",\n \"editor\",\n \"delegate\",\n \"secr\",\n \"chair\",\n \"matman\",\n \"techadv\",\n \"liaison_contact\",\n \"liaison_cc_contact\"\n]", - "docman_roles": "[\n \"chair\",\n \"delegate\",\n \"secr\"\n]", - "groupman_authroles": "[\n \"Secretariat\",\n \"Area Director\"\n]", - "groupman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "default_used_roles": [ + "ad", + "editor", + "delegate", + "secr", + "chair", + "matman", + "techadv", + "liaison_contact", + "liaison_cc_contact" + ], + "docman_roles": [ + "chair", + "delegate", + "secr" + ], + "groupman_authroles": [ + "Secretariat", + "Area Director" + ], + "groupman_roles": [ + "ad", + "chair", + "delegate", + "secr" + ], "has_chartering_process": true, "has_default_chat": true, "has_documents": true, @@ -3726,15 +4532,28 @@ "has_reviews": false, "has_session_materials": true, "is_schedulable": true, - "material_types": "[\n \"slides\"\n]", - "matman_roles": "[\n \"ad\",\n \"chair\",\n \"delegate\",\n \"secr\"\n]", + "material_types": 
[ + "slides" + ], + "matman_roles": [ + "ad", + "chair", + "delegate", + "secr" + ], "need_parent": false, "parent_types": [ "area" ], "req_subm_approval": true, - "role_order": "[\n \"chair\",\n \"secr\",\n \"delegate\"\n]", - "session_purposes": "[\n \"regular\"\n]", + "role_order": [ + "chair", + "secr", + "delegate" + ], + "session_purposes": [ + "regular" + ], "show_on_agenda": true }, "model": "group.groupfeatures", @@ -4311,6 +5130,34 @@ "model": "mailtrigger.mailtrigger", "pk": "doc_telechat_details_changed" }, + { + "fields": { + "cc": [], + "desc": "Recipients when a working group call for adoption is issued", + "to": [ + "doc_authors", + "doc_group_chairs", + "doc_group_mail_list", + "doc_shepherd" + ] + }, + "model": "mailtrigger.mailtrigger", + "pk": "doc_wg_call_for_adoption_issued" + }, + { + "fields": { + "cc": [], + "desc": "Recipients when a working group last call is issued", + "to": [ + "doc_authors", + "doc_group_chairs", + "doc_group_mail_list", + "doc_shepherd" + ] + }, + "model": "mailtrigger.mailtrigger", + "pk": "doc_wg_last_call_issued" + }, { "fields": { "cc": [], @@ -4687,13 +5534,30 @@ "liaison_response_contacts", "liaison_technical_contacts" ], - "desc": "Recipient for a message when a new liaison statement is posted", + "desc": "Recipients for a message when a new incoming liaison statement is posted", "to": [ "liaison_to_contacts" ] }, "model": "mailtrigger.mailtrigger", - "pk": "liaison_statement_posted" + "pk": "liaison_statement_posted_incoming" + }, + { + "fields": { + "cc": [ + "liaison_cc", + "liaison_coordinators", + "liaison_from_contact", + "liaison_response_contacts", + "liaison_technical_contacts" + ], + "desc": "Recipients for a message when a new outgoing liaison statement is posted", + "to": [ + "liaison_to_contacts" + ] + }, + "model": "mailtrigger.mailtrigger", + "pk": "liaison_statement_posted_outgoing" }, { "fields": { @@ -5054,6 +5918,21 @@ "model": "mailtrigger.mailtrigger", "pk": "review_completed_httpdir_early" }, + { + "fields": { + "cc": [ + "ietf_last_call", + "review_doc_all_parties", + "review_doc_group_mail_list" + ], + "desc": "Recipients when a httpdir Last Call review is completed", + "to": [ + "review_team_mail_list" + ] + }, + "model": "mailtrigger.mailtrigger", + "pk": "review_completed_httpdir_lc" + }, { "fields": { "cc": [ @@ -5169,22 +6048,51 @@ ] }, "model": "mailtrigger.mailtrigger", - "pk": "review_completed_iotdir_early" + "pk": "review_completed_iotdir_early" + }, + { + "fields": { + "cc": [ + "ietf_last_call", + "review_doc_all_parties", + "review_doc_group_mail_list" + ], + "desc": "Recipients when a iotdir ReviewTypeName object review is completed", + "to": [ + "review_team_mail_list" + ] + }, + "model": "mailtrigger.mailtrigger", + "pk": "review_completed_iotdir_lc" + }, + { + "fields": { + "cc": [ + "ietf_last_call", + "review_doc_all_parties", + "review_doc_group_mail_list" + ], + "desc": "Recipients when a iotdir ReviewTypeName object review is completed", + "to": [ + "review_team_mail_list" + ] + }, + "model": "mailtrigger.mailtrigger", + "pk": "review_completed_iotdir_telechat" }, { "fields": { "cc": [ - "ietf_last_call", "review_doc_all_parties", "review_doc_group_mail_list" ], - "desc": "Recipients when a iotdir ReviewTypeName object review is completed", + "desc": "Recipients when a opsdir ReviewTypeName object review is completed", "to": [ "review_team_mail_list" ] }, "model": "mailtrigger.mailtrigger", - "pk": "review_completed_iotdir_lc" + "pk": "review_completed_opsdir_early" }, { "fields": { @@ 
-5193,17 +6101,18 @@ "review_doc_all_parties", "review_doc_group_mail_list" ], - "desc": "Recipients when a iotdir ReviewTypeName object review is completed", + "desc": "Recipients when a opsdir ReviewTypeName object review is completed", "to": [ "review_team_mail_list" ] }, "model": "mailtrigger.mailtrigger", - "pk": "review_completed_iotdir_telechat" + "pk": "review_completed_opsdir_lc" }, { "fields": { "cc": [ + "ietf_last_call", "review_doc_all_parties", "review_doc_group_mail_list" ], @@ -5213,7 +6122,7 @@ ] }, "model": "mailtrigger.mailtrigger", - "pk": "review_completed_opsdir_early" + "pk": "review_completed_opsdir_telechat" }, { "fields": { @@ -5222,13 +6131,13 @@ "review_doc_all_parties", "review_doc_group_mail_list" ], - "desc": "Recipients when a opsdir ReviewTypeName object review is completed", + "desc": "Recipients when a perfmetrdir IETF Last Call review is completed", "to": [ "review_team_mail_list" ] }, "model": "mailtrigger.mailtrigger", - "pk": "review_completed_opsdir_lc" + "pk": "review_completed_perfmetrdir_lc" }, { "fields": { @@ -5237,13 +6146,13 @@ "review_doc_all_parties", "review_doc_group_mail_list" ], - "desc": "Recipients when a opsdir ReviewTypeName object review is completed", + "desc": "Recipients when a perfmetrdir Telechat review is completed", "to": [ "review_team_mail_list" ] }, "model": "mailtrigger.mailtrigger", - "pk": "review_completed_opsdir_telechat" + "pk": "review_completed_perfmetrdir_telechat" }, { "fields": { @@ -5849,7 +6758,7 @@ { "fields": { "desc": "The document's authors", - "template": "{% if doc.type_id == \"draft\" %}<{{doc.name}}@ietf.org>{% endif %}" + "template": "{% if doc.type_id == \"draft\" or doc.type_id == \"rfc\" %}<{{doc.name}}@ietf.org>{% endif %}" }, "model": "mailtrigger.recipient", "pk": "doc_authors" @@ -6190,6 +7099,14 @@ "model": "mailtrigger.recipient", "pk": "liaison_coordinators" }, + { + "fields": { + "desc": "Email address of the formal sender of the statement", + "template": "{{liaison.from_contact}}" + }, + "model": "mailtrigger.recipient", + "pk": "liaison_from_contact" + }, { "fields": { "desc": "The assigned liaison manager for an external group ", @@ -6680,6 +7597,66 @@ "model": "name.appealartifacttypename", "pk": "response" }, + { + "fields": { + "desc": "", + "name": "ANRW Onsite", + "order": 0, + "used": true + }, + "model": "name.attendancetypename", + "pk": "anrw_onsite" + }, + { + "fields": { + "desc": "", + "name": "Hackathon Onsite", + "order": 0, + "used": true + }, + "model": "name.attendancetypename", + "pk": "hackathon_onsite" + }, + { + "fields": { + "desc": "", + "name": "Hackathon Remote", + "order": 0, + "used": true + }, + "model": "name.attendancetypename", + "pk": "hackathon_remote" + }, + { + "fields": { + "desc": "", + "name": "Onsite", + "order": 0, + "used": true + }, + "model": "name.attendancetypename", + "pk": "onsite" + }, + { + "fields": { + "desc": "", + "name": "Remote", + "order": 0, + "used": true + }, + "model": "name.attendancetypename", + "pk": "remote" + }, + { + "fields": { + "desc": "", + "name": "Unknown", + "order": 0, + "used": true + }, + "model": "name.attendancetypename", + "pk": "unknown" + }, { "fields": { "blocking": false, @@ -9998,6 +10975,17 @@ "model": "name.dbtemplatetypename", "pk": "rst" }, + { + "fields": { + "desc": "", + "name": "became RFC", + "order": 0, + "revname": "came from draft", + "used": true + }, + "model": "name.docrelationshipname", + "pk": "became_rfc" + }, { "fields": { "desc": "", @@ -10009,6 +10997,17 @@ "model": 
"name.docrelationshipname", "pk": "conflrev" }, + { + "fields": { + "desc": "This document contains other documents (e.g., STDs contain RFCs)", + "name": "Contains", + "order": 0, + "revname": "Is part of", + "used": true + }, + "model": "name.docrelationshipname", + "pk": "contains" + }, { "fields": { "desc": "Approval for downref", @@ -10525,6 +11524,17 @@ "model": "name.doctypename", "pk": "agenda" }, + { + "fields": { + "desc": "", + "name": "Best Current Practice", + "order": 0, + "prefix": "bcp", + "used": true + }, + "model": "name.doctypename", + "pk": "bcp" + }, { "fields": { "desc": "", @@ -10591,6 +11601,17 @@ "model": "name.doctypename", "pk": "draft" }, + { + "fields": { + "desc": "", + "name": "For Your Information", + "order": 0, + "prefix": "fyi", + "used": true + }, + "model": "name.doctypename", + "pk": "fyi" + }, { "fields": { "desc": "", @@ -10624,6 +11645,17 @@ "model": "name.doctypename", "pk": "minutes" }, + { + "fields": { + "desc": "", + "name": "Narrative Minutes", + "order": 0, + "prefix": "narrative-minutes", + "used": true + }, + "model": "name.doctypename", + "pk": "narrativeminutes" + }, { "fields": { "desc": "", @@ -10668,6 +11700,17 @@ "model": "name.doctypename", "pk": "review" }, + { + "fields": { + "desc": "", + "name": "RFC", + "order": 0, + "prefix": "rfc", + "used": true + }, + "model": "name.doctypename", + "pk": "rfc" + }, { "fields": { "desc": "", @@ -10712,6 +11755,17 @@ "model": "name.doctypename", "pk": "statement" }, + { + "fields": { + "desc": "", + "name": "Standard", + "order": 0, + "prefix": "std", + "used": true + }, + "model": "name.doctypename", + "pk": "std" + }, { "fields": { "desc": "", @@ -11046,6 +12100,17 @@ "model": "name.extresourcename", "pk": "mailing_list_archive" }, + { + "fields": { + "desc": "ORCID", + "name": "ORCID", + "order": 0, + "type": "url", + "used": true + }, + "model": "name.extresourcename", + "pk": "orcid" + }, { "fields": { "desc": "Related Implementations", @@ -11081,8 +12146,8 @@ }, { "fields": { - "desc": "Issuer Tracker", - "name": "Issuer Tracker", + "desc": "Issue Tracker", + "name": "Issue Tracker", "order": 0, "type": "url", "used": true @@ -11530,6 +12595,17 @@ "model": "name.grouptypename", "pk": "iabasg" }, + { + "fields": { + "desc": "IAB Workshop", + "name": "IAB Workshop", + "order": 0, + "used": true, + "verbose_name": "IAB Workshop" + }, + "model": "name.grouptypename", + "pk": "iabworkshop" + }, { "fields": { "desc": "", @@ -11731,7 +12807,7 @@ { "fields": { "default_offset_days": -57, - "desc": "Cut-off date for BOF proposal requests. To request a BOF, please see instructions at https://www.ietf.org/how/bofs/bof-procedures on Requesting a BOF", + "desc": "Cut-off date for BOF proposal requests. To request a __BoF__ session use the [IETF BoF Request Tool](/doc/bof-requests).", "name": "Cut-off preliminary BOF requests", "order": 0, "used": true @@ -11742,7 +12818,7 @@ { "fields": { "default_offset_days": -57, - "desc": "Preliminary BOF proposals requested. To request a BOF, please see instructions on requesting a BOF at https://www.ietf.org/how/bofs/bof-procedures/", + "desc": "Preliminary BOF proposals requested. To request a __BoF__ session use the [IETF BoF Request Tool](/doc/bof-requests).", "name": "Preliminary BOF proposals requested", "order": 0, "used": false @@ -11775,7 +12851,7 @@ { "fields": { "default_offset_days": -43, - "desc": "Cut-off date for BOF proposal requests to Area Directors at UTC 23:59", + "desc": "Cut-off date for BOF proposal requests to Area Directors at UTC 23:59. 
To request a __BoF__ session use the [IETF BoF Request Tool](/doc/bof-requests).", "name": "Cut-off BOF scheduling Requests", "order": 0, "used": false @@ -11819,7 +12895,7 @@ { "fields": { "default_offset_days": -43, - "desc": "Cut-off date for requests to schedule Working Group Meetings at UTC 23:59", + "desc": "Cut-off date for requests to schedule Working Group Meetings at UTC 23:59. To request a __Working Group__ session, use the [IETF Meeting Session Request Tool](/secr/sreq/).", "name": "Cut-off WG scheduling Requests", "order": 0, "used": true @@ -11874,7 +12950,7 @@ { "fields": { "default_offset_days": -12, - "desc": "Internet-Draft submission cut-off (for all Internet-Drafts, including -00) by UTC 23:59", + "desc": "Internet-Draft submission cut-off (for all Internet-Drafts, including -00) by UTC 23:59. Upload using the [I-D Submission Tool](/submit/).", "name": "I-D Cutoff", "order": 0, "used": true @@ -11907,7 +12983,7 @@ { "fields": { "default_offset_days": -82, - "desc": "IETF Online Registration Opens", + "desc": "IETF Online Registration Opens [Register Here](https://www.ietf.org/how/meetings/register/).", "name": "Registration Opens", "order": 0, "used": true @@ -11918,7 +12994,7 @@ { "fields": { "default_offset_days": -89, - "desc": "Working Group and BOF scheduling begins", + "desc": "Working Group and BOF scheduling begins. To request a Working Group session, use the [IETF Meeting Session Request Tool](/secr/sreq/). If you are working on a BOF request, it is highly recommended to tell the IESG now by sending an [email to iesg@ietf.org](mailtp:iesg@ietf.org) to get advance help with the request.", "name": "Scheduling Opens", "order": 0, "used": true @@ -12642,6 +13718,86 @@ "model": "name.proceedingsmaterialtypename", "pk": "wiki" }, + { + "fields": { + "desc": "", + "name": "ANRW Combo", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "anrw_combo" + }, + { + "fields": { + "desc": "", + "name": "ANRW Only", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "anrw_only" + }, + { + "fields": { + "desc": "", + "name": "Hackathon Combo", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "hackathon_combo" + }, + { + "fields": { + "desc": "", + "name": "Hackathon Only", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "hackathon_only" + }, + { + "fields": { + "desc": "", + "name": "One Day", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "one_day" + }, + { + "fields": { + "desc": "", + "name": "Student", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "student" + }, + { + "fields": { + "desc": "", + "name": "Unknown", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "unknown" + }, + { + "fields": { + "desc": "", + "name": "Week Pass", + "order": 0, + "used": true + }, + "model": "name.registrationtickettypename", + "pk": "week_pass" + }, { "fields": { "desc": "The reviewer has accepted the assignment", @@ -12975,7 +14131,7 @@ { "fields": { "desc": "", - "name": "Last Call", + "name": "IETF Last Call", "order": 2, "used": true }, @@ -13152,6 +14308,16 @@ "model": "name.rolename", "pk": "lead" }, + { + "fields": { + "desc": "", + "name": "Lead Maintainer", + "order": 0, + "used": true + }, + "model": "name.rolename", + "pk": "leadmaintainer" + }, { "fields": { "desc": "", @@ -13177,7 +14343,7 @@ "desc": "", 
"name": "Liaison CC Contact", "order": 9, - "used": true + "used": false }, "model": "name.rolename", "pk": "liaison_cc_contact" @@ -13187,11 +14353,21 @@ "desc": "", "name": "Liaison Contact", "order": 8, - "used": true + "used": false }, "model": "name.rolename", "pk": "liaison_contact" }, + { + "fields": { + "desc": "Coordinates liaison handling for the IAB", + "name": "Liaison Coordinator", + "order": 14, + "used": true + }, + "model": "name.rolename", + "pk": "liaison_coordinator" + }, { "fields": { "desc": "", @@ -13277,7 +14453,7 @@ "desc": "Assigned permission TRAC_ADMIN in datatracker-managed Trac Wiki instances", "name": "Trac Admin", "order": 0, - "used": true + "used": false }, "model": "name.rolename", "pk": "trac-admin" @@ -13287,7 +14463,7 @@ "desc": "Provides log-in permission to restricted Trac instances. Used by the generate_apache_perms management command, called from ../../scripts/Cron-runner", "name": "Trac Editor", "order": 0, - "used": true + "used": false }, "model": "name.rolename", "pk": "trac-editor" @@ -13428,7 +14604,10 @@ "name": "Administrative", "on_agenda": true, "order": 5, - "timeslot_types": "[\n \"other\",\n \"reg\"\n]", + "timeslot_types": [ + "other", + "reg" + ], "used": true }, "model": "name.sessionpurposename", @@ -13440,7 +14619,10 @@ "name": "Closed meeting", "on_agenda": false, "order": 10, - "timeslot_types": "[\n \"other\",\n \"regular\"\n]", + "timeslot_types": [ + "other", + "regular" + ], "used": true }, "model": "name.sessionpurposename", @@ -13452,7 +14634,9 @@ "name": "Coding", "on_agenda": true, "order": 4, - "timeslot_types": "[\n \"other\"\n]", + "timeslot_types": [ + "other" + ], "used": true }, "model": "name.sessionpurposename", @@ -13464,7 +14648,7 @@ "name": "None", "on_agenda": true, "order": 0, - "timeslot_types": "[]", + "timeslot_types": [], "used": false }, "model": "name.sessionpurposename", @@ -13476,7 +14660,9 @@ "name": "Office hours", "on_agenda": true, "order": 3, - "timeslot_types": "[\n \"other\"\n]", + "timeslot_types": [ + "other" + ], "used": true }, "model": "name.sessionpurposename", @@ -13488,7 +14674,9 @@ "name": "Open meeting", "on_agenda": true, "order": 9, - "timeslot_types": "[\n \"other\"\n]", + "timeslot_types": [ + "other" + ], "used": true }, "model": "name.sessionpurposename", @@ -13500,7 +14688,9 @@ "name": "Plenary", "on_agenda": true, "order": 7, - "timeslot_types": "[\n \"plenary\"\n]", + "timeslot_types": [ + "plenary" + ], "used": true }, "model": "name.sessionpurposename", @@ -13512,7 +14702,10 @@ "name": "Presentation", "on_agenda": true, "order": 8, - "timeslot_types": "[\n \"other\",\n \"regular\"\n]", + "timeslot_types": [ + "other", + "regular" + ], "used": true }, "model": "name.sessionpurposename", @@ -13524,7 +14717,9 @@ "name": "Regular", "on_agenda": true, "order": 1, - "timeslot_types": "[\n \"regular\"\n]", + "timeslot_types": [ + "regular" + ], "used": true }, "model": "name.sessionpurposename", @@ -13536,7 +14731,10 @@ "name": "Social", "on_agenda": true, "order": 6, - "timeslot_types": "[\n \"break\",\n \"other\"\n]", + "timeslot_types": [ + "break", + "other" + ], "used": true }, "model": "name.sessionpurposename", @@ -13548,7 +14746,9 @@ "name": "Tutorial", "on_agenda": true, "order": 2, - "timeslot_types": "[\n \"other\"\n]", + "timeslot_types": [ + "other" + ], "used": true }, "model": "name.sessionpurposename", @@ -13826,7 +15026,7 @@ }, { "fields": { - "desc": "Legacy stream", + "desc": "Legacy", "name": "Legacy", "order": 6, "used": true @@ -16557,49 +17757,5 @@ }, 
"model": "stats.countryalias", "pk": 303 - }, - { - "fields": { - "command": "xym", - "switch": "--version", - "time": "2023-08-22T07:09:39.542Z", - "used": true, - "version": "xym 0.7.0" - }, - "model": "utils.versioninfo", - "pk": 1 - }, - { - "fields": { - "command": "pyang", - "switch": "--version", - "time": "2023-08-22T07:09:39.881Z", - "used": true, - "version": "pyang 2.5.3" - }, - "model": "utils.versioninfo", - "pk": 2 - }, - { - "fields": { - "command": "yanglint", - "switch": "--version", - "time": "2023-08-22T07:09:39.899Z", - "used": true, - "version": "yanglint SO 1.9.2" - }, - "model": "utils.versioninfo", - "pk": 3 - }, - { - "fields": { - "command": "xml2rfc", - "switch": "--version", - "time": "2023-08-22T07:09:40.791Z", - "used": true, - "version": "xml2rfc 3.18.0" - }, - "model": "utils.versioninfo", - "pk": 4 } ] diff --git a/ietf/name/management/commands/generate_name_fixture.py b/ietf/name/management/commands/generate_name_fixture.py index bbf33e600e..ef30e54c73 100644 --- a/ietf/name/management/commands/generate_name_fixture.py +++ b/ietf/name/management/commands/generate_name_fixture.py @@ -77,7 +77,6 @@ def output(seq): from ietf.mailtrigger.models import MailTrigger, Recipient from ietf.meeting.models import BusinessConstraint from ietf.stats.models import CountryAlias - from ietf.utils.models import VersionInfo # Grab all ietf.name.models for n in dir(ietf.name.models): @@ -87,7 +86,7 @@ def output(seq): model_objects[model_name(item)] = list(item.objects.all().order_by('pk')) for m in ( BallotType, State, StateType, GroupFeatures, MailTrigger, Recipient, - CountryAlias, VersionInfo, BusinessConstraint ): + CountryAlias, BusinessConstraint ): model_objects[model_name(m)] = list(m.objects.all().order_by('pk')) for m in ( DBTemplate, ): diff --git a/ietf/name/migrations/0009_iabworkshops.py b/ietf/name/migrations/0009_iabworkshops.py new file mode 100644 index 0000000000..1819815860 --- /dev/null +++ b/ietf/name/migrations/0009_iabworkshops.py @@ -0,0 +1,29 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.db import migrations + +def forward(apps, schema_editor): + GroupTypeName = apps.get_model("name", "GroupTypeName") + GroupTypeName.objects.create( + slug = "iabworkshop", + name = "IAB Workshop", + desc = "IAB Workshop", + used = True, + order = 0, + verbose_name = "IAB Workshop", + + ) + +def reverse(apps, schema_editor): + GroupTypeName = apps.get_model("name", "GroupTypeName") + GroupTypeName.objects.filter(slug="iabworkshop").delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("name", "0008_removed_objfalse"), + ] + + operations = [ + migrations.RunPython(forward, reverse) + ] diff --git a/ietf/name/migrations/0010_rfc_doctype_names.py b/ietf/name/migrations/0010_rfc_doctype_names.py new file mode 100644 index 0000000000..8d7a565f23 --- /dev/null +++ b/ietf/name/migrations/0010_rfc_doctype_names.py @@ -0,0 +1,30 @@ +# Generated by Django 4.2.2 on 2023-06-14 20:39 + +from django.db import migrations + + +def forward(apps, schema_editor): + DocTypeName = apps.get_model("name", "DocTypeName") + DocTypeName.objects.get_or_create( + slug="rfc", + name="RFC", + used=True, + prefix="rfc", + ) + + DocRelationshipName = apps.get_model("name", "DocRelationshipName") + DocRelationshipName.objects.get_or_create( + slug="became_rfc", + name="became RFC", + used=True, + revname="came from draft", + ) + +class Migration(migrations.Migration): + dependencies = [ + ("name", "0009_iabworkshops"), + ] + + operations = [ + 
migrations.RunPython(forward), + ] diff --git a/ietf/name/migrations/0011_subseries.py b/ietf/name/migrations/0011_subseries.py new file mode 100644 index 0000000000..b3fe107924 --- /dev/null +++ b/ietf/name/migrations/0011_subseries.py @@ -0,0 +1,38 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.db import migrations + + +def forward(apps, schema_editor): + DocTypeName = apps.get_model("name", "DocTypeName") + DocRelationshipName = apps.get_model("name", "DocRelationshipName") + for slug, name, prefix in [ + ("std", "Standard", "std"), + ("bcp", "Best Current Practice", "bcp"), + ("fyi", "For Your Information", "fyi"), + ]: + DocTypeName.objects.create( + slug=slug, name=name, prefix=prefix, desc="", used=True + ) + DocRelationshipName.objects.create( + slug="contains", + name="Contains", + revname="Is part of", + desc="This document contains other documents (e.g., STDs contain RFCs)", + used=True, + ) + + +def reverse(apps, schema_editor): + DocTypeName = apps.get_model("name", "DocTypeName") + DocRelationshipName = apps.get_model("name", "DocRelationshipName") + DocTypeName.objects.filter(slug__in=["std", "bcp", "fyi"]).delete() + DocRelationshipName.objects.filter(slug="contains").delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("name", "0010_rfc_doctype_names"), + ] + + operations = [migrations.RunPython(forward, reverse)] diff --git a/ietf/name/migrations/0012_adjust_important_dates.py b/ietf/name/migrations/0012_adjust_important_dates.py new file mode 100644 index 0000000000..7a3252bb5c --- /dev/null +++ b/ietf/name/migrations/0012_adjust_important_dates.py @@ -0,0 +1,29 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.db import migrations + +def markdown_names(apps, schema_editor): + ImportantDateName = apps.get_model("name", "ImportantDateName") + changes = [ + ('bofproposals', "Preliminary BOF proposals requested. To request a __BoF__ session use the [IETF BoF Request Tool](/doc/bof-requests)."), + ('openreg', "IETF Online Registration Opens [Register Here](https://www.ietf.org/how/meetings/register/)."), + ('opensched', "Working Group and BOF scheduling begins. To request a Working Group session, use the [IETF Meeting Session Request Tool](/secr/sreq/). If you are working on a BOF request, it is highly recommended to tell the IESG now by sending an [email to iesg@ietf.org](mailtp:iesg@ietf.org) to get advance help with the request."), + ('cutoffwgreq', "Cut-off date for requests to schedule Working Group Meetings at UTC 23:59. To request a __Working Group__ session, use the [IETF Meeting Session Request Tool](/secr/sreq/)."), + ('idcutoff', "Internet-Draft submission cut-off (for all Internet-Drafts, including -00) by UTC 23:59. Upload using the [I-D Submission Tool](/submit/)."), + ('cutoffwgreq', "Cut-off date for requests to schedule Working Group Meetings at UTC 23:59. To request a __Working Group__ session, use the [IETF Meeting Session Request Tool](/secr/sreq/)."), + ('bofprelimcutoff', "Cut-off date for BOF proposal requests. To request a __BoF__ session use the [IETF BoF Request Tool](/doc/bof-requests)."), + ('cutoffbofreq', "Cut-off date for BOF proposal requests to Area Directors at UTC 23:59. 
To request a __BoF__ session use the [IETF BoF Request Tool](/doc/bof-requests)."), + ] + for slug, newDescription in changes: + datename = ImportantDateName.objects.get(pk=slug) # If the slug does not exist, then Django will throw an exception :-) + datename.desc = newDescription + datename.save() + +class Migration(migrations.Migration): + dependencies = [ + ("name", "0011_subseries"), + ] + + operations = [ + migrations.RunPython(markdown_names), + ] diff --git a/ietf/name/migrations/0013_narrativeminutes.py b/ietf/name/migrations/0013_narrativeminutes.py new file mode 100644 index 0000000000..89aa75a371 --- /dev/null +++ b/ietf/name/migrations/0013_narrativeminutes.py @@ -0,0 +1,35 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.db import migrations, models + + +def forward(apps, schema_editor): + DocTypeName = apps.get_model("name", "DocTypeName") + DocTypeName.objects.create( + slug="narrativeminutes", + name="Narrative Minutes", + desc="", + used=True, + order=0, + prefix="narrative-minutes", + ) + + +def reverse(apps, schema_editor): + DocTypeName = apps.get_model("name", "DocTypeName") + DocTypeName.objects.filter(slug="narrativeminutes").delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("name", "0012_adjust_important_dates"), + ] + + operations = [ + migrations.AlterField( + model_name="doctypename", + name="prefix", + field=models.CharField(default="", max_length=32), + ), + migrations.RunPython(forward, reverse), + ] diff --git a/ietf/name/migrations/0014_change_legacy_stream_desc.py b/ietf/name/migrations/0014_change_legacy_stream_desc.py new file mode 100644 index 0000000000..8297e86274 --- /dev/null +++ b/ietf/name/migrations/0014_change_legacy_stream_desc.py @@ -0,0 +1,21 @@ +# Copyright The IETF Trust 2024, All Rights Reserved + +from django.db import migrations + +def forward(apps, schema_editor): + StreamName = apps.get_model("name", "StreamName") + StreamName.objects.filter(pk="legacy").update(desc="Legacy") + +def reverse(apps, schema_editor): + StreamName = apps.get_model("name", "StreamName") + StreamName.objects.filter(pk="legacy").update(desc="Legacy stream") + +class Migration(migrations.Migration): + + dependencies = [ + ("name", "0013_narrativeminutes"), + ] + + operations = [ + migrations.RunPython(forward, reverse) + ] diff --git a/ietf/name/migrations/0015_last_call_name.py b/ietf/name/migrations/0015_last_call_name.py new file mode 100644 index 0000000000..ac210a274f --- /dev/null +++ b/ietf/name/migrations/0015_last_call_name.py @@ -0,0 +1,22 @@ +# Copyright 2025, IETF Trust + +from django.db import migrations + + +def forward(apps, schema_editor): + ReviewTypeName = apps.get_model("name", "ReviewTypeName") + ReviewTypeName.objects.filter(slug="lc").update(name="IETF Last Call") + +def reverse(apps, schema_editor): + ReviewTypeName = apps.get_model("name", "ReviewTypeName") + ReviewTypeName.objects.filter(slug="lc").update(name="Last Call") + +class Migration(migrations.Migration): + + dependencies = [ + ("name", "0014_change_legacy_stream_desc"), + ] + + operations = [ + migrations.RunPython(forward, reverse) + ] diff --git a/ietf/name/migrations/0016_attendancetypename_registrationtickettypename.py b/ietf/name/migrations/0016_attendancetypename_registrationtickettypename.py new file mode 100644 index 0000000000..9376d3a4c6 --- /dev/null +++ b/ietf/name/migrations/0016_attendancetypename_registrationtickettypename.py @@ -0,0 +1,47 @@ +# Generated by Django 4.2.17 on 2025-01-02 18:21 + +from django.db import 
migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("name", "0015_last_call_name"), + ] + + operations = [ + migrations.CreateModel( + name="AttendanceTypeName", + fields=[ + ( + "slug", + models.CharField(max_length=32, primary_key=True, serialize=False), + ), + ("name", models.CharField(max_length=255)), + ("desc", models.TextField(blank=True)), + ("used", models.BooleanField(default=True)), + ("order", models.IntegerField(default=0)), + ], + options={ + "ordering": ["order", "name"], + "abstract": False, + }, + ), + migrations.CreateModel( + name="RegistrationTicketTypeName", + fields=[ + ( + "slug", + models.CharField(max_length=32, primary_key=True, serialize=False), + ), + ("name", models.CharField(max_length=255)), + ("desc", models.TextField(blank=True)), + ("used", models.BooleanField(default=True)), + ("order", models.IntegerField(default=0)), + ], + options={ + "ordering": ["order", "name"], + "abstract": False, + }, + ), + ] diff --git a/ietf/name/migrations/0017_populate_new_reg_names.py b/ietf/name/migrations/0017_populate_new_reg_names.py new file mode 100644 index 0000000000..51954885c0 --- /dev/null +++ b/ietf/name/migrations/0017_populate_new_reg_names.py @@ -0,0 +1,39 @@ +# Generated by Django 4.2.17 on 2025-01-02 18:26 + +from django.db import migrations + +def forward(apps, schema_editor): + AttendanceTypeName = apps.get_model('name', 'AttendanceTypeName') + RegistrationTicketTypeName = apps.get_model('name', 'RegistrationTicketTypeName') + AttendanceTypeName.objects.create(slug='onsite', name='Onsite') + AttendanceTypeName.objects.create(slug='remote', name='Remote') + AttendanceTypeName.objects.create(slug='hackathon_onsite', name='Hackathon Onsite') + AttendanceTypeName.objects.create(slug='hackathon_remote', name='Hackathon Remote') + AttendanceTypeName.objects.create(slug='anrw_onsite', name='ANRW Onsite') + AttendanceTypeName.objects.create(slug='unknown', name='Unknown') + RegistrationTicketTypeName.objects.create(slug='week_pass', name='Week Pass') + RegistrationTicketTypeName.objects.create(slug='one_day', name='One Day') + RegistrationTicketTypeName.objects.create(slug='student', name='Student') + RegistrationTicketTypeName.objects.create(slug='hackathon_only', name='Hackathon Only') + RegistrationTicketTypeName.objects.create(slug='hackathon_combo', name='Hackathon Combo') + RegistrationTicketTypeName.objects.create(slug='anrw_only', name='ANRW Only') + RegistrationTicketTypeName.objects.create(slug='anrw_combo', name='ANRW Combo') + RegistrationTicketTypeName.objects.create(slug='unknown', name='Unknown') + + +def reverse(apps, schema_editor): + AttendanceTypeName = apps.get_model('name', 'AttendanceTypeName') + RegistrationTicketTypeName = apps.get_model('name', 'RegistrationTicketTypeName') + AttendanceTypeName.objects.delete() + RegistrationTicketTypeName.objects.delete() + + +class Migration(migrations.Migration): + + dependencies = [ + ("name", "0016_attendancetypename_registrationtickettypename"), + ] + + operations = [ + migrations.RunPython(forward, reverse), + ] diff --git a/ietf/name/migrations/0018_alter_rolenames.py b/ietf/name/migrations/0018_alter_rolenames.py new file mode 100644 index 0000000000..f931de2e97 --- /dev/null +++ b/ietf/name/migrations/0018_alter_rolenames.py @@ -0,0 +1,36 @@ +# Copyright The IETF Trust 2025, All Rights Reserved# Generated by Django 4.2.21 on 2025-05-30 16:35 + +from django.db import migrations + + +def forward(apps, schema_editor): + RoleName = apps.get_model("name", 
"RoleName") + RoleName.objects.filter(slug__in=["liaison_contact", "liaison_cc_contact"]).update( + used=False + ) + RoleName.objects.get_or_create( + slug="liaison_coordinator", + defaults={ + "name": "Liaison Coordinator", + "desc": "Coordinates liaison handling for the IAB", + "order": 14, + }, + ) + RoleName.objects.filter(slug__contains="trac-").update(used=False) + + +def reverse(apps, schema_editor): + RoleName = apps.get_model("name", "RoleName") + RoleName.objects.filter(slug__in=["liaison_contact", "liaison_cc_contact"]).update( + used=True + ) + RoleName.objects.filter(slug="liaison_coordinator").delete() + # Intentionally not restoring trac-* RoleNames to used=True + + +class Migration(migrations.Migration): + dependencies = [ + ("name", "0017_populate_new_reg_names"), + ] + + operations = [migrations.RunPython(forward, reverse)] diff --git a/ietf/name/migrations/0019_alter_sessionpurposename_timeslot_types.py b/ietf/name/migrations/0019_alter_sessionpurposename_timeslot_types.py new file mode 100644 index 0000000000..a0ca81836d --- /dev/null +++ b/ietf/name/migrations/0019_alter_sessionpurposename_timeslot_types.py @@ -0,0 +1,27 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models +import ietf.utils.validators + + +class Migration(migrations.Migration): + dependencies = [ + ("name", "0018_alter_rolenames"), + ] + + operations = [ + migrations.AlterField( + model_name="sessionpurposename", + name="timeslot_types", + field=models.JSONField( + default=list, + help_text="Allowed TimeSlotTypeNames", + max_length=256, + validators=[ + ietf.utils.validators.JSONForeignKeyListValidator( + "name.TimeSlotTypeName" + ) + ], + ), + ), + ] diff --git a/ietf/name/models.py b/ietf/name/models.py index b5adeccc63..24104c5f45 100644 --- a/ietf/name/models.py +++ b/ietf/name/models.py @@ -1,8 +1,6 @@ # Copyright The IETF Trust 2010-2020, All Rights Reserved # -*- coding: utf-8 -*- -import jsonfield - from django.db import models from ietf.utils.models import ForeignKey @@ -42,8 +40,8 @@ class DocRelationshipName(NameModel): class DocTypeName(NameModel): """Draft, Agenda, Minutes, Charter, Discuss, Guideline, Email, - Review, Issue, Wiki""" - prefix = models.CharField(max_length=16, default="") + Review, Issue, Wiki, RFC""" + prefix = models.CharField(max_length=32, default="") class DocTagName(NameModel): """Waiting for Reference, IANA Coordination, Revised ID Needed, External Party, AD Followup, Point Raised - Writeup Needed, ...""" @@ -73,8 +71,8 @@ class SessionStatusName(NameModel): """Waiting for Approval, Approved, Waiting for Scheduling, Scheduled, Cancelled, Disapproved""" class SessionPurposeName(NameModel): """Regular, Tutorial, Office Hours, Coding, Social, Admin""" - timeslot_types = jsonfield.JSONField( - max_length=256, blank=False, default=[], + timeslot_types = models.JSONField( + max_length=256, blank=False, default=list, help_text='Allowed TimeSlotTypeNames', validators=[JSONForeignKeyListValidator('name.TimeSlotTypeName')], ) @@ -101,7 +99,7 @@ class DraftSubmissionStateName(NameModel): """Uploaded, Awaiting Submitter Authentication, Awaiting Approval from Previous Version Authors, Awaiting Initial Version Approval, Awaiting Manual Post, Cancelled, Posted""" - next_states = models.ManyToManyField('DraftSubmissionStateName', related_name="previous_states", blank=True) + next_states = models.ManyToManyField('name.DraftSubmissionStateName', related_name="previous_states", blank=True) class RoomResourceName(NameModel): "Room 
resources: Audio Stream, Meetecho, . . ." class IprDisclosureStateName(NameModel): @@ -151,6 +149,9 @@ class SlideSubmissionStatusName(NameModel): "Pending, Accepted, Rejected" class TelechatAgendaSectionName(NameModel): """roll_call, minutes, action_items""" - class AppealArtifactTypeName(NameModel): pass +class AttendanceTypeName(NameModel): + """onsite, remote, hackathon_onsite, hackathon_remote""" +class RegistrationTicketTypeName(NameModel): + """week, one_day, student""" diff --git a/ietf/name/resources.py b/ietf/name/resources.py index dffa7669db..0cb0e41e0b 100644 --- a/ietf/name/resources.py +++ b/ietf/name/resources.py @@ -19,7 +19,7 @@ RoleName, RoomResourceName, SessionStatusName, StdLevelName, StreamName, TimeSlotTypeName, TopicAudienceName, ReviewerQueuePolicyName, TimerangeName, ExtResourceTypeName, ExtResourceName, SlideSubmissionStatusName, ProceedingsMaterialTypeName, SessionPurposeName, TelechatAgendaSectionName, - AppealArtifactTypeName ) + AppealArtifactTypeName, AttendanceTypeName, RegistrationTicketTypeName ) class TimeSlotTypeNameResource(ModelResource): class Meta: @@ -752,3 +752,33 @@ class Meta: "order": ALL, } api.name.register(AppealArtifactTypeNameResource()) + + +class AttendanceTypeNameResource(ModelResource): + class Meta: + cache = SimpleCache() + queryset = AttendanceTypeName.objects.all() + serializer = api.Serializer() + filtering = { + "slug": ALL, + "name": ALL, + "desc": ALL, + "used": ALL, + "order": ALL, + } +api.name.register(AttendanceTypeNameResource()) + + +class RegistrationTicketTypeNameResource(ModelResource): + class Meta: + cache = SimpleCache() + queryset = RegistrationTicketTypeName.objects.all() + serializer = api.Serializer() + filtering = { + "slug": ALL, + "name": ALL, + "desc": ALL, + "used": ALL, + "order": ALL, + } +api.name.register(RegistrationTicketTypeNameResource()) diff --git a/ietf/name/serializers.py b/ietf/name/serializers.py new file mode 100644 index 0000000000..a764f56051 --- /dev/null +++ b/ietf/name/serializers.py @@ -0,0 +1,11 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +"""django-rest-framework serializers""" +from rest_framework import serializers + +from .models import StreamName + + +class StreamNameSerializer(serializers.ModelSerializer): + class Meta: + model = StreamName + fields = ["slug", "name", "desc"] diff --git a/ietf/nomcom/admin.py b/ietf/nomcom/admin.py index 4b18cc005c..1675d731a5 100644 --- a/ietf/nomcom/admin.py +++ b/ietf/nomcom/admin.py @@ -21,9 +21,9 @@ class NomComAdmin(admin.ModelAdmin): admin.site.register(NomCom, NomComAdmin) class NominationAdmin(admin.ModelAdmin): - list_display = ['id', 'position', 'candidate_name', 'candidate_email', 'candidate_phone', 'nominee', 'comments', 'nominator_email', 'user', 'time', 'share_nominator'] + list_display = ['id', 'position', 'candidate_name', 'candidate_email', 'candidate_phone', 'nominee', 'comments', 'nominator_email', 'person', 'time', 'share_nominator'] list_filter = ['time', 'share_nominator'] - raw_id_fields = ['nominee', 'comments', 'user'] + raw_id_fields = ['nominee', 'comments', 'person'] admin.site.register(Nomination, NominationAdmin) class NomineeAdmin(admin.ModelAdmin): @@ -51,9 +51,9 @@ def nominee(self, obj): return ", ".join(n.person.ascii for n in obj.nominees.all()) nominee.admin_order_field = 'nominees__person__ascii' # type: ignore # https://github.com/python/mypy/issues/2087 - list_display = ['id', 'nomcom', 'author', 'nominee', 'subject', 'type', 'user', 'time'] + list_display = ['id', 'nomcom', 'author', 
'nominee', 'subject', 'type', 'person', 'time'] list_filter = ['nomcom', 'type', 'time', ] - raw_id_fields = ['positions', 'topics', 'user'] + raw_id_fields = ['positions', 'topics', 'person'] admin.site.register(Feedback, FeedbackAdmin) diff --git a/ietf/nomcom/factories.py b/ietf/nomcom/factories.py index 7999228c24..286e0229ab 100644 --- a/ietf/nomcom/factories.py +++ b/ietf/nomcom/factories.py @@ -9,7 +9,7 @@ from ietf.nomcom.models import NomCom, Position, Feedback, Nominee, NomineePosition, Nomination, Topic from ietf.group.factories import GroupFactory -from ietf.person.factories import PersonFactory, UserFactory +from ietf.person.factories import PersonFactory import debug # pyflakes:ignore @@ -199,7 +199,7 @@ class Meta: candidate_email = factory.LazyAttribute(lambda obj: obj.nominee.person.email()) candidate_phone = factory.Faker('phone_number') comments = factory.SubFactory(FeedbackFactory) - nominator_email = factory.LazyAttribute(lambda obj: obj.user.email) - user = factory.SubFactory(UserFactory) + nominator_email = factory.LazyAttribute(lambda obj: obj.person.user.email) + person = factory.SubFactory(PersonFactory) share_nominator = False diff --git a/ietf/nomcom/forms.py b/ietf/nomcom/forms.py index 919ed6e187..5987b22637 100644 --- a/ietf/nomcom/forms.py +++ b/ietf/nomcom/forms.py @@ -15,12 +15,13 @@ from ietf.nomcom.models import ( NomCom, Nomination, Nominee, NomineePosition, Position, Feedback, ReminderDates, Topic, Volunteer ) from ietf.nomcom.utils import (NOMINATION_RECEIPT_TEMPLATE, FEEDBACK_RECEIPT_TEMPLATE, - get_user_email, validate_private_key, validate_public_key, + get_person_email, validate_private_key, validate_public_key, make_nomineeposition, make_nomineeposition_for_newperson, create_feedback_email) from ietf.person.models import Email from ietf.person.fields import (SearchableEmailField, SearchableEmailsField, SearchablePersonField, SearchablePersonsField ) +from ietf.utils.fields import ModelMultipleChoiceField from ietf.utils.mail import send_mail from ietf.mailtrigger.utils import gather_address_lists @@ -256,7 +257,7 @@ class NominateForm(forms.ModelForm): def __init__(self, *args, **kwargs): self.nomcom = kwargs.pop('nomcom', None) - self.user = kwargs.pop('user', None) + self.person = kwargs.pop('person', None) self.public = kwargs.pop('public', None) super(NominateForm, self).__init__(*args, **kwargs) @@ -273,7 +274,7 @@ def __init__(self, *args, **kwargs): if not self.public: self.fields.pop('confirmation') - author = get_user_email(self.user) + author = get_person_email(self.person) if author: self.fields['nominator_email'].initial = author.address help_text = """(Nomcom Chair/Member: please fill this in. 
Use your own email address if the person making the @@ -303,7 +304,7 @@ def save(self, commit=True): author = None if self.public: - author = get_user_email(self.user) + author = get_person_email(self.person) else: if nominator_email: emails = Email.objects.filter(address=nominator_email) @@ -314,7 +315,7 @@ def save(self, commit=True): feedback = Feedback.objects.create(nomcom=self.nomcom, comments=self.nomcom.encrypt(qualifications), type=FeedbackTypeName.objects.get(slug='nomina'), - user=self.user) + person=self.person) feedback.positions.add(position) feedback.nominees.add(nominee) @@ -326,7 +327,7 @@ def save(self, commit=True): nomination.nominee = nominee nomination.comments = feedback nomination.share_nominator = share_nominator - nomination.user = self.user + nomination.person = self.person if commit: nomination.save() @@ -361,7 +362,7 @@ class NominateNewPersonForm(forms.ModelForm): def __init__(self, *args, **kwargs): self.nomcom = kwargs.pop('nomcom', None) - self.user = kwargs.pop('user', None) + self.person = kwargs.pop('person', None) self.public = kwargs.pop('public', None) super(NominateNewPersonForm, self).__init__(*args, **kwargs) @@ -375,7 +376,7 @@ def __init__(self, *args, **kwargs): if not self.public: self.fields.pop('confirmation') - author = get_user_email(self.user) + author = get_person_email(self.person) if author: self.fields['nominator_email'].initial = author.address help_text = """(Nomcom Chair/Member: please fill this in. Use your own email address if the person making the @@ -416,7 +417,7 @@ def save(self, commit=True): author = None if self.public: - author = get_user_email(self.user) + author = get_person_email(self.person) else: if nominator_email: emails = Email.objects.filter(address=nominator_email) @@ -429,7 +430,7 @@ def save(self, commit=True): feedback = Feedback.objects.create(nomcom=self.nomcom, comments=self.nomcom.encrypt(qualifications), type=FeedbackTypeName.objects.get(slug='nomina'), - user=self.user) + person=self.person) feedback.positions.add(position) feedback.nominees.add(nominee) @@ -441,7 +442,7 @@ def save(self, commit=True): nomination.nominee = nominee nomination.comments = feedback nomination.share_nominator = share_nominator - nomination.user = self.user + nomination.person = self.person if commit: nomination.save() @@ -476,7 +477,7 @@ class FeedbackForm(forms.ModelForm): def __init__(self, *args, **kwargs): self.nomcom = kwargs.pop('nomcom', None) - self.user = kwargs.pop('user', None) + self.person = kwargs.pop('person', None) self.public = kwargs.pop('public', None) self.position = kwargs.pop('position', None) self.nominee = kwargs.pop('nominee', None) @@ -484,7 +485,7 @@ def __init__(self, *args, **kwargs): super(FeedbackForm, self).__init__(*args, **kwargs) - author = get_user_email(self.user) + author = get_person_email(self.person) if self.public: self.fields.pop('nominator_email') @@ -514,7 +515,7 @@ def save(self, commit=True): author = None if self.public: - author = get_user_email(self.user) + author = get_person_email(self.person) else: nominator_email = self.cleaned_data['nominator_email'] if nominator_email: @@ -525,7 +526,7 @@ def save(self, commit=True): feedback.author = author.address feedback.nomcom = self.nomcom - feedback.user = self.user + feedback.person = self.person feedback.type = FeedbackTypeName.objects.get(slug='comment') feedback.comments = self.nomcom.encrypt(comment_text) feedback.save() @@ -578,7 +579,7 @@ class QuestionnaireForm(forms.ModelForm): def __init__(self, *args, **kwargs): 
self.nomcom = kwargs.pop('nomcom', None) - self.user = kwargs.pop('user', None) + self.person = kwargs.pop('person', None) super(QuestionnaireForm, self).__init__(*args, **kwargs) self.fields['nominee'] = PositionNomineeField(nomcom=self.nomcom, required=True) @@ -588,13 +589,13 @@ def save(self, commit=True): comment_text = self.cleaned_data['comment_text'] (position, nominee) = self.cleaned_data['nominee'] - author = get_user_email(self.user) + author = get_person_email(self.person) if author: feedback.author = author feedback.nomcom = self.nomcom - feedback.user = self.user + feedback.person = self.person feedback.type = FeedbackTypeName.objects.get(slug='questio') feedback.comments = self.nomcom.encrypt(comment_text) feedback.save() @@ -659,9 +660,9 @@ class Meta: model = Feedback fields = ('type', ) - def set_nomcom(self, nomcom, user): + def set_nomcom(self, nomcom, person): self.nomcom = nomcom - self.user = user + self.person = person #self.fields['nominee'] = MultiplePositionNomineeField(nomcom=self.nomcom, #required=True, #widget=forms.SelectMultiple, @@ -670,7 +671,7 @@ def set_nomcom(self, nomcom, user): def save(self, commit=True): feedback = super(PendingFeedbackForm, self).save(commit=False) feedback.nomcom = self.nomcom - feedback.user = self.user + feedback.person = self.person feedback.save() return feedback @@ -700,9 +701,9 @@ class Meta: model = Feedback fields = ('type', ) - def set_nomcom(self, nomcom, user, instances=None): + def set_nomcom(self, nomcom, person, instances=None): self.nomcom = nomcom - self.user = user + self.person = person instances = instances or [] self.feedback_type = None for i in instances: @@ -719,9 +720,9 @@ def set_nomcom(self, nomcom, user, instances=None): required= self.feedback_type.slug != 'comment', help_text='Hold down "Control", or "Command" on a Mac, to select more than one.') if self.feedback_type.slug == 'comment': - self.fields['topic'] = forms.ModelMultipleChoiceField(queryset=self.nomcom.topic_set.all(), - help_text='Hold down "Control" or "Command" on a Mac, to select more than one.', - required=False,) + self.fields['topic'] = ModelMultipleChoiceField(queryset=self.nomcom.topic_set.all(), + help_text='Hold down "Control" or "Command" on a Mac, to select more than one.', + required=False,) else: self.fields['position'] = forms.ModelChoiceField(queryset=Position.objects.get_by_nomcom(self.nomcom).filter(is_open=True), label="Position") self.fields['searched_email'] = SearchableEmailField(only_users=False,help_text="Try to find the candidate you are classifying with this field first. 
Only use the name and email fields below if this search does not find the candidate.",label="Candidate",required=False) @@ -782,7 +783,7 @@ def save(self, commit=True): nominee=nominee, comments=feedback, nominator_email=nominator_email, - user=self.user) + person=self.person) return feedback else: feedback.save() @@ -847,7 +848,7 @@ class Meta: class NominationResponseCommentForm(forms.Form): comments = forms.CharField(widget=forms.Textarea,required=False,help_text="Any comments provided will be encrypted and will only be visible to the NomCom.", strip=False) -class NomcomVolunteerMultipleChoiceField(forms.ModelMultipleChoiceField): +class NomcomVolunteerMultipleChoiceField(ModelMultipleChoiceField): def label_from_instance(self, obj): year = obj.year() return f'Volunteer for the {year}/{year+1} Nominating Committee' diff --git a/ietf/nomcom/management/commands/send_reminders.py b/ietf/nomcom/management/commands/send_reminders.py deleted file mode 100644 index bc10425430..0000000000 --- a/ietf/nomcom/management/commands/send_reminders.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright The IETF Trust 2013-2020, All Rights Reserved -# -*- coding: utf-8 -*- - - -import syslog - -from django.core.management.base import BaseCommand - -from ietf.nomcom.models import NomCom, NomineePosition -from ietf.nomcom.utils import send_accept_reminder_to_nominee,send_questionnaire_reminder_to_nominee -from ietf.utils.timezone import date_today - - -def log(message): - syslog.syslog(message) - -def is_time_to_send(nomcom,send_date,nomination_date): - if nomcom.reminder_interval: - days_passed = (send_date - nomination_date).days - return days_passed > 0 and days_passed % nomcom.reminder_interval == 0 - else: - return bool(nomcom.reminderdates_set.filter(date=send_date)) - -class Command(BaseCommand): - help = ("Send acceptance and questionnaire reminders to nominees") - - def handle(self, *args, **options): - for nomcom in NomCom.objects.filter(group__state__slug='active'): - nps = NomineePosition.objects.filter(nominee__nomcom=nomcom,nominee__duplicated__isnull=True) - for nominee_position in nps.pending(): - if is_time_to_send(nomcom, date_today(), nominee_position.time.date()): - send_accept_reminder_to_nominee(nominee_position) - log('Sent accept reminder to %s' % nominee_position.nominee.email.address) - for nominee_position in nps.accepted().without_questionnaire_response(): - if is_time_to_send(nomcom, date_today(), nominee_position.time.date()): - send_questionnaire_reminder_to_nominee(nominee_position) - log('Sent questionnaire reminder to %s' % nominee_position.nominee.email.address) diff --git a/ietf/nomcom/management/tests.py b/ietf/nomcom/management/tests.py index 7bda2b5aa5..08c0e1fe32 100644 --- a/ietf/nomcom/management/tests.py +++ b/ietf/nomcom/management/tests.py @@ -1,7 +1,7 @@ # Copyright The IETF Trust 2021, All Rights Reserved # -*- coding: utf-8 -*- """Tests of nomcom management commands""" -import mock +from unittest import mock import sys from collections import namedtuple diff --git a/ietf/nomcom/migrations/0004_volunteer_origin_volunteer_time_volunteer_withdrawn.py b/ietf/nomcom/migrations/0004_volunteer_origin_volunteer_time_volunteer_withdrawn.py new file mode 100644 index 0000000000..9eaebf2069 --- /dev/null +++ b/ietf/nomcom/migrations/0004_volunteer_origin_volunteer_time_volunteer_withdrawn.py @@ -0,0 +1,27 @@ +# Generated by Django 4.2.7 on 2023-11-05 09:45 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("nomcom", 
"0003_alter_nomination_share_nominator"), + ] + + operations = [ + migrations.AddField( + model_name="volunteer", + name="origin", + field=models.CharField(default="datatracker", max_length=32), + ), + migrations.AddField( + model_name="volunteer", + name="time", + field=models.DateTimeField(auto_now_add=True, null=True, blank=True), + ), + migrations.AddField( + model_name="volunteer", + name="withdrawn", + field=models.DateTimeField(blank=True, null=True), + ), + ] diff --git a/ietf/nomcom/migrations/0005_user_to_person.py b/ietf/nomcom/migrations/0005_user_to_person.py new file mode 100644 index 0000000000..66a6e99642 --- /dev/null +++ b/ietf/nomcom/migrations/0005_user_to_person.py @@ -0,0 +1,100 @@ +# Generated by Django 4.2.2 on 2023-06-14 19:47 + +from django.db import migrations +from django.db.models import OuterRef, Subquery +import django.db.models.deletion +import ietf.utils.models + + +def forward(apps, schema_editor): + Nomination = apps.get_model('nomcom', 'Nomination') + Person = apps.get_model("person", "Person") + Nomination.objects.exclude( + user__isnull=True + ).update( + person=Subquery( + Person.objects.filter(user_id=OuterRef("user_id")).values("pk")[:1] + ) + ) + + Feedback = apps.get_model('nomcom', 'Feedback') + Feedback.objects.exclude( + user__isnull=True + ).update( + person=Subquery( + Person.objects.filter(user_id=OuterRef("user_id")).values("pk")[:1] + ) + ) + +def reverse(apps, schema_editor): + Nomination = apps.get_model('nomcom', 'Nomination') + Person = apps.get_model("person", "Person") + Nomination.objects.exclude( + person__isnull=True + ).update( + user_id=Subquery( + Person.objects.filter(pk=OuterRef("person_id")).values("user_id")[:1] + ) + ) + + Feedback = apps.get_model('nomcom', 'Feedback') + Feedback.objects.exclude( + person__isnull=True + ).update( + user_id=Subquery( + Person.objects.filter(pk=OuterRef("person_id")).values("user_id")[:1] + ) + ) + +class Migration(migrations.Migration): + dependencies = [ + ("person", "0001_initial"), + ("nomcom", "0004_volunteer_origin_volunteer_time_volunteer_withdrawn"), + ] + + operations = [ + migrations.AddField( + model_name="feedback", + name="person", + field=ietf.utils.models.ForeignKey( + blank=True, + editable=False, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="person.person", + ), + ), + migrations.AddField( + model_name="nomination", + name="person", + field=ietf.utils.models.ForeignKey( + editable=False, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="person.person", + ), + ), + migrations.RunPython(forward, reverse), + migrations.RemoveField( + model_name="feedback", + name="user", + field=ietf.utils.models.ForeignKey( + blank=True, + editable=False, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="user.user", + ), + ), + migrations.RemoveField( + model_name="nomination", + name="user", + field=ietf.utils.models.ForeignKey( + blank=True, + editable=False, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="user.user", + ), + ), + ] diff --git a/ietf/nomcom/models.py b/ietf/nomcom/models.py index ee2eea2cca..c206e467bd 100644 --- a/ietf/nomcom/models.py +++ b/ietf/nomcom/models.py @@ -7,7 +7,6 @@ from django.db import models from django.db.models.signals import post_delete from django.conf import settings -from django.contrib.auth.models import User from django.template.loader import render_to_string from django.template.defaultfilters import linebreaks # type: ignore @@ -43,6 +42,7 @@ class 
ReminderDates(models.Model): class NomCom(models.Model): + # TODO-BLOBSTORE: migrate this to a database field instead of a FileField and update code accordingly public_key = models.FileField(storage=NoLocationMigrationFileSystemStorage(location=settings.NOMCOM_PUBLIC_KEYS_DIR), upload_to=upload_path_handler, blank=True, null=True) @@ -128,7 +128,7 @@ class Nomination(models.Model): nominee = ForeignKey('Nominee') comments = ForeignKey('Feedback') nominator_email = models.EmailField(verbose_name='Nominator Email', blank=True) - user = ForeignKey(User, editable=False, null=True, on_delete=models.SET_NULL) + person = ForeignKey(Person, editable=False, null=True, on_delete=models.SET_NULL) time = models.DateTimeField(auto_now_add=True) share_nominator = models.BooleanField(verbose_name='OK to share nominator\'s name with candidate', default=False, help_text='Check this box to allow the NomCom to let the ' @@ -148,7 +148,7 @@ class Nominee(models.Model): email = ForeignKey(Email) person = ForeignKey(Person, blank=True, null=True) - nominee_position = models.ManyToManyField('Position', through='NomineePosition') + nominee_position = models.ManyToManyField('nomcom.Position', through='nomcom.NomineePosition') duplicated = ForeignKey('Nominee', blank=True, null=True) nomcom = ForeignKey('NomCom') @@ -293,13 +293,13 @@ def get_description(self): class Feedback(models.Model): nomcom = ForeignKey('NomCom') author = models.EmailField(verbose_name='Author', blank=True) - positions = models.ManyToManyField('Position', blank=True) - nominees = models.ManyToManyField('Nominee', blank=True) - topics = models.ManyToManyField('Topic', blank=True) + positions = models.ManyToManyField('nomcom.Position', blank=True) + nominees = models.ManyToManyField('nomcom.Nominee', blank=True) + topics = models.ManyToManyField('nomcom.Topic', blank=True) subject = models.TextField(verbose_name='Subject', blank=True) comments = models.BinaryField(verbose_name='Comments') type = ForeignKey(FeedbackTypeName, blank=True, null=True) - user = ForeignKey(User, editable=False, blank=True, null=True, on_delete=models.SET_NULL) + person = ForeignKey(Person, editable=False, blank=True, null=True, on_delete=models.SET_NULL) time = models.DateTimeField(auto_now_add=True) objects = FeedbackManager() @@ -327,7 +327,10 @@ class Volunteer(models.Model): nomcom = ForeignKey('NomCom') person = ForeignKey(Person) affiliation = models.CharField(blank=True, max_length=255) - + time = models.DateTimeField(auto_now_add=True, null=True, blank=True) + origin = models.CharField(max_length=32, default='datatracker') + withdrawn = models.DateTimeField(blank=True, null=True) + def __str__(self): return f'{self.person} for {self.nomcom}' diff --git a/ietf/nomcom/resources.py b/ietf/nomcom/resources.py index c87e72eae6..109a136419 100644 --- a/ietf/nomcom/resources.py +++ b/ietf/nomcom/resources.py @@ -115,11 +115,11 @@ class Meta: api.nomcom.register(NomineePositionResource()) from ietf.name.resources import FeedbackTypeNameResource -from ietf.utils.resources import UserResource +from ietf.person.resources import PersonResource class FeedbackResource(ModelResource): nomcom = ToOneField(NomComResource, 'nomcom') type = ToOneField(FeedbackTypeNameResource, 'type', null=True) - user = ToOneField(UserResource, 'user', null=True) + person = ToOneField(PersonResource, 'person', null=True) positions = ToManyField(PositionResource, 'positions', null=True) nominees = ToManyField(NomineeResource, 'nominees', null=True) class Meta: @@ -136,18 +136,18 @@ class Meta: 
"time": ALL, "nomcom": ALL_WITH_RELATIONS, "type": ALL_WITH_RELATIONS, - "user": ALL_WITH_RELATIONS, + "person": ALL_WITH_RELATIONS, "positions": ALL_WITH_RELATIONS, "nominees": ALL_WITH_RELATIONS, } api.nomcom.register(FeedbackResource()) -from ietf.utils.resources import UserResource +from ietf.person.resources import PersonResource class NominationResource(ModelResource): position = ToOneField(PositionResource, 'position') nominee = ToOneField(NomineeResource, 'nominee') comments = ToOneField(FeedbackResource, 'comments') - user = ToOneField(UserResource, 'user', null=True) + person = ToOneField(PersonResource, 'person', null=True) class Meta: cache = SimpleCache() queryset = Nomination.objects.all() @@ -164,7 +164,7 @@ class Meta: "position": ALL_WITH_RELATIONS, "nominee": ALL_WITH_RELATIONS, "comments": ALL_WITH_RELATIONS, - "user": ALL_WITH_RELATIONS, + "person": ALL_WITH_RELATIONS, } api.nomcom.register(NominationResource()) diff --git a/ietf/nomcom/tasks.py b/ietf/nomcom/tasks.py new file mode 100644 index 0000000000..3d063a6b26 --- /dev/null +++ b/ietf/nomcom/tasks.py @@ -0,0 +1,10 @@ +# Copyright The IETF Trust 2024, All Rights Reserved + +from celery import shared_task + +from .utils import send_reminders + + +@shared_task +def send_nomcom_reminders_task(): + send_reminders() diff --git a/ietf/nomcom/tests.py b/ietf/nomcom/tests.py index d3da0bddd6..210788ce07 100644 --- a/ietf/nomcom/tests.py +++ b/ietf/nomcom/tests.py @@ -1,10 +1,9 @@ -# Copyright The IETF Trust 2012-2023, All Rights Reserved -# -*- coding: utf-8 -*- +# Copyright The IETF Trust 2012-2025, All Rights Reserved import datetime import io -import mock +from unittest import mock import random import shutil @@ -24,32 +23,49 @@ import debug # pyflakes:ignore +from ietf.api.views import EmailIngestionError from ietf.dbtemplate.factories import DBTemplateFactory from ietf.dbtemplate.models import DBTemplate -from ietf.doc.factories import DocEventFactory, WgDocumentAuthorFactory, \ - NewRevisionDocEventFactory, DocumentAuthorFactory +from ietf.doc.factories import ( + DocEventFactory, + WgDocumentAuthorFactory, + NewRevisionDocEventFactory, + DocumentAuthorFactory, + RfcAuthorFactory, + WgDraftFactory, WgRfcFactory, +) from ietf.group.factories import GroupFactory, GroupHistoryFactory, RoleFactory, RoleHistoryFactory from ietf.group.models import Group, Role -from ietf.meeting.factories import MeetingFactory, AttendedFactory +from ietf.meeting.factories import MeetingFactory, AttendedFactory, RegistrationFactory +from ietf.meeting.models import Registration from ietf.message.models import Message from ietf.nomcom.test_data import nomcom_test_data, generate_cert, check_comments, \ COMMUNITY_USER, CHAIR_USER, \ MEMBER_USER, SECRETARIAT_USER, EMAIL_DOMAIN, NOMCOM_YEAR from ietf.nomcom.models import NomineePosition, Position, Nominee, \ NomineePositionStateName, Feedback, FeedbackTypeName, \ - Nomination, FeedbackLastSeen, TopicFeedbackLastSeen, ReminderDates -from ietf.nomcom.management.commands.send_reminders import Command, is_time_to_send + Nomination, FeedbackLastSeen, TopicFeedbackLastSeen, ReminderDates, \ + NomCom from ietf.nomcom.factories import NomComFactory, FeedbackFactory, TopicFactory, \ nomcom_kwargs_for_year, provide_private_key_to_test_client, \ key -from ietf.nomcom.utils import get_nomcom_by_year, make_nomineeposition, \ - get_hash_nominee_position, is_eligible, list_eligible, \ - get_eligibility_date, suggest_affiliation, \ - decorate_volunteers_with_qualifications +from ietf.nomcom.tasks import 
send_nomcom_reminders_task +from ietf.nomcom.utils import ( + get_nomcom_by_year, + make_nomineeposition, + get_hash_nominee_position, + is_eligible, + list_eligible, + get_eligibility_date, + suggest_affiliation, + ingest_feedback_email, + decorate_volunteers_with_qualifications, + send_reminders, + _is_time_to_send_reminder, + get_qualified_author_queryset, +) from ietf.person.factories import PersonFactory, EmailFactory from ietf.person.models import Email, Person -from ietf.stats.models import MeetingRegistration -from ietf.stats.factories import MeetingRegistrationFactory from ietf.utils.mail import outbox, empty_outbox, get_payload_text from ietf.utils.test_utils import login_testing_unauthorized, TestCase, unicontent from ietf.utils.timezone import date_today, datetime_today, datetime_from_date, DEADLINE_TZINFO @@ -689,20 +705,16 @@ def test_public_nominate_with_automatic_questionnaire(self): self.assertIn('nominee@', outbox[1]['To']) - def nominate_view(self, *args, **kwargs): - public = kwargs.pop('public', True) - searched_email = kwargs.pop('searched_email', None) - nominee_email = kwargs.pop('nominee_email', 'nominee@example.com') + def nominate_view(self, public=True, searched_email=None, + nominee_email='nominee@example.com', + nominator_email=COMMUNITY_USER+EMAIL_DOMAIN, + position='IAOC', confirmation=False): + if not searched_email: - searched_email = Email.objects.filter(address=nominee_email).first() - if not searched_email: - searched_email = EmailFactory(address=nominee_email, primary=True, origin='test') + searched_email = Email.objects.filter(address=nominee_email).first() or EmailFactory(address=nominee_email, primary=True, origin='test') if not searched_email.person: searched_email.person = PersonFactory() searched_email.save() - nominator_email = kwargs.pop('nominator_email', "%s%s" % (COMMUNITY_USER, EMAIL_DOMAIN)) - position_name = kwargs.pop('position', 'IAOC') - confirmation = kwargs.pop('confirmation', False) if public: nominate_url = self.public_nominate_url @@ -726,7 +738,7 @@ def nominate_view(self, *args, **kwargs): q = PyQuery(response.content) self.assertEqual(len(q("#nominate-form")), 1) - position = Position.objects.get(name=position_name) + position = Position.objects.get(name=position) comment_text = 'Test nominate view. Comments with accents äöåÄÖÅ éáíóú âêîôû ü àèìòù.' candidate_phone = '123456' @@ -764,12 +776,9 @@ def nominate_view(self, *args, **kwargs): comments=feedback, nominator_email="%s%s" % (COMMUNITY_USER, EMAIL_DOMAIN)) - def nominate_newperson_view(self, *args, **kwargs): - public = kwargs.pop('public', True) - nominee_email = kwargs.pop('nominee_email', 'nominee@example.com') - nominator_email = kwargs.pop('nominator_email', "%s%s" % (COMMUNITY_USER, EMAIL_DOMAIN)) - position_name = kwargs.pop('position', 'IAOC') - confirmation = kwargs.pop('confirmation', False) + def nominate_newperson_view(self, public=True, nominee_email='nominee@example.com', + nominator_email=COMMUNITY_USER+EMAIL_DOMAIN, + position='IAOC', confirmation=False): if public: nominate_url = self.public_nominate_newperson_url @@ -793,7 +802,7 @@ def nominate_newperson_view(self, *args, **kwargs): q = PyQuery(response.content) self.assertEqual(len(q("#nominate-form")), 1) - position = Position.objects.get(name=position_name) + position = Position.objects.get(name=position) candidate_email = nominee_email candidate_name = 'nominee' comment_text = 'Test nominate view. Comments with accents äöåÄÖÅ éáíóú âêîôû ü àèìòù.' 
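# --- Editor's note (illustrative sketch, not part of the diff): the deleted
# ietf/nomcom/management/commands/send_reminders.py is replaced by the celery
# task send_nomcom_reminders_task added in ietf/nomcom/tasks.py, which the new
# TaskTests below exercise. A minimal sketch of how such a task might be wired
# into a celery beat schedule follows; the schedule entry name and crontab
# values are assumptions, since the actual beat configuration is not shown in
# this change set.
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "send-nomcom-reminders": {
        # points at the shared_task added in ietf/nomcom/tasks.py
        "task": "ietf.nomcom.tasks.send_nomcom_reminders_task",
        # illustrative cadence: once a day (the original cron cadence for the
        # removed management command is not shown in this diff)
        "schedule": crontab(hour=0, minute=0),
    },
}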
@@ -847,15 +856,13 @@ def test_add_questionnaire(self): self.access_chair_url(self.add_questionnaire_url) self.add_questionnaire() - def add_questionnaire(self, *args, **kwargs): - public = kwargs.pop('public', False) - nominee_email = kwargs.pop('nominee_email', 'nominee@example.com') - nominator_email = kwargs.pop('nominator_email', "%s%s" % (COMMUNITY_USER, EMAIL_DOMAIN)) - position_name = kwargs.pop('position', 'IAOC') + def add_questionnaire(self, public=False, nominee_email='nominee@example.com', + nominator_email=COMMUNITY_USER+EMAIL_DOMAIN, + position='IAOC'): self.nominate_view(public=public, nominee_email=nominee_email, - position=position_name, + position=position, nominator_email=nominator_email) response = self.client.get(self.add_questionnaire_url) @@ -874,7 +881,7 @@ def add_questionnaire(self, *args, **kwargs): self.assertEqual(response.status_code, 200) self.assertContains(response, "questionnnaireform") - position = Position.objects.get(name=position_name) + position = Position.objects.get(name=position) nominee = Nominee.objects.get(email__address=nominee_email) comment_text = 'Test add questionnaire view. Comments with accents äöåÄÖÅ éáíóú âêîôû ü àèìòù.' @@ -924,16 +931,13 @@ def test_private_feedback(self): self.access_member_url(self.private_feedback_url) self.feedback_view(public=False) - def feedback_view(self, *args, **kwargs): - public = kwargs.pop('public', True) - nominee_email = kwargs.pop('nominee_email', 'nominee@example.com') - nominator_email = kwargs.pop('nominator_email', "%s%s" % (COMMUNITY_USER, EMAIL_DOMAIN)) - position_name = kwargs.pop('position', 'IAOC') - confirmation = kwargs.pop('confirmation', False) + def feedback_view(self, public=True, nominee_email='nominee@example.com', + nominator_email=COMMUNITY_USER+EMAIL_DOMAIN, + position='IAOC', confirmation=False): self.nominate_view(public=public, nominee_email=nominee_email, - position=position_name, + position=position, nominator_email=nominator_email) feedback_url = self.public_feedback_url @@ -956,7 +960,7 @@ def feedback_view(self, *args, **kwargs): self.assertEqual(response.status_code, 200) self.assertNotContains(response, "feedbackform") - position = Position.objects.get(name=position_name) + position = Position.objects.get(name=position) nominee = Nominee.objects.get(email__address=nominee_email) feedback_url += "?nominee=%d&position=%d" % (nominee.id, position.id) @@ -972,7 +976,7 @@ def feedback_view(self, *args, **kwargs): comments = 'Test feedback view. Comments with accents äöåÄÖÅ éáíóú âêîôû ü àèìòù.' 
test_data = {'comment_text': comments, - 'position_name': position.name, + 'position': position.name, 'nominee_name': nominee.email.person.name, 'nominee_email': nominee.email.address, 'confirmation': confirmation} @@ -1126,6 +1130,47 @@ def test_encrypted_comments(self): self.assertNotEqual(feedback.comments, comment_text) self.assertEqual(check_comments(feedback.comments, comment_text, self.privatekey_file), True) + @mock.patch("ietf.nomcom.utils.create_feedback_email") + def test_ingest_feedback_email(self, mock_create_feedback_email): + message = b"This is nomcom feedback" + no_nomcom_year = date_today().year + 10 # a guess at a year with no nomcoms + while NomCom.objects.filter(group__acronym__icontains=no_nomcom_year).exists(): + no_nomcom_year += 1 + inactive_nomcom = NomComFactory(group__state_id="conclude", group__acronym=f"nomcom{no_nomcom_year + 1}") + + # cases where the nomcom does not exist, so admins are notified + for bad_year in (no_nomcom_year, inactive_nomcom.year()): + with self.assertRaises(EmailIngestionError) as context: + ingest_feedback_email(message, bad_year) + self.assertIn("does not exist", context.exception.msg) + self.assertIsNotNone(context.exception.email_body) # error message to be sent + self.assertIsNone(context.exception.email_recipients) # default recipients (i.e., admin) + self.assertIsNone(context.exception.email_original_message) # no original message + self.assertFalse(context.exception.email_attach_traceback) # no traceback + self.assertFalse(mock_create_feedback_email.called) + + # nomcom exists but an error occurs, so feedback goes to the nomcom chair + active_nomcom = NomComFactory(group__acronym=f"nomcom{no_nomcom_year + 2}") + mock_create_feedback_email.side_effect = ValueError("ouch!") + with self.assertRaises(EmailIngestionError) as context: + ingest_feedback_email(message, active_nomcom.year()) + self.assertIn(f"Error ingesting nomcom {active_nomcom.year()}", context.exception.msg) + self.assertIsNotNone(context.exception.email_body) # error message to be sent + self.assertEqual(context.exception.email_recipients, active_nomcom.chair_emails()) + self.assertEqual(context.exception.email_original_message, message) + self.assertFalse(context.exception.email_attach_traceback) # no traceback + self.assertTrue(mock_create_feedback_email.called) + self.assertEqual(mock_create_feedback_email.call_args, mock.call(active_nomcom, message)) + mock_create_feedback_email.reset_mock() + + # and, finally, success + mock_create_feedback_email.side_effect = None + mock_create_feedback_email.return_value = FeedbackFactory(author="someone@example.com") + ingest_feedback_email(message, active_nomcom.year()) + self.assertTrue(mock_create_feedback_email.called) + self.assertEqual(mock_create_feedback_email.call_args, mock.call(active_nomcom, message)) + + class ReminderTest(TestCase): def setUp(self): @@ -1168,7 +1213,7 @@ def setUp(self): feedback = Feedback.objects.create(nomcom=self.nomcom, comments=self.nomcom.encrypt('some non-empty comments'), type=FeedbackTypeName.objects.get(slug='questio'), - user=User.objects.get(username=CHAIR_USER)) + person=User.objects.get(username=CHAIR_USER).person) feedback.positions.add(gen) feedback.nominees.add(n) @@ -1176,36 +1221,41 @@ def tearDown(self): teardown_test_public_keys_dir(self) super().tearDown() - def test_is_time_to_send(self): + def test_is_time_to_send_reminder(self): self.nomcom.reminder_interval = 4 today = date_today() - self.assertTrue(is_time_to_send(self.nomcom,today+datetime.timedelta(days=4),today)) 
+ self.assertTrue( + _is_time_to_send_reminder(self.nomcom, today + datetime.timedelta(days=4), today) + ) for delta in range(4): - self.assertFalse(is_time_to_send(self.nomcom,today+datetime.timedelta(days=delta),today)) + self.assertFalse( + _is_time_to_send_reminder( + self.nomcom, today + datetime.timedelta(days=delta), today + ) + ) self.nomcom.reminder_interval = None - self.assertFalse(is_time_to_send(self.nomcom,today,today)) + self.assertFalse(_is_time_to_send_reminder(self.nomcom, today, today)) self.nomcom.reminderdates_set.create(date=today) - self.assertTrue(is_time_to_send(self.nomcom,today,today)) + self.assertTrue(_is_time_to_send_reminder(self.nomcom, today, today)) - def test_command(self): - c = Command() - messages_before=len(outbox) + def test_send_reminders(self): + messages_before = len(outbox) self.nomcom.reminder_interval = 3 self.nomcom.save() - c.handle(None,None) + send_reminders() self.assertEqual(len(outbox), messages_before + 2) self.assertIn('nominee1@example.org', outbox[-1]['To']) self.assertIn('please complete', outbox[-1]['Subject']) self.assertIn('nominee1@example.org', outbox[-2]['To']) self.assertIn('please accept', outbox[-2]['Subject']) - messages_before=len(outbox) + messages_before = len(outbox) self.nomcom.reminder_interval = 4 self.nomcom.save() - c.handle(None,None) + send_reminders() self.assertEqual(len(outbox), messages_before + 1) self.assertIn('nominee2@example.org', outbox[-1]['To']) self.assertIn('please accept', outbox[-1]['Subject']) - + def test_remind_accept_view(self): url = reverse('ietf.nomcom.views.send_reminder_mail', kwargs={'year': NOMCOM_YEAR,'type':'accept'}) login_testing_unauthorized(self, CHAIR_USER, url) @@ -2025,7 +2075,15 @@ def first_meeting_of_year(year): if not ' ' in ascii: continue first_name, last_name = ascii.rsplit(None, 1) - MeetingRegistration.objects.create(meeting=meeting, first_name=first_name, last_name=last_name, person=person, country_code='WO', email=email, attended=True) + RegistrationFactory( + meeting=meeting, + first_name=first_name, + last_name=last_name, + person=person, + country_code='WO', + email=email, + attended=True + ) for view in ('public_eligible','private_eligible'): url = reverse(f'ietf.nomcom.views.{view}',kwargs={'year':self.nc.year()}) for username in (self.chair.user.username,'secretary'): @@ -2048,7 +2106,7 @@ def first_meeting_of_year(year): for number in range(meeting_start, meeting_start+8): m = MeetingFactory.create(type_id='ietf', number=number) for p in people: - m.meetingregistration_set.create(person=p, reg_type="onsite", checkedin=True, attended=True) + RegistrationFactory(meeting=m, person=p, checkedin=True, attended=True) for p in people: self.nc.volunteer_set.create(person=p,affiliation='something') for view in ('public_volunteers','private_volunteers'): @@ -2074,10 +2132,6 @@ def first_meeting_of_year(year): self.assertContains(response, people[-1].plain_name(), status_code=200) self.assertNotContains(response, unqualified_person.plain_name()) - - - - class NomComIndexTests(TestCase): def setUp(self): super().setUp() @@ -2192,7 +2246,7 @@ def test_public_accepting_feedback(self): self.assertIn('not currently accepting feedback', unicontent(response)) test_data = {'comment_text': 'junk', - 'position_name': pos.name, + 'position': pos.name, 'nominee_name': pos.nominee_set.first().email.person.name, 'nominee_email': pos.nominee_set.first().email.address, 'confirmation': False, @@ -2401,6 +2455,85 @@ def test_get_eligibility_date(self): 
NomComFactory(group__acronym=f'nomcom{this_year}', first_call_for_volunteers=datetime.date(this_year,5,6)) self.assertEqual(get_eligibility_date(),datetime.date(this_year,5,6)) + def test_get_qualified_author_queryset(self): + """get_qualified_author_queryset implements the eligiblity rules correctly + + This is not an exhaustive test of corner cases. Overlaps considerably with + rfc8989EligibilityTests.test_elig_by_author(). + """ + people = PersonFactory.create_batch(2) + extra_person = PersonFactory() + base_qs = Person.objects.filter(pk__in=[person.pk for person in people]) + now = datetime.datetime.now(tz=datetime.UTC) + one_year = datetime.timedelta(days=365) + + # Authors with no qualifying drafts + self.assertCountEqual( + get_qualified_author_queryset(base_qs, now - 5 * one_year, now), [] + ) + + # Authors with one qualifying draft + approved_draft = WgDraftFactory(authors=people, states=[("draft", "active")]) + DocEventFactory( + type="iesg_approved", + doc=approved_draft, + time=now - 4 * one_year, + ) + self.assertCountEqual( + get_qualified_author_queryset(base_qs, now - 5 * one_year, now), [] + ) + + # Create a draft that was published into an RFC. Give it an extra author who + # should not be eligible. + published_draft = WgDraftFactory(authors=people, states=[("draft", "rfc")]) + DocEventFactory( + type="iesg_approved", + doc=published_draft, + time=now - 5.5 * one_year, # < 6 years ago + ) + rfc = WgRfcFactory( + authors=people + [extra_person], + group=published_draft.group, + ) + DocEventFactory( + type="published_rfc", + doc=rfc, + time=now - 0.5 * one_year, # < 1 year ago + ) + # Period 6 years ago to 1 year ago - authors are eligible due to the + # iesg-approved draft in this window + self.assertCountEqual( + get_qualified_author_queryset(base_qs, now - 6 * one_year, now - one_year), + people, + ) + + # Period 5 years ago to now - authors are eligible due to the RFC publication + self.assertCountEqual( + get_qualified_author_queryset(base_qs, now - 5 * one_year, now), + people, + ) + + # Use the extra_person to check that a single doc can't count both as an + # RFC _and_ an approved draft. Use an eligibility interval that includes both + # the approval and the RFC publication + self.assertCountEqual( + get_qualified_author_queryset(base_qs, now - 6 * one_year, now), + people, # does not include extra_person! + ) + + # Now add an RfcAuthor for only one of the two authors to the RFC. This should + # remove the other author from the eligibility list because the DocumentAuthor + # records are no longer used. + RfcAuthorFactory( + document=rfc, + person=people[0], + titlepage_name="P. 
Zero", + ) + self.assertCountEqual( + get_qualified_author_queryset(base_qs, now - 5 * one_year, now), + [people[0]], + ) + class rfc8713EligibilityTests(TestCase): @@ -2424,7 +2557,7 @@ def setUp(self): for combo in combinations(meetings,combo_len): p = PersonFactory() for m in combo: - MeetingRegistrationFactory(person=p, meeting=m, attended=True) + RegistrationFactory(person=p, meeting=m, attended=True) if combo_len<3: self.ineligible_people.append(p) else: @@ -2434,7 +2567,7 @@ def setUp(self): def ineligible_person_with_role(**kwargs): p = RoleFactory(**kwargs).person for m in meetings: - MeetingRegistrationFactory(person=p, meeting=m, attended=True) + RegistrationFactory(person=p, meeting=m, attended=True) self.ineligible_people.append(p) for group in ['isocbot', 'ietf-trust', 'llc-board', 'iab']: for role in ['member', 'chair']: @@ -2449,8 +2582,7 @@ def ineligible_person_with_role(**kwargs): self.other_date = datetime.date(2009,5,1) self.other_people = PersonFactory.create_batch(1) for date in (datetime.date(2009,3,1), datetime.date(2008,11,1), datetime.date(2008,7,1)): - MeetingRegistrationFactory(person=self.other_people[0],meeting__date=date, meeting__type_id='ietf', attended=True) - + RegistrationFactory(person=self.other_people[0], meeting__date=date, meeting__type_id='ietf', attended=True) def test_is_person_eligible(self): for person in self.eligible_people: @@ -2494,7 +2626,7 @@ def setUp(self): for combo in combinations(meetings,combo_len): p = PersonFactory() for m in combo: - MeetingRegistrationFactory(person=p, meeting=m, attended=True) + RegistrationFactory(person=p, meeting=m, attended=True) if combo_len<3: self.ineligible_people.append(p) else: @@ -2542,7 +2674,7 @@ def test_elig_by_meetings(self): for combo in combinations(prev_five,combo_len): p = PersonFactory() for m in combo: - MeetingRegistrationFactory(person=p, meeting=m, attended=True) # not checkedin because this forces looking at older meetings + RegistrationFactory(person=p, meeting=m, attended=True) # not checkedin because this forces looking at older meetings AttendedFactory(session__meeting=m, session__type_id='plenary',person=p) if combo_len<3: ineligible_people.append(p) @@ -2557,8 +2689,9 @@ def test_elig_by_meetings(self): for person in ineligible_people: self.assertFalse(is_eligible(person,nomcom)) - Person.objects.filter(pk__in=[p.pk for p in eligible_people+ineligible_people]).delete() - + people = Person.objects.filter(pk__in=[p.pk for p in eligible_people + ineligible_people]) + Registration.objects.filter(person__in=people).delete() + people.delete() def test_elig_by_office_active_groups(self): @@ -2685,33 +2818,41 @@ def test_elig_by_author(self): ineligible = set() p = PersonFactory() - ineligible.add(p) - + ineligible.add(p) # no RFCs or iesg-approved drafts p = PersonFactory() - da = WgDocumentAuthorFactory(person=p) - DocEventFactory(type='published_rfc',doc=da.document,time=middle_date) - ineligible.add(p) + doc = WgRfcFactory(authors=[p]) + DocEventFactory(type='published_rfc', doc=doc, time=middle_date) + ineligible.add(p) # only one RFC p = PersonFactory() - da = WgDocumentAuthorFactory(person=p) + da = WgDocumentAuthorFactory( + person=p, + document__states=[("draft", "active"), ("draft-rfceditor", "ref")], + ) DocEventFactory(type='iesg_approved',doc=da.document,time=last_date) - da = WgDocumentAuthorFactory(person=p) - DocEventFactory(type='published_rfc',doc=da.document,time=first_date) - eligible.add(p) + doc = WgRfcFactory(authors=[p]) + DocEventFactory(type='published_rfc', 
doc=doc, time=first_date) + eligible.add(p) # one RFC and one iesg-approved draft p = PersonFactory() - da = WgDocumentAuthorFactory(person=p) + da = WgDocumentAuthorFactory( + person=p, + document__states=[("draft", "active"), ("draft-rfceditor", "ref")], + ) DocEventFactory(type='iesg_approved',doc=da.document,time=middle_date) - da = WgDocumentAuthorFactory(person=p) - DocEventFactory(type='published_rfc',doc=da.document,time=day_before_first_date) - ineligible.add(p) + doc = WgRfcFactory(authors=[p]) + DocEventFactory(type='published_rfc', doc=doc, time=day_before_first_date) + ineligible.add(p) # RFC is out of the eligibility window p = PersonFactory() - da = WgDocumentAuthorFactory(person=p) + da = WgDocumentAuthorFactory( + person=p, + document__states=[("draft", "active"), ("draft-rfceditor", "ref")], + ) DocEventFactory(type='iesg_approved',doc=da.document,time=day_after_last_date) - da = WgDocumentAuthorFactory(person=p) - DocEventFactory(type='published_rfc',doc=da.document,time=middle_date) - ineligible.add(p) + doc = WgRfcFactory(authors=[p]) + DocEventFactory(type='published_rfc', doc=doc, time=middle_date) + ineligible.add(p) # iesg approval is outside the eligibility window for person in eligible: self.assertTrue(is_eligible(person,nomcom)) @@ -2742,7 +2883,7 @@ def setUp(self): def test_registration_is_not_enough(self): p = PersonFactory() for meeting in self.meetings: - MeetingRegistrationFactory(person=p, meeting=meeting, checkedin=False) + RegistrationFactory(person=p, meeting=meeting, checkedin=False) self.assertFalse(is_eligible(p, self.nomcom)) def test_elig_by_meetings(self): @@ -2759,7 +2900,7 @@ def test_elig_by_meetings(self): for method in attendance_methods: p = PersonFactory() for meeting in combo: - MeetingRegistrationFactory(person=p, meeting=meeting, reg_type='onsite', checkedin=(method in ('checkedin', 'both'))) + RegistrationFactory(person=p, meeting=meeting, checkedin=(method in ('checkedin', 'both'))) if method in ('session', 'both'): AttendedFactory(session__meeting=meeting, session__type_id='plenary',person=p) if combo_len<3: @@ -2792,7 +2933,7 @@ def test_volunteer(self): self.assertContains(r, 'NomCom is not accepting volunteers at this time', status_code=200) nomcom.is_accepting_volunteers = True nomcom.save() - MeetingRegistrationFactory(person=person, affiliation='mtg_affiliation', checkedin=True) + RegistrationFactory(person=person, affiliation='mtg_affiliation', checkedin=True) r = self.client.get(url) self.assertContains(r, 'Volunteer for NomCom', status_code=200) self.assertContains(r, 'mtg_affiliation') @@ -2839,15 +2980,38 @@ def test_volunteer(self): def test_suggest_affiliation(self): person = PersonFactory() - self.assertEqual(suggest_affiliation(person), '') - da = DocumentAuthorFactory(person=person,affiliation='auth_affil') + self.assertEqual(suggest_affiliation(person), "") + rfc_da = DocumentAuthorFactory( + person=person, + document__type_id="rfc", + affiliation="", + ) + rfc = rfc_da.document + DocEventFactory(doc=rfc, type="published_rfc") + self.assertEqual(suggest_affiliation(person), "") + + rfc_da.affiliation = "rfc_da_affil" + rfc_da.save() + self.assertEqual(suggest_affiliation(person), "rfc_da_affil") + + rfc_ra = RfcAuthorFactory(person=person, document=rfc, affiliation="") + self.assertEqual(suggest_affiliation(person), "") + + rfc_ra.affiliation = "rfc_ra_affil" + rfc_ra.save() + self.assertEqual(suggest_affiliation(person), "rfc_ra_affil") + + da = DocumentAuthorFactory(person=person, affiliation="auth_affil") 
NewRevisionDocEventFactory(doc=da.document) - self.assertEqual(suggest_affiliation(person), 'auth_affil') + self.assertEqual(suggest_affiliation(person), "auth_affil") + nc = NomComFactory() - nc.volunteer_set.create(person=person,affiliation='volunteer_affil') - self.assertEqual(suggest_affiliation(person), 'volunteer_affil') - MeetingRegistrationFactory(person=person, affiliation='meeting_affil') - self.assertEqual(suggest_affiliation(person), 'meeting_affil') + nc.volunteer_set.create(person=person, affiliation="volunteer_affil") + self.assertEqual(suggest_affiliation(person), "volunteer_affil") + + RegistrationFactory(person=person, affiliation="meeting_affil") + self.assertEqual(suggest_affiliation(person), "meeting_affil") + class VolunteerDecoratorUnitTests(TestCase): def test_decorate_volunteers_with_qualifications(self): @@ -2864,7 +3028,7 @@ def test_decorate_volunteers_with_qualifications(self): ('106', datetime.date(2019, 11, 16)), ]] for m in meetings: - MeetingRegistrationFactory(meeting=m, person=meeting_person, attended=True) + RegistrationFactory(meeting=m, person=meeting_person, attended=True) AttendedFactory(session__meeting=m, session__type_id='plenary', person=meeting_person) nomcom.volunteer_set.create(person=meeting_person) @@ -2883,15 +3047,15 @@ def test_decorate_volunteers_with_qualifications(self): author_person = PersonFactory() for i in range(2): - da = WgDocumentAuthorFactory(person=author_person) + doc = WgRfcFactory(authors=[author_person]) DocEventFactory( type='published_rfc', - doc=da.document, + doc=doc, time=datetime.datetime( elig_date.year - 3, elig_date.month, 28 if elig_date.month == 2 and elig_date.day == 29 else elig_date.day, - tzinfo=datetime.timezone.utc, + tzinfo=datetime.UTC, ) ) nomcom.volunteer_set.create(person=author_person) @@ -3017,3 +3181,10 @@ def test_reclassify_feedback_unrelated(self): self.assertEqual(fb.type_id, 'junk') self.assertEqual(Feedback.objects.filter(type='read').count(), 0) self.assertEqual(Feedback.objects.filter(type='junk').count(), 1) + + +class TaskTests(TestCase): + @mock.patch("ietf.nomcom.tasks.send_reminders") + def test_send_nomcom_reminders_task(self, mock_send): + send_nomcom_reminders_task() + self.assertEqual(mock_send.call_count, 1) diff --git a/ietf/nomcom/utils.py b/ietf/nomcom/utils.py index 220f2e401d..a2ab680df6 100644 --- a/ietf/nomcom/utils.py +++ b/ietf/nomcom/utils.py @@ -16,8 +16,9 @@ from email.header import decode_header from email.iterators import typed_subpart_iterator from email.utils import parseaddr +from textwrap import dedent -from django.db.models import Q, Count +from django.db.models import Q, Count, F, QuerySet from django.conf import settings from django.contrib.sites.models import Site from django.core.exceptions import ObjectDoesNotExist @@ -26,7 +27,7 @@ from django.shortcuts import get_object_or_404 from ietf.dbtemplate.models import DBTemplate -from ietf.doc.models import DocEvent, NewRevisionDocEvent +from ietf.doc.models import DocEvent, NewRevisionDocEvent, Document from ietf.group.models import Group, Role from ietf.person.models import Email, Person from ietf.mailtrigger.utils import gather_address_lists @@ -88,26 +89,21 @@ def get_year_by_nomcom(nomcom): return m.group(0) -def get_user_email(user): - # a user object already has an email field, but we don't want to - # overwrite anything that might be there, and we don't know that - # what's there is the right thing, so we cache the lookup results in a - # separate attribute - if not hasattr(user, "_email_cache"): - 
user._email_cache = None - if hasattr(user, "person"): - emails = user.person.email_set.filter(active=True).order_by('-time') - if emails: - user._email_cache = emails[0] - for email in emails: - if email.address.lower() == user.username.lower(): - user._email_cache = email +def get_person_email(person): + if not hasattr(person, "_email_cache"): + person._email_cache = None + emails = person.email_set.filter(active=True).order_by('-time') + if emails: + person._email_cache = emails[0] + for email in emails: + if email.address.lower() == person.user.username.lower(): + person._email_cache = email else: try: - user._email_cache = Email.objects.get(address=user.username) + person._email_cache = Email.objects.get(address=person.user.username) except ObjectDoesNotExist: pass - return user._email_cache + return person._email_cache def get_hash_nominee_position(date, nominee_position_id): return hmac.new(settings.NOMCOM_APP_SECRET, f"{date}{nominee_position_id}".encode('utf-8'), hashlib.sha256).hexdigest() @@ -188,7 +184,7 @@ def retrieve_nomcom_private_key(request, year): if not private_key: return private_key - command = "%s bf -d -in /dev/stdin -k \"%s\" -a" + command = "%s aes-128-ecb -d -in /dev/stdin -k \"%s\" -a -iter 1000" code, out, error = pipe( command % ( settings.OPENSSL_COMMAND, @@ -212,7 +208,7 @@ def store_nomcom_private_key(request, year, private_key): if not private_key: request.session['NOMCOM_PRIVATE_KEY_%s' % year] = '' else: - command = "%s bf -e -in /dev/stdin -k \"%s\" -a" + command = "%s aes-128-ecb -e -in /dev/stdin -k \"%s\" -a -iter 1000" code, out, error = pipe( command % ( settings.OPENSSL_COMMAND, @@ -580,6 +576,70 @@ def get_8989_eligibility_querysets(date, base_qs): def get_9389_eligibility_querysets(date, base_qs): return get_threerule_eligibility_querysets(date, base_qs, three_of_five_callable=three_of_five_eligible_9389) + +def get_qualified_author_queryset( + base_qs: QuerySet[Person], + eligibility_period_start: datetime.datetime, + eligibility_period_end: datetime.datetime, +): + """Filter a Person queryset, keeping those qualified by RFC 8989's author path + + The author path is defined by "path 3" in section 4 of RFC 8989. It qualifies + a person who has been a front-page listed author or editor of at least two IETF- + stream RFCs within the last five years. An I-D in the RFC Editor queue that was + approved by the IESG is treated as an RFC, using the date of entry to the RFC + Editor queue as the date for qualification. + + This method does not strictly enforce "in the RFC Editor queue" for IESG-approved + drafts when computing eligibility. In the overwhelming majority of cases, an IESG- + approved draft immediately enters the queue and goes on to be published, so this + simplification makes the calculation much easier and virtually never affects + eligibility. + + Arguments eligibility_period_start and eligibility_period_end are datetimes that + mark the start and end of the eligibility period. These should be five years apart. 
+ """ + # First, get the RFCs using publication date + qualifying_rfc_pub_events = DocEvent.objects.filter( + type='published_rfc', + time__gte=eligibility_period_start, + time__lte=eligibility_period_end, + ) + qualifying_rfcs = Document.objects.filter( + type_id="rfc", + docevent__in=qualifying_rfc_pub_events + ).annotate( + rfcauthor_count=Count("rfcauthor") + ) + rfcs_with_rfcauthors = qualifying_rfcs.filter(rfcauthor_count__gt=0).distinct() + rfcs_without_rfcauthors = qualifying_rfcs.filter(rfcauthor_count=0).distinct() + + # Second, get the IESG-approved I-Ds excluding any we're already counting as rfcs + qualifying_approval_events = DocEvent.objects.filter( + type='iesg_approved', + time__gte=eligibility_period_start, + time__lte=eligibility_period_end, + ) + qualifying_drafts = Document.objects.filter( + type_id="draft", + docevent__in=qualifying_approval_events, + ).exclude( + relateddocument__relationship_id="became_rfc", + relateddocument__target__in=qualifying_rfcs, + ).distinct() + + return base_qs.filter( + Q(documentauthor__document__in=qualifying_drafts) + | Q(rfcauthor__document__in=rfcs_with_rfcauthors) + | Q(documentauthor__document__in=rfcs_without_rfcauthors) + ).annotate( + document_author_count=Count('documentauthor'), + rfc_author_count=Count("rfcauthor") + ).annotate( + authorship_count=F("document_author_count") + F("rfc_author_count") + ).filter(authorship_count__gte=2) + + def get_threerule_eligibility_querysets(date, base_qs, three_of_five_callable): if not base_qs: base_qs = Person.objects.all() @@ -612,14 +672,7 @@ def get_threerule_eligibility_querysets(date, base_qs, three_of_five_callable): ) ).distinct() - rfc_pks = set(DocEvent.objects.filter(type='published_rfc', time__gte=five_years_ago, time__lte=date_as_dt).values_list('doc__pk', flat=True)) - iesgappr_pks = set(DocEvent.objects.filter(type='iesg_approved', time__gte=five_years_ago, time__lte=date_as_dt).values_list('doc__pk',flat=True)) - qualifying_pks = rfc_pks.union(iesgappr_pks.difference(rfc_pks)) - author_qs = base_qs.filter( - documentauthor__document__pk__in=qualifying_pks - ).annotate( - document_author_count = Count('documentauthor') - ).filter(document_author_count__gte=2) + author_qs = get_qualified_author_queryset(base_qs, five_years_ago, date_as_dt) return three_of_five_qs, officer_qs, author_qs def list_eligible_8989(date, base_qs=None): @@ -670,14 +723,14 @@ def previous_five_meetings(date = None): return Meeting.objects.filter(type='ietf',date__lte=date).order_by('-date')[:5] def three_of_five_eligible_8713(previous_five, queryset=None): - """ Return a list of Person records who attended at least + """ Return a list of Person records who attended at least 3 of the 5 type_id='ietf' meetings before the given date. Does not disqualify anyone based on held roles. 
This variant bases the calculation on MeetingRegistration.attended """ if queryset is None: queryset = Person.objects.all() - return queryset.filter(meetingregistration__meeting__in=list(previous_five),meetingregistration__attended=True).annotate(mtg_count=Count('meetingregistration')).filter(mtg_count__gte=3) + return queryset.filter(registration__meeting__in=list(previous_five), registration__attended=True).annotate(mtg_count=Count('registration')).filter(mtg_count__gte=3) def three_of_five_eligible_9389(previous_five, queryset=None): """ Return a list of Person records who attended at least @@ -695,18 +748,42 @@ def three_of_five_eligible_9389(previous_five, queryset=None): counts[id] += 1 return queryset.filter(pk__in=[id for id, count in counts.items() if count >= 3]) -def suggest_affiliation(person): - recent_meeting = person.meetingregistration_set.order_by('-meeting__date').first() - affiliation = recent_meeting.affiliation if recent_meeting else '' - if not affiliation: - recent_volunteer = person.volunteer_set.order_by('-nomcom__group__acronym').first() - if recent_volunteer: - affiliation = recent_volunteer.affiliation - if not affiliation: - recent_draft_revision = NewRevisionDocEvent.objects.filter(doc__type_id='draft',doc__documentauthor__person=person).order_by('-time').first() - if recent_draft_revision: - affiliation = recent_draft_revision.doc.documentauthor_set.filter(person=person).first().affiliation - return affiliation +def suggest_affiliation(person) -> str: + """Heuristically suggest a current affiliation for a Person""" + recent_meeting = person.registration_set.order_by('-meeting__date').first() + if recent_meeting and recent_meeting.affiliation: + return recent_meeting.affiliation + + recent_volunteer = person.volunteer_set.order_by('-nomcom__group__acronym').first() + if recent_volunteer and recent_volunteer.affiliation: + return recent_volunteer.affiliation + + recent_draft_revision = NewRevisionDocEvent.objects.filter( + doc__type_id="draft", + doc__documentauthor__person=person, + ).order_by("-time").first() + if recent_draft_revision: + draft_author = recent_draft_revision.doc.documentauthor_set.filter( + person=person + ).first() + if draft_author and draft_author.affiliation: + return draft_author.affiliation + + recent_rfc_publication = DocEvent.objects.filter( + Q(doc__documentauthor__person=person) | Q(doc__rfcauthor__person=person), + doc__type_id="rfc", + type="published_rfc", + ).order_by("-time").first() + if recent_rfc_publication: + rfc = recent_rfc_publication.doc + if rfc.rfcauthor_set.exists(): + rfc_author = rfc.rfcauthor_set.filter(person=person).first() + else: + rfc_author = rfc.documentauthor_set.filter(person=person).first() + if rfc_author and rfc_author.affiliation: + return rfc_author.affiliation + return "" + def extract_volunteers(year): nomcom = get_nomcom_by_year(year) @@ -720,3 +797,58 @@ def extract_volunteers(year): decorate_volunteers_with_qualifications(volunteers,nomcom=nomcom) volunteers = sorted(volunteers,key=lambda v:(not v.eligible,v.person.last_name())) return nomcom, volunteers + + +def ingest_feedback_email(message: bytes, year: int): + from ietf.api.views import EmailIngestionError # avoid circular import + from .models import NomCom + try: + nomcom = NomCom.objects.get(group__acronym__icontains=str(year), + group__state__slug='active') + except NomCom.DoesNotExist: + raise EmailIngestionError( + f"Error ingesting nomcom email: nomcom {year} does not exist or is not active", + email_body=dedent(f"""\ + An email for 
nomcom {year} was posted to ingest_feedback_email, but no + active nomcom exists for that year. + """), + ) + + try: + feedback = create_feedback_email(nomcom, message) + except Exception as err: + raise EmailIngestionError( + f"Error ingesting nomcom {year} feedback email", + email_recipients=nomcom.chair_emails(), + email_body=dedent(f"""\ + An error occurred while ingesting feedback email for nomcom {year}. + + {{error_summary}} + """), + email_original_message=message, + ) from err + log("Received nomcom email from %s" % feedback.author) + + +def _is_time_to_send_reminder(nomcom, send_date, nomination_date): + if nomcom.reminder_interval: + days_passed = (send_date - nomination_date).days + return days_passed > 0 and days_passed % nomcom.reminder_interval == 0 + else: + return bool(nomcom.reminderdates_set.filter(date=send_date)) + + +def send_reminders(): + from .models import NomCom, NomineePosition + for nomcom in NomCom.objects.filter(group__state__slug="active"): + nps = NomineePosition.objects.filter( + nominee__nomcom=nomcom, nominee__duplicated__isnull=True + ) + for nominee_position in nps.pending(): + if _is_time_to_send_reminder(nomcom, date_today(), nominee_position.time.date()): + send_accept_reminder_to_nominee(nominee_position) + log(f"Sent accept reminder to {nominee_position.nominee.email.address}") + for nominee_position in nps.accepted().without_questionnaire_response(): + if _is_time_to_send_reminder(nomcom, date_today(), nominee_position.time.date()): + send_questionnaire_reminder_to_nominee(nominee_position) + log(f"Sent questionnaire reminder to {nominee_position.nominee.email.address}") diff --git a/ietf/nomcom/views.py b/ietf/nomcom/views.py index 71f76679c2..3f90be5253 100644 --- a/ietf/nomcom/views.py +++ b/ietf/nomcom/views.py @@ -57,7 +57,7 @@ def index(request): for nomcom in nomcom_list: year = int(nomcom.acronym[6:]) nomcom.year = year - nomcom.label = "%s/%s" % (year, year+1) + nomcom.label = str(year) if year > 2012: nomcom.url = "/nomcom/%04d" % year else: @@ -454,23 +454,24 @@ def nominate(request, year, public, newperson): {'nomcom': nomcom, 'year': year}) + person = request.user.person if request.method == 'POST': if newperson: - form = NominateNewPersonForm(data=request.POST, nomcom=nomcom, user=request.user, public=public) + form = NominateNewPersonForm(data=request.POST, nomcom=nomcom, person=person, public=public) else: - form = NominateForm(data=request.POST, nomcom=nomcom, user=request.user, public=public) + form = NominateForm(data=request.POST, nomcom=nomcom, person=person, public=public) if form.is_valid(): form.save() messages.success(request, 'Your nomination has been registered. 
Thank you for the nomination.') if newperson: return redirect('ietf.nomcom.views.%s_nominate' % ('public' if public else 'private'), year=year) else: - form = NominateForm(nomcom=nomcom, user=request.user, public=public) + form = NominateForm(nomcom=nomcom, person=person, public=public) else: if newperson: - form = NominateNewPersonForm(nomcom=nomcom, user=request.user, public=public) + form = NominateNewPersonForm(nomcom=nomcom, person=person, public=public) else: - form = NominateForm(nomcom=nomcom, user=request.user, public=public) + form = NominateForm(nomcom=nomcom, person=person, public=public) return render(request, template, {'form': form, @@ -494,6 +495,7 @@ def feedback(request, year, public): nominee = None position = None topic = None + person = request.user.person if nomcom.group.state_id != 'conclude': selected_nominee = request.GET.get('nominee') selected_position = request.GET.get('position') @@ -505,7 +507,7 @@ def feedback(request, year, public): topic = get_object_or_404(Topic,id=selected_topic) if topic.audience_id == 'nomcom' and not nomcom.group.has_role(request.user, ['chair','advisor','liaison','member']): raise Http404() - if topic.audience_id == 'nominees' and not nomcom.nominee_set.filter(person=request.user.person).exists(): + if topic.audience_id == 'nominees' and not nomcom.nominee_set.filter(person=person).exists(): raise Http404() if public: @@ -517,12 +519,12 @@ def feedback(request, year, public): if not nomcom.group.has_role(request.user, ['chair','advisor','liaison','member']): topics = topics.exclude(audience_id='nomcom') - if not nomcom.nominee_set.filter(person=request.user.person).exists(): + if not nomcom.nominee_set.filter(person=person).exists(): topics = topics.exclude(audience_id='nominees') user_comments = Feedback.objects.filter(nomcom=nomcom, type='comment', - author__in=request.user.person.email_set.filter(active='True')) + author__in=person.email_set.filter(active='True')) counter = Counter(user_comments.values_list('positions','nominees')) counts = dict() for pos,nom in counter: @@ -572,11 +574,11 @@ def feedback(request, year, public): if request.method == 'POST': if nominee and position: form = FeedbackForm(data=request.POST, - nomcom=nomcom, user=request.user, + nomcom=nomcom, person=person, public=public, position=position, nominee=nominee) elif topic: form = FeedbackForm(data=request.POST, - nomcom=nomcom, user=request.user, + nomcom=nomcom, person=person, public=public, topic=topic) else: form = None @@ -595,10 +597,10 @@ def feedback(request, year, public): pass else: if nominee and position: - form = FeedbackForm(nomcom=nomcom, user=request.user, public=public, + form = FeedbackForm(nomcom=nomcom, person=person, public=public, position=position, nominee=nominee) elif topic: - form = FeedbackForm(nomcom=nomcom, user=request.user, public=public, + form = FeedbackForm(nomcom=nomcom, person=person, public=public, topic=topic) else: form = None @@ -661,6 +663,7 @@ def private_questionnaire(request, year): has_publickey = nomcom.public_key and True or False questionnaire_response = None template = 'nomcom/private_questionnaire.html' + person = request.user.person if not has_publickey: messages.warning(request, "This Nomcom is not yet accepting questionnaires.") @@ -680,14 +683,14 @@ def private_questionnaire(request, year): if request.method == 'POST': form = QuestionnaireForm(data=request.POST, - nomcom=nomcom, user=request.user) + nomcom=nomcom, person=person) if form.is_valid(): form.save() messages.success(request, 'The questionnaire 
response has been registered.') questionnaire_response = force_str(form.cleaned_data['comment_text']) - form = QuestionnaireForm(nomcom=nomcom, user=request.user) + form = QuestionnaireForm(nomcom=nomcom, person=person) else: - form = QuestionnaireForm(nomcom=nomcom, user=request.user) + form = QuestionnaireForm(nomcom=nomcom, person=person) return render(request, template, {'form': form, @@ -725,15 +728,13 @@ def process_nomination_status(request, year, nominee_position_id, state, date, h if form.cleaned_data['comments']: # This Feedback object is of type comment instead of nomina in order to not # make answering "who nominated themselves" harder. - who = request.user - if isinstance(who,AnonymousUser): - who = None + who = None if isinstance(request.user, AnonymousUser) else request.user.person f = Feedback.objects.create(nomcom = nomcom, author = nominee_position.nominee.email, subject = '%s nomination %s'%(nominee_position.nominee.name(),state), comments = nomcom.encrypt(form.cleaned_data['comments']), type_id = 'comment', - user = who, + person = who, ) f.positions.add(nominee_position.position) f.nominees.add(nominee_position.nominee) @@ -779,8 +780,9 @@ def nominee_staterank(nominee): sorted_nominees = sorted(nominees,key=lambda x:x.staterank) + reviewer = request.user.person for nominee in sorted_nominees: - last_seen = FeedbackLastSeen.objects.filter(reviewer=request.user.person,nominee=nominee).first() + last_seen = FeedbackLastSeen.objects.filter(reviewer=reviewer,nominee=nominee).first() nominee_feedback = [] for ft in nominee_feedback_types: qs = nominee.feedback_set.by_type(ft.slug) @@ -795,7 +797,7 @@ def nominee_staterank(nominee): nominees_feedback.append( {'nominee':nominee, 'feedback':nominee_feedback} ) independent_feedback = [ft.feedback_set.get_by_nomcom(nomcom).count() for ft in independent_feedback_types] for topic in nomcom.topic_set.all(): - last_seen = TopicFeedbackLastSeen.objects.filter(reviewer=request.user.person,topic=topic).first() + last_seen = TopicFeedbackLastSeen.objects.filter(reviewer=reviewer,topic=topic).first() topic_feedback = [] for ft in topic_feedback_types: qs = topic.feedback_set.by_type(ft.slug) @@ -842,6 +844,7 @@ def view_feedback_pending(request, year): except EmptyPage: feedback_page = paginator.page(paginator.num_pages) extra_step = False + person = request.user.person if request.method == 'POST' and request.POST.get('end'): extra_ids = request.POST.get('extra_ids', None) extra_step = True @@ -850,7 +853,7 @@ def view_feedback_pending(request, year): formset.absolute_max = 2000 formset.validate_max = False for form in formset.forms: - form.set_nomcom(nomcom, request.user) + form.set_nomcom(nomcom, person) if formset.is_valid(): formset.save() if extra_ids: @@ -862,7 +865,7 @@ def view_feedback_pending(request, year): extra.append(feedback) formset = FullFeedbackFormSet(queryset=Feedback.objects.filter(id__in=[i.id for i in extra])) for form in formset.forms: - form.set_nomcom(nomcom, request.user, extra) + form.set_nomcom(nomcom, person, extra) extra_ids = None else: messages.success(request, 'Feedback saved') @@ -870,7 +873,7 @@ def view_feedback_pending(request, year): elif request.method == 'POST': formset = FeedbackFormSet(request.POST) for form in formset.forms: - form.set_nomcom(nomcom, request.user) + form.set_nomcom(nomcom, person) if formset.is_valid(): extra = [] nominations = [] @@ -890,12 +893,12 @@ def view_feedback_pending(request, year): if nominations: formset = 
FullFeedbackFormSet(queryset=Feedback.objects.filter(id__in=[i.id for i in nominations])) for form in formset.forms: - form.set_nomcom(nomcom, request.user, nominations) + form.set_nomcom(nomcom, person, nominations) extra_ids = ','.join(['%s:%s' % (i.id, i.type.pk) for i in extra]) else: formset = FullFeedbackFormSet(queryset=Feedback.objects.filter(id__in=[i.id for i in extra])) for form in formset.forms: - form.set_nomcom(nomcom, request.user, extra) + form.set_nomcom(nomcom, person, extra) if moved: messages.success(request, '%s messages classified. You must enter more information for the following feedback.' % moved) else: @@ -904,7 +907,7 @@ def view_feedback_pending(request, year): else: formset = FeedbackFormSet(queryset=feedback_page.object_list) for form in formset.forms: - form.set_nomcom(nomcom, request.user) + form.set_nomcom(nomcom, person) return render(request, 'nomcom/view_feedback_pending.html', {'year': year, 'formset': formset, @@ -975,13 +978,14 @@ def view_feedback_topic(request, year, topic_id): topic = get_object_or_404(Topic, id=topic_id) nomcom = get_nomcom_by_year(year) feedback_types = FeedbackTypeName.objects.filter(slug__in=['comment',]) + reviewer = request.user.person - last_seen = TopicFeedbackLastSeen.objects.filter(reviewer=request.user.person,topic=topic).first() - last_seen_time = (last_seen and last_seen.time) or datetime.datetime(year=1, month=1, day=1, tzinfo=datetime.timezone.utc) + last_seen = TopicFeedbackLastSeen.objects.filter(reviewer=reviewer,topic=topic).first() + last_seen_time = (last_seen and last_seen.time) or datetime.datetime(year=1, month=1, day=1, tzinfo=datetime.UTC) if last_seen: last_seen.save() else: - TopicFeedbackLastSeen.objects.create(reviewer=request.user.person,topic=topic) + TopicFeedbackLastSeen.objects.create(reviewer=reviewer,topic=topic) return render(request, 'nomcom/view_feedback_topic.html', {'year': year, @@ -997,7 +1001,7 @@ def view_feedback_nominee(request, year, nominee_id): nomcom = get_nomcom_by_year(year) nominee = get_object_or_404(Nominee, id=nominee_id) feedback_types = FeedbackTypeName.objects.filter(used=True, slug__in=settings.NOMINEE_FEEDBACK_TYPES) - + reviewer = request.user.person if request.method == 'POST': if not nomcom.group.has_role(request.user, ['chair','advisor']): return HttpResponseForbidden('Restricted to roles: Nomcom Chair, Nomcom Advisor') @@ -1013,7 +1017,10 @@ def view_feedback_nominee(request, year, nominee_id): 'positions': ','.join([str(p) for p in feedback.positions.all()]), }, request=request) - response = HttpResponse(response, content_type='text/plain') + response = HttpResponse( + response, + content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}", + ) response['Content-Disposition'] = f'attachment; filename="{fn}"' return response elif submit == 'reclassify': @@ -1036,12 +1043,12 @@ def view_feedback_nominee(request, year, nominee_id): 'is_chair_task': True, }) - last_seen = FeedbackLastSeen.objects.filter(reviewer=request.user.person,nominee=nominee).first() - last_seen_time = (last_seen and last_seen.time) or datetime.datetime(year=1, month=1, day=1, tzinfo=datetime.timezone.utc) + last_seen = FeedbackLastSeen.objects.filter(reviewer=reviewer,nominee=nominee).first() + last_seen_time = (last_seen and last_seen.time) or datetime.datetime(year=1, month=1, day=1, tzinfo=datetime.UTC) if last_seen: last_seen.save() else: - FeedbackLastSeen.objects.create(reviewer=request.user.person,nominee=nominee) + FeedbackLastSeen.objects.create(reviewer=reviewer,nominee=nominee) 
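# The feedback views above record when a reviewer last looked at a nominee or
# topic.  A minimal sketch of that pattern (assuming the model's time field is
# auto_now so a bare save() refreshes the stamp; the model definitions are not
# shown in this diff):
import datetime

def mark_seen(last_seen_model, **lookup):
    last_seen = last_seen_model.objects.filter(**lookup).first()
    if last_seen:
        last_seen.save()  # refreshes an auto_now timestamp
    else:
        last_seen_model.objects.create(**lookup)
    # feedback newer than this is rendered as "unread"
    return (last_seen and last_seen.time) or datetime.datetime(1, 1, 1, tzinfo=datetime.UTC)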
return render(request, 'nomcom/view_feedback_nominee.html', {'year': year, @@ -1322,15 +1329,15 @@ def configuration_help(request, year): @role_required("Nomcom Chair", "Nomcom Advisor") def edit_members(request, year): nomcom = get_nomcom_by_year(year) - if nomcom.group.state_id=='conclude': permission_denied(request, 'This nomcom is closed.') + person = request.user.person if request.method=='POST': form = EditMembersForm(nomcom, data=request.POST) if form.is_valid(): - update_role_set(nomcom.group, 'member', form.cleaned_data['members'], request.user.person) - update_role_set(nomcom.group, 'liaison', form.cleaned_data['liaisons'], request.user.person) + update_role_set(nomcom.group, 'member', form.cleaned_data['members'], person) + update_role_set(nomcom.group, 'liaison', form.cleaned_data['liaisons'], person) return HttpResponseRedirect(reverse('ietf.nomcom.views.private_index',kwargs={'year':year})) else: form = EditMembersForm(nomcom) @@ -1373,7 +1380,7 @@ def volunteer(request): form = VolunteerForm(person=person, data=request.POST) if form.is_valid(): for nc in form.cleaned_data['nomcoms']: - nc.volunteer_set.create(person=person, affiliation=form.cleaned_data['affiliation']) + nc.volunteer_set.get_or_create(person=person, defaults={"affiliation": form.cleaned_data["affiliation"], "origin":"datatracker"}) return redirect('ietf.ietfauth.views.profile') else: form = VolunteerForm(person=person,initial=dict(nomcoms=can_volunteer, affiliation=suggest_affiliation(person))) diff --git a/ietf/person/admin.py b/ietf/person/admin.py index cd8ca2abf1..f46edcf8ae 100644 --- a/ietf/person/admin.py +++ b/ietf/person/admin.py @@ -7,6 +7,7 @@ from ietf.person.models import Email, Alias, Person, PersonalApiKey, PersonEvent, PersonApiKeyEvent, PersonExtResource from ietf.person.name import name_parts +from ietf.utils.admin import SaferStackedInline, SaferTabularInline from ietf.utils.validators import validate_external_resource_value @@ -16,7 +17,7 @@ class EmailAdmin(simple_history.admin.SimpleHistoryAdmin): search_fields = ["address", "person__name", ] admin.site.register(Email, EmailAdmin) -class EmailInline(admin.TabularInline): +class EmailInline(SaferTabularInline): model = Email class AliasAdmin(admin.ModelAdmin): @@ -25,7 +26,7 @@ class AliasAdmin(admin.ModelAdmin): raw_id_fields = ["person"] admin.site.register(Alias, AliasAdmin) -class AliasInline(admin.StackedInline): +class AliasInline(SaferStackedInline): model = Alias class PersonAdmin(simple_history.admin.SimpleHistoryAdmin): diff --git a/ietf/person/api.py b/ietf/person/api.py new file mode 100644 index 0000000000..960785a3d4 --- /dev/null +++ b/ietf/person/api.py @@ -0,0 +1,45 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +"""DRF API Views""" +from rest_framework import mixins, viewsets +from rest_framework.decorators import action +from rest_framework.permissions import IsAuthenticated +from rest_framework.response import Response + +from ietf.api.permissions import BelongsToOwnPerson, IsOwnPerson +from ietf.ietfauth.utils import send_new_email_confirmation_request + +from .models import Email, Person +from .serializers import NewEmailSerializer, EmailSerializer, PersonSerializer + + +class EmailViewSet(mixins.UpdateModelMixin, viewsets.GenericViewSet): + """Email viewset + + Only allows updating an existing email for now. 
+ """ + permission_classes = [IsAuthenticated & BelongsToOwnPerson] + queryset = Email.objects.all() + serializer_class = EmailSerializer + lookup_value_regex = '.+@.+' # allow @-sign in the pk + + +class PersonViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet): + """Person viewset""" + permission_classes = [IsAuthenticated & IsOwnPerson] + queryset = Person.objects.all() + serializer_class = PersonSerializer + + @action(detail=True, methods=["post"], serializer_class=NewEmailSerializer) + def email(self, request, pk=None): + """Add an email address for this Person + + Always succeeds if the email address is valid. Causes a confirmation email to be sent to the + requested address and completion of that handshake will actually add the email address. If the + address already exists, an alert will be sent instead of the confirmation email. + """ + person = self.get_object() + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + # This may or may not actually send a confirmation, but doesn't reveal that to the user. + send_new_email_confirmation_request(person, serializer.validated_data["address"]) + return Response(serializer.data) diff --git a/ietf/person/factories.py b/ietf/person/factories.py index 2247fa9b2b..98756f26c8 100644 --- a/ietf/person/factories.py +++ b/ietf/person/factories.py @@ -8,7 +8,7 @@ import faker.config import os import random -import shutil +from PIL import Image from unidecode import unidecode from unicodedata import normalize @@ -26,20 +26,22 @@ fake = faker.Factory.create() -def setup(): - global acceptable_fakers - # The transliteration of some Arabic and Devanagari names introduces - # non-alphabetic characters that don't work with the draft author - # extraction code, and also don't seem to match the way people with Arabic - # names romanize Arabic names. Exclude those locales from name generation - # in order to avoid test failures. - locales = set( [ l for l in faker.config.AVAILABLE_LOCALES if not (l.startswith('ar_') or l.startswith('sg_') or l=='fr_QC') ] ) - acceptable_fakers = [faker.Faker(locale) for locale in locales] -setup() +# The transliteration of some Arabic and Devanagari names introduces +# non-alphabetic characters that don't work with the draft author +# extraction code, and also don't seem to match the way people with Arabic +# names romanize Arabic names. Exclude those locales from name generation +# in order to avoid test failures. 
+_acceptable_fakers = [ + faker.Faker(locale) + for locale in set(faker.config.AVAILABLE_LOCALES) + if not (locale.startswith('ar_') or locale.startswith('sg_') or locale == 'fr_QC') +] + def random_faker(): - global acceptable_fakers - return random.sample(acceptable_fakers, 1)[0] + """Helper to get a random faker acceptable for User names""" + return random.sample(_acceptable_fakers, 1)[0] + class UserFactory(factory.django.DjangoModelFactory): class Meta: @@ -103,10 +105,9 @@ def default_photo(obj, create, extracted, **kwargs): # pylint: disable=no-self-a media_name = "%s/%s.jpg" % (settings.PHOTOS_DIRNAME, photo_name) obj.photo = media_name obj.photo_thumb = media_name - photosrc = os.path.join(settings.TEST_DATA_DIR, "profile-default.jpg") photodst = os.path.join(settings.PHOTOS_DIR, photo_name + '.jpg') - if not os.path.exists(photodst): - shutil.copy(photosrc, photodst) + img = Image.new('RGB', (200, 200)) + img.save(photodst) def delete_file(file): os.unlink(file) atexit.register(delete_file, photodst) @@ -159,10 +160,22 @@ class Meta: class PersonalApiKeyFactory(factory.django.DjangoModelFactory): person = factory.SubFactory(PersonFactory) - endpoint = FuzzyChoice(PERSON_API_KEY_ENDPOINTS) - + endpoint = FuzzyChoice(v for v, n in PERSON_API_KEY_ENDPOINTS) + class Meta: model = PersonalApiKey + skip_postgeneration_save = True + + @factory.post_generation + def validate_model(obj, create, extracted, **kwargs): + """Validate the model after creation + + Passing validate_model=False will disable the validation. + """ + do_clean = True if extracted is None else extracted + if do_clean: + obj.full_clean() + class PersonApiKeyEventFactory(factory.django.DjangoModelFactory): key = factory.SubFactory(PersonalApiKeyFactory) diff --git a/ietf/person/forms.py b/ietf/person/forms.py index 81ee362561..7eef8aa17b 100644 --- a/ietf/person/forms.py +++ b/ietf/person/forms.py @@ -1,15 +1,26 @@ -# Copyright The IETF Trust 2018-2020, All Rights Reserved +# Copyright The IETF Trust 2018-2025, All Rights Reserved # -*- coding: utf-8 -*- from django import forms + from ietf.person.models import Person +from ietf.utils.fields import MultiEmailField, NameAddrEmailField class MergeForm(forms.Form): source = forms.IntegerField(label='Source Person ID') target = forms.IntegerField(label='Target Person ID') + def __init__(self, *args, **kwargs): + self.readonly = False + if 'readonly' in kwargs: + self.readonly = kwargs.pop('readonly') + super().__init__(*args, **kwargs) + if self.readonly: + self.fields['source'].widget.attrs['readonly'] = True + self.fields['target'].widget.attrs['readonly'] = True + def clean_source(self): return self.get_person(self.cleaned_data['source']) @@ -21,3 +32,11 @@ def get_person(self, pk): return Person.objects.get(pk=pk) except Person.DoesNotExist: raise forms.ValidationError("ID does not exist") + + +class MergeRequestForm(forms.Form): + to = MultiEmailField() + frm = NameAddrEmailField() + reply_to = MultiEmailField() + subject = forms.CharField() + body = forms.CharField(widget=forms.Textarea) diff --git a/ietf/person/management/commands/purge_old_personal_api_key_events.py b/ietf/person/management/commands/purge_old_personal_api_key_events.py deleted file mode 100644 index a32edf866c..0000000000 --- a/ietf/person/management/commands/purge_old_personal_api_key_events.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright The IETF Trust 2021, All Rights Reserved -# -*- coding: utf-8 -*- - -from datetime import timedelta -from django.core.management.base import BaseCommand, 
CommandError -from django.db.models import Max, Min -from django.utils import timezone - -from ietf.person.models import PersonApiKeyEvent - - -class Command(BaseCommand): - help = 'Purge PersonApiKeyEvent instances older than KEEP_DAYS days' - - def add_arguments(self, parser): - parser.add_argument('keep_days', type=int, - help='Delete events older than this many days') - parser.add_argument('-n', '--dry-run', action='store_true', default=False, - help="Don't delete events, just show what would be done") - - def handle(self, *args, **options): - keep_days = options['keep_days'] - dry_run = options['dry_run'] - - def _format_count(count, unit='day'): - return '{} {}{}'.format(count, unit, ('' if count == 1 else 's')) - - if keep_days < 0: - raise CommandError('Negative keep_days not allowed ({} was specified)'.format(keep_days)) - - self.stdout.write('purge_old_personal_api_key_events: Finding events older than {}\n'.format(_format_count(keep_days))) - if dry_run: - self.stdout.write('Dry run requested, records will not be deleted\n') - self.stdout.flush() - - now = timezone.now() - old_events = PersonApiKeyEvent.objects.filter( - time__lt=now - timedelta(days=keep_days) - ) - - stats = old_events.aggregate(Min('time'), Max('time')) - old_count = old_events.count() - if old_count == 0: - self.stdout.write('No events older than {} found\n'.format(_format_count(keep_days))) - return - - oldest_date = stats['time__min'] - oldest_ago = now - oldest_date - newest_date = stats['time__max'] - newest_ago = now - newest_date - - action_fmt = 'Would delete {}\n' if dry_run else 'Deleting {}\n' - self.stdout.write(action_fmt.format(_format_count(old_count, 'event'))) - self.stdout.write(' Oldest at {} ({} ago)\n'.format(oldest_date, _format_count(oldest_ago.days))) - self.stdout.write(' Most recent at {} ({} ago)\n'.format(newest_date, _format_count(newest_ago.days))) - self.stdout.flush() - - if not dry_run: - old_events.delete() diff --git a/ietf/person/management/commands/tests.py b/ietf/person/management/commands/tests.py deleted file mode 100644 index 291a6ace5f..0000000000 --- a/ietf/person/management/commands/tests.py +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright The IETF Trust 2021, All Rights Reserved -# -*- coding: utf-8 -*- - -import datetime -from io import StringIO - -from django.core.management import call_command, CommandError -from django.utils import timezone - -from ietf.person.factories import PersonApiKeyEventFactory -from ietf.person.models import PersonApiKeyEvent, PersonEvent -from ietf.utils.test_utils import TestCase - - -class CommandTests(TestCase): - @staticmethod - def _call_command(command_name, *args, **options): - out = StringIO() - options['stdout'] = out - call_command(command_name, *args, **options) - return out.getvalue() - - def _assert_purge_results(self, cmd_output, expected_delete_count, expected_kept_events): - self.assertNotIn('Dry run requested', cmd_output) - if expected_delete_count == 0: - delete_text = 'No events older than' - else: - delete_text = 'Deleting {} event'.format(expected_delete_count) - self.assertIn(delete_text, cmd_output) - self.assertCountEqual( - PersonApiKeyEvent.objects.all(), - expected_kept_events, - 'Wrong events were deleted' - ) - - def _assert_purge_dry_run_results(self, cmd_output, expected_delete_count, expected_kept_events): - self.assertIn('Dry run requested', cmd_output) - if expected_delete_count == 0: - delete_text = 'No events older than' - else: - delete_text = 'Would delete {} event'.format(expected_delete_count) - 
self.assertIn(delete_text, cmd_output) - self.assertCountEqual( - PersonApiKeyEvent.objects.all(), - expected_kept_events, - 'Events were deleted when dry-run option was used' - ) - - def test_purge_old_personal_api_key_events(self): - keep_days = 10 - - # Remember how many PersonEvents were present so we can verify they're cleaned up properly. - personevents_before = PersonEvent.objects.count() - - now = timezone.now() - # The first of these events will be timestamped a fraction of a second more than keep_days - # days ago by the time we call the management command, so will just barely chosen for purge. - old_events = [ - PersonApiKeyEventFactory(time=now - datetime.timedelta(days=n)) - for n in range(keep_days, 2 * keep_days + 1) - ] - num_old_events = len(old_events) - - recent_events = [ - PersonApiKeyEventFactory(time=now - datetime.timedelta(days=n)) - for n in range(0, keep_days) - ] - # We did not create recent_event timestamped exactly keep_days ago because it would - # be treated as an old_event by the management command. Create an event a few seconds - # on the "recent" side of keep_days old to test the threshold. - recent_events.append( - PersonApiKeyEventFactory( - time=now + datetime.timedelta(seconds=3) - datetime.timedelta(days=keep_days) - ) - ) - num_recent_events = len(recent_events) - - # call with dry run - output = self._call_command('purge_old_personal_api_key_events', str(keep_days), '--dry-run') - self._assert_purge_dry_run_results(output, num_old_events, old_events + recent_events) - - # call for real - output = self._call_command('purge_old_personal_api_key_events', str(keep_days)) - self._assert_purge_results(output, num_old_events, recent_events) - self.assertEqual(PersonEvent.objects.count(), personevents_before + num_recent_events, - 'PersonEvents were not cleaned up properly') - - # repeat - there should be nothing left to delete - output = self._call_command('purge_old_personal_api_key_events', '--dry-run', str(keep_days)) - self._assert_purge_dry_run_results(output, 0, recent_events) - - output = self._call_command('purge_old_personal_api_key_events', str(keep_days)) - self._assert_purge_results(output, 0, recent_events) - self.assertEqual(PersonEvent.objects.count(), personevents_before + num_recent_events, - 'PersonEvents were not cleaned up properly') - - # and now delete the remaining events - output = self._call_command('purge_old_personal_api_key_events', '0') - self._assert_purge_results(output, num_recent_events, []) - self.assertEqual(PersonEvent.objects.count(), personevents_before, - 'PersonEvents were not cleaned up properly') - - def test_purge_old_personal_api_key_events_rejects_invalid_arguments(self): - """The purge_old_personal_api_key_events command should reject invalid arguments""" - event = PersonApiKeyEventFactory(time=timezone.now() - datetime.timedelta(days=30)) - - with self.assertRaises(CommandError): - self._call_command('purge_old_personal_api_key_events') - - with self.assertRaises(CommandError): - self._call_command('purge_old_personal_api_key_events', '-15') - - with self.assertRaises(CommandError): - self._call_command('purge_old_personal_api_key_events', '15.3') - - with self.assertRaises(CommandError): - self._call_command('purge_old_personal_api_key_events', '15', '15') - - with self.assertRaises(CommandError): - self._call_command('purge_old_personal_api_key_events', 'abc', '15') - - self.assertCountEqual(PersonApiKeyEvent.objects.all(), [event]) diff --git 
a/ietf/person/migrations/0002_alter_historicalperson_ascii_and_more.py b/ietf/person/migrations/0002_alter_historicalperson_ascii_and_more.py new file mode 100644 index 0000000000..98d5da75d6 --- /dev/null +++ b/ietf/person/migrations/0002_alter_historicalperson_ascii_and_more.py @@ -0,0 +1,82 @@ +# Generated by Django 4.2.13 on 2024-05-22 18:50 + +from django.db import migrations, models +import ietf.person.models + + +class Migration(migrations.Migration): + + dependencies = [ + ("person", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="historicalperson", + name="ascii", + field=models.CharField( + help_text="Name as rendered in ASCII (Latin, unaccented) characters.", + max_length=255, + validators=[ietf.person.models.name_character_validator], + verbose_name="Full Name (ASCII)", + ), + ), + migrations.AlterField( + model_name="historicalperson", + name="ascii_short", + field=models.CharField( + blank=True, + help_text="Example: A. Nonymous. Fill in this with initials and surname only if taking the initials and surname of the ASCII name above produces an incorrect initials-only form. (Blank is OK).", + max_length=32, + null=True, + validators=[ietf.person.models.name_character_validator], + verbose_name="Abbreviated Name (ASCII)", + ), + ), + migrations.AlterField( + model_name="historicalperson", + name="plain", + field=models.CharField( + blank=True, + default="", + help_text="Use this if you have a Spanish double surname. Don't use this for nicknames, and don't use it unless you've actually observed that the datatracker shows your name incorrectly.", + max_length=64, + validators=[ietf.person.models.name_character_validator], + verbose_name="Plain Name correction (Unicode)", + ), + ), + migrations.AlterField( + model_name="person", + name="ascii", + field=models.CharField( + help_text="Name as rendered in ASCII (Latin, unaccented) characters.", + max_length=255, + validators=[ietf.person.models.name_character_validator], + verbose_name="Full Name (ASCII)", + ), + ), + migrations.AlterField( + model_name="person", + name="ascii_short", + field=models.CharField( + blank=True, + help_text="Example: A. Nonymous. Fill in this with initials and surname only if taking the initials and surname of the ASCII name above produces an incorrect initials-only form. (Blank is OK).", + max_length=32, + null=True, + validators=[ietf.person.models.name_character_validator], + verbose_name="Abbreviated Name (ASCII)", + ), + ), + migrations.AlterField( + model_name="person", + name="plain", + field=models.CharField( + blank=True, + default="", + help_text="Use this if you have a Spanish double surname. 
Don't use this for nicknames, and don't use it unless you've actually observed that the datatracker shows your name incorrectly.", + max_length=64, + validators=[ietf.person.models.name_character_validator], + verbose_name="Plain Name correction (Unicode)", + ), + ), + ] diff --git a/ietf/person/migrations/0003_alter_personalapikey_endpoint.py b/ietf/person/migrations/0003_alter_personalapikey_endpoint.py new file mode 100644 index 0000000000..202af4b101 --- /dev/null +++ b/ietf/person/migrations/0003_alter_personalapikey_endpoint.py @@ -0,0 +1,42 @@ +# Generated by Django 4.2.16 on 2024-10-24 21:39 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("person", "0002_alter_historicalperson_ascii_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="personalapikey", + name="endpoint", + field=models.CharField( + choices=[ + ("/api/appauth/authortools", "/api/appauth/authortools"), + ("/api/appauth/bibxml", "/api/appauth/bibxml"), + ("/api/iesg/position", "/api/iesg/position"), + ( + "/api/meeting/session/recording-name", + "/api/meeting/session/recording-name", + ), + ( + "/api/meeting/session/video/url", + "/api/meeting/session/video/url", + ), + ("/api/notify/meeting/bluesheet", "/api/notify/meeting/bluesheet"), + ( + "/api/notify/meeting/registration", + "/api/notify/meeting/registration", + ), + ("/api/notify/session/attendees", "/api/notify/session/attendees"), + ("/api/notify/session/chatlog", "/api/notify/session/chatlog"), + ("/api/notify/session/polls", "/api/notify/session/polls"), + ("/api/v2/person/person", "/api/v2/person/person"), + ], + max_length=128, + ), + ), + ] diff --git a/ietf/person/migrations/0004_alter_person_photo_alter_person_photo_thumb.py b/ietf/person/migrations/0004_alter_person_photo_alter_person_photo_thumb.py new file mode 100644 index 0000000000..f34382fa70 --- /dev/null +++ b/ietf/person/migrations/0004_alter_person_photo_alter_person_photo_thumb.py @@ -0,0 +1,38 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models +import ietf.utils.storage + + +class Migration(migrations.Migration): + + dependencies = [ + ("person", "0003_alter_personalapikey_endpoint"), + ] + + operations = [ + migrations.AlterField( + model_name="person", + name="photo", + field=models.ImageField( + blank=True, + default=None, + storage=ietf.utils.storage.BlobShadowFileSystemStorage( + kind="", location=None + ), + upload_to="photo", + ), + ), + migrations.AlterField( + model_name="person", + name="photo_thumb", + field=models.ImageField( + blank=True, + default=None, + storage=ietf.utils.storage.BlobShadowFileSystemStorage( + kind="", location=None + ), + upload_to="photo", + ), + ), + ] diff --git a/ietf/person/migrations/0005_alter_historicalperson_pronouns_selectable_and_more.py b/ietf/person/migrations/0005_alter_historicalperson_pronouns_selectable_and_more.py new file mode 100644 index 0000000000..2af874b1fa --- /dev/null +++ b/ietf/person/migrations/0005_alter_historicalperson_pronouns_selectable_and_more.py @@ -0,0 +1,34 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("person", "0004_alter_person_photo_alter_person_photo_thumb"), + ] + + operations = [ + migrations.AlterField( + model_name="historicalperson", + name="pronouns_selectable", + field=models.JSONField( + blank=True, + default=list, + max_length=120, + null=True, + 
verbose_name="Pronouns", + ), + ), + migrations.AlterField( + model_name="person", + name="pronouns_selectable", + field=models.JSONField( + blank=True, + default=list, + max_length=120, + null=True, + verbose_name="Pronouns", + ), + ), + ] diff --git a/ietf/person/models.py b/ietf/person/models.py index 22c63d4a0f..3ab89289a6 100644 --- a/ietf/person/models.py +++ b/ietf/person/models.py @@ -4,7 +4,6 @@ import email.utils import email.header -import jsonfield import uuid from hashids import Hashids @@ -29,7 +28,7 @@ from ietf.name.models import ExtResourceName from ietf.person.name import name_parts, initials, plain_name from ietf.utils.mail import send_mail_preformatted -from ietf.utils.storage import NoLocationMigrationFileSystemStorage +from ietf.utils.storage import BlobShadowFileSystemStorage from ietf.utils.mail import formataddr from ietf.person.name import unidecode_name from ietf.utils import log @@ -37,8 +36,12 @@ def name_character_validator(value): - if '/' in value: - raise ValidationError('Name cannot contain "/" character.') + disallowed = "@:/" + found = set(disallowed).intersection(value) + if len(found) > 0: + raise ValidationError( + f"This name cannot contain the characters {', '.join(disallowed)}" + ) class Person(models.Model): @@ -48,16 +51,26 @@ class Person(models.Model): # The normal unicode form of the name. This must be # set to the same value as the ascii-form if equal. name = models.CharField("Full Name (Unicode)", max_length=255, db_index=True, help_text="Preferred long form of name.", validators=[name_character_validator]) - plain = models.CharField("Plain Name correction (Unicode)", max_length=64, default='', blank=True, help_text="Use this if you have a Spanish double surname. Don't use this for nicknames, and don't use it unless you've actually observed that the datatracker shows your name incorrectly.") + plain = models.CharField("Plain Name correction (Unicode)", max_length=64, default='', blank=True, help_text="Use this if you have a Spanish double surname. Don't use this for nicknames, and don't use it unless you've actually observed that the datatracker shows your name incorrectly.", validators=[name_character_validator]) # The normal ascii-form of the name. - ascii = models.CharField("Full Name (ASCII)", max_length=255, help_text="Name as rendered in ASCII (Latin, unaccented) characters.") + ascii = models.CharField("Full Name (ASCII)", max_length=255, help_text="Name as rendered in ASCII (Latin, unaccented) characters.", validators=[name_character_validator]) # The short ascii-form of the name. Also in alias table if non-null - ascii_short = models.CharField("Abbreviated Name (ASCII)", max_length=32, null=True, blank=True, help_text="Example: A. Nonymous. Fill in this with initials and surname only if taking the initials and surname of the ASCII name above produces an incorrect initials-only form. (Blank is OK).") - pronouns_selectable = jsonfield.JSONCharField("Pronouns", max_length=120, blank=True, null=True, default=list ) + ascii_short = models.CharField("Abbreviated Name (ASCII)", max_length=32, null=True, blank=True, help_text="Example: A. Nonymous. Fill in this with initials and surname only if taking the initials and surname of the ASCII name above produces an incorrect initials-only form. 
(Blank is OK).", validators=[name_character_validator]) + pronouns_selectable = models.JSONField("Pronouns", max_length=120, blank=True, null=True, default=list ) pronouns_freetext = models.CharField(" ", max_length=30, null=True, blank=True, help_text="Optionally provide your personal pronouns. These will be displayed on your public profile page and alongside your name in Meetecho and, in future, other systems. Select any number of the checkboxes OR provide a custom string up to 30 characters.") biography = models.TextField(blank=True, help_text="Short biography for use on leadership pages. Use plain text or reStructuredText markup.") - photo = models.ImageField(storage=NoLocationMigrationFileSystemStorage(), upload_to=settings.PHOTOS_DIRNAME, blank=True, default=None) - photo_thumb = models.ImageField(storage=NoLocationMigrationFileSystemStorage(), upload_to=settings.PHOTOS_DIRNAME, blank=True, default=None) + photo = models.ImageField( + storage=BlobShadowFileSystemStorage(kind="photo"), + upload_to=settings.PHOTOS_DIRNAME, + blank=True, + default=None, + ) + photo_thumb = models.ImageField( + storage=BlobShadowFileSystemStorage(kind="photo"), + upload_to=settings.PHOTOS_DIRNAME, + blank=True, + default=None, + ) name_from_draft = models.CharField("Full Name (from submission)", null=True, max_length=255, editable=False, help_text="Name as found in an Internet-Draft submission.") def __str__(self): @@ -74,7 +87,7 @@ def short(self): else: prefix, first, middle, last, suffix = self.ascii_parts() return (first and first[0]+"." or "")+(middle or "")+" "+last+(suffix and " "+suffix or "") - def plain_name(self): + def plain_name(self) -> str: if not hasattr(self, '_cached_plain_name'): if self.plain: self._cached_plain_name = self.plain @@ -190,8 +203,11 @@ def has_drafts(self): def rfcs(self): from ietf.doc.models import Document - rfcs = list(Document.objects.filter(documentauthor__person=self, type='draft', states__slug='rfc')) - rfcs.sort(key=lambda d: d.canonical_name() ) + # When RfcAuthors are populated, this may over-return if an author is dropped + # from the author list between the final draft and the published RFC. Should + # ignore DocumentAuthors when an RfcAuthor exists for a draft. 
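# One possible shape (hedged sketch, not part of the patch) for the refinement
# the comment above calls for: when an rfc Document has RfcAuthor records,
# trust those and ignore its DocumentAuthor records, so an author dropped
# between the final draft and the published RFC is not still credited.
from django.db.models import Count, Q
from ietf.doc.models import Document  # imported locally inside rfcs() in this module

def rfcs_preferring_rfcauthor(person):
    return (
        Document.objects.filter(type="rfc")
        .annotate(n_rfcauthors=Count("rfcauthor"))
        .filter(
            Q(rfcauthor__person=person)
            | Q(n_rfcauthors=0, documentauthor__person=person)
        )
        .distinct()
    )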
+ rfcs = list(Document.objects.filter(type="rfc").filter(models.Q(documentauthor__person=self)|models.Q(rfcauthor__person=self)).distinct()) + rfcs.sort(key=lambda d: d.name ) return rfcs def active_drafts(self): @@ -253,11 +269,16 @@ def available_api_endpoints(self): def cdn_photo_url(self, size=80): if self.photo: if settings.SERVE_CDN_PHOTOS: + if settings.SERVER_MODE != "production": + original_media_dir = settings.MEDIA_URL + settings.MEDIA_URL = "https://www.ietf.org/lib/dt/media/" source_url = self.photo.url if source_url.startswith(settings.IETF_HOST_URL): source_url = source_url[len(settings.IETF_HOST_URL):] elif source_url.startswith('/'): source_url = source_url[1:] + if settings.SERVER_MODE != "production": + settings.MEDIA_URL = original_media_dir return f'{settings.IETF_HOST_URL}cdn-cgi/image/fit=scale-down,width={size},height={size}/{source_url}' else: datatracker_photo_path = urlreverse('ietf.person.views.photo', kwargs={'email_or_name': self.email()}) @@ -372,6 +393,7 @@ def salt(): ("/api/iesg/position", "/api/iesg/position", "Area Director"), ("/api/v2/person/person", "/api/v2/person/person", "Robot"), ("/api/meeting/session/video/url", "/api/meeting/session/video/url", "Recording Manager"), + ("/api/meeting/session/recording-name", "/api/meeting/session/recording-name", "Recording Manager"), ("/api/notify/meeting/registration", "/api/notify/meeting/registration", "Robot"), ("/api/notify/meeting/bluesheet", "/api/notify/meeting/bluesheet", "Recording Manager"), ("/api/notify/session/attendees", "/api/notify/session/attendees", "Recording Manager"), diff --git a/ietf/person/name.py b/ietf/person/name.py index dc57f58f4b..0dbeaa9b99 100644 --- a/ietf/person/name.py +++ b/ietf/person/name.py @@ -59,7 +59,7 @@ def name_parts(name): last = parts[0] if len(parts) >= 2: # Handle reverse-order names with uppercase surname correctly - if len(first)>1 and re.search("^[A-Z-]+$", first): + if len(first)>1 and re.search("^[A-Z-]+$", first) and first != "JP": first, last = last, first.capitalize() # Handle exception for RFC Editor if (prefix, first, middle, last, suffix) == ('', 'Editor', '', 'Rfc', ''): diff --git a/ietf/person/serializers.py b/ietf/person/serializers.py new file mode 100644 index 0000000000..023d77d4bc --- /dev/null +++ b/ietf/person/serializers.py @@ -0,0 +1,39 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +"""DRF Serializers""" + +from rest_framework import serializers + +from ietf.ietfauth.validators import is_allowed_address + +from .models import Email, Person + + +class EmailSerializer(serializers.ModelSerializer): + """Email serializer for read/update""" + + address = serializers.EmailField(read_only=True) + + class Meta: + model = Email + fields = [ + "person", + "address", + "primary", + "active", + "origin", + ] + read_only_fields = ["person", "address", "origin"] + + +class NewEmailSerializer(serializers.Serializer): + """Serialize a new email address request""" + address = serializers.EmailField(validators=[is_allowed_address]) + + +class PersonSerializer(serializers.ModelSerializer): + """Person serializer""" + emails = EmailSerializer(many=True, source="email_set") + + class Meta: + model = Person + fields = ["id", "name", "emails"] diff --git a/ietf/person/tasks.py b/ietf/person/tasks.py new file mode 100644 index 0000000000..f0c979fa26 --- /dev/null +++ b/ietf/person/tasks.py @@ -0,0 +1,59 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +# +# Celery task definitions +# +import datetime + +from celery import shared_task + +from 
django.conf import settings +from django.utils import timezone + +from ietf.utils import log +from ietf.utils.mail import send_mail +from .models import PersonalApiKey, PersonApiKeyEvent + + +@shared_task +def send_apikey_usage_emails_task(days): + """Send usage emails to Persons who have API keys""" + earliest = timezone.now() - datetime.timedelta(days=days) + keys = PersonalApiKey.objects.filter( + valid=True, + personapikeyevent__time__gt=earliest, + ).distinct() + for key in keys: + events = PersonApiKeyEvent.objects.filter(key=key, time__gt=earliest) + count = events.count() + events = events[:32] + if count: + key_name = key.hash()[:8] + subject = "API key usage for key '%s' for the last %s days" % ( + key_name, + days, + ) + to = key.person.email_address() + frm = settings.DEFAULT_FROM_EMAIL + send_mail( + None, + to, + frm, + subject, + "utils/apikey_usage_report.txt", + { + "person": key.person, + "days": days, + "key": key, + "key_name": key_name, + "count": count, + "events": events, + }, + ) + +@shared_task +def purge_personal_api_key_events_task(keep_days): + keep_since = timezone.now() - datetime.timedelta(days=keep_days) + old_events = PersonApiKeyEvent.objects.filter(time__lt=keep_since) + count = len(old_events) + old_events.delete() + log.log(f"Deleted {count} PersonApiKeyEvents older than {keep_since}") diff --git a/ietf/person/tests.py b/ietf/person/tests.py index bb75b438db..f55d8b8a34 100644 --- a/ietf/person/tests.py +++ b/ietf/person/tests.py @@ -1,15 +1,15 @@ -# Copyright The IETF Trust 2014-2022, All Rights Reserved +# Copyright The IETF Trust 2014-2025, All Rights Reserved # -*- coding: utf-8 -*- import datetime import json +from unittest import mock from io import StringIO, BytesIO from PIL import Image from pyquery import PyQuery - from django.core.exceptions import ValidationError from django.http import HttpRequest from django.test import override_settings @@ -22,14 +22,16 @@ from ietf.community.models import CommunityList from ietf.group.factories import RoleFactory from ietf.group.models import Group +from ietf.message.models import Message from ietf.nomcom.models import NomCom from ietf.nomcom.test_data import nomcom_test_data from ietf.nomcom.factories import NomComFactory, NomineeFactory, NominationFactory, FeedbackFactory, PositionFactory -from ietf.person.factories import EmailFactory, PersonFactory, UserFactory -from ietf.person.models import Person, Alias +from ietf.person.factories import EmailFactory, PersonFactory, PersonApiKeyEventFactory +from ietf.person.models import Person, Alias, PersonApiKeyEvent +from ietf.person.tasks import purge_personal_api_key_events_task from ietf.person.utils import (merge_persons, determine_merge_order, send_merge_notification, handle_users, get_extra_primary, dedupe_aliases, move_related_objects, merge_nominees, - handle_reviewer_settings, merge_users, get_dots) + handle_reviewer_settings, get_dots) from ietf.review.models import ReviewerSettings from ietf.utils.test_utils import TestCase, login_testing_unauthorized from ietf.utils.mail import outbox, empty_outbox @@ -165,6 +167,14 @@ def test_person_photo(self): img = Image.open(BytesIO(r.content)) self.assertEqual(img.width, 200) + def test_person_photo_duplicates(self): + person = PersonFactory(name="bazquux@example.com", user__username="bazquux@example.com", with_bio=True) + PersonFactory(name="bazquux@example.com", user__username="foobar@example.com", with_bio=True) + + url = urlreverse("ietf.person.views.photo", kwargs={ "email_or_name": 
person.plain_name()}) + r = self.client.get(url) + self.assertEqual(r.status_code, 404) + def test_name_methods(self): person = PersonFactory(name="Dr. Jens F. Möller", ) @@ -198,13 +208,13 @@ def test_merge(self): def test_merge_with_params(self): p1 = get_person_no_user() p2 = PersonFactory() - url = urlreverse("ietf.person.views.merge") + "?source={}&target={}".format(p1.pk, p2.pk) + url = urlreverse("ietf.person.views.merge_submit") + "?source={}&target={}".format(p1.pk, p2.pk) login_testing_unauthorized(self, "secretary", url) r = self.client.get(url) self.assertContains(r, 'retaining login', status_code=200) def test_merge_with_params_bad_id(self): - url = urlreverse("ietf.person.views.merge") + "?source=1000&target=2000" + url = urlreverse("ietf.person.views.merge_submit") + "?source=1000&target=2000" login_testing_unauthorized(self, "secretary", url) r = self.client.get(url) self.assertContains(r, 'ID does not exist', status_code=200) @@ -212,7 +222,7 @@ def test_merge_with_params_bad_id(self): def test_merge_post(self): p1 = get_person_no_user() p2 = PersonFactory() - url = urlreverse("ietf.person.views.merge") + url = urlreverse("ietf.person.views.merge_submit") expected_url = urlreverse("ietf.secr.rolodex.views.view", kwargs={'id': p2.pk}) login_testing_unauthorized(self, "secretary", url) data = {'source': p1.pk, 'target': p2.pk} @@ -236,9 +246,11 @@ def test_cdn_photo_url_cdn_off(self): self.assertNotIn('cdn-cgi/photo',p.cdn_photo_url()) def test_invalid_name_characters_rejected(self): - slash_person = PersonFactory.build(name='I have a /', user=None) # build() does not save the new object - with self.assertRaises(ValidationError): - slash_person.full_clean() # calls validators (save() does *not*) + for disallowed in "/:@": + # build() does not save the new object + person_with_bad_name = PersonFactory.build(name=f"I have a {disallowed}", user=None) + with self.assertRaises(ValidationError, msg=f"Name with a {disallowed} char should be rejected"): + person_with_bad_name.full_clean() # calls validators (save() does *not*) class PersonUtilsTests(TestCase): @@ -381,13 +393,24 @@ def test_merge_persons(self): request.user = user source = PersonFactory() target = PersonFactory() + mars = RoleFactory(name_id='chair',group__acronym='mars').group source_id = source.pk source_email = source.email_set.first() source_alias = source.alias_set.first() source_user = source.user + communitylist = CommunityList.objects.create(person=source, group=mars) + nomcom = NomComFactory() + position = PositionFactory(nomcom=nomcom) + nominee = NomineeFactory(nomcom=nomcom, person=mars.get_chair().person) + feedback = FeedbackFactory(person=source, author=source.email().address, nomcom=nomcom) + feedback.nominees.add(nominee) + nomination = NominationFactory(nominee=nominee, person=source, position=position, comments=feedback) merge_persons(request, source, target, file=StringIO()) self.assertTrue(source_email in target.email_set.all()) self.assertTrue(source_alias in target.alias_set.all()) + self.assertIn(communitylist, target.communitylist_set.all()) + self.assertIn(feedback, target.feedback_set.all()) + self.assertIn(nomination, target.nomination_set.all()) self.assertFalse(Person.objects.filter(id=source_id)) self.assertFalse(source_user.is_active) @@ -407,24 +430,6 @@ def test_merge_persons_reviewer_settings(self): rs = target.reviewersettings_set.first() self.assertEqual(rs.min_interval, 7) - def test_merge_users(self): - person = PersonFactory() - source = person.user - target = UserFactory() - 
mars = RoleFactory(name_id='chair',group__acronym='mars').group - communitylist = CommunityList.objects.create(user=source, group=mars) - nomcom = NomComFactory() - position = PositionFactory(nomcom=nomcom) - nominee = NomineeFactory(nomcom=nomcom, person=mars.get_chair().person) - feedback = FeedbackFactory(user=source, author=person.email().address, nomcom=nomcom) - feedback.nominees.add(nominee) - nomination = NominationFactory(nominee=nominee, user=source, position=position, comments=feedback) - - merge_users(source, target) - self.assertIn(communitylist, target.communitylist_set.all()) - self.assertIn(feedback, target.feedback_set.all()) - self.assertIn(nomination, target.nomination_set.all()) - def test_dots(self): noroles = PersonFactory() self.assertEqual(get_dots(noroles),[]) @@ -445,3 +450,40 @@ def test_dots(self): self.assertEqual(get_dots(ncmember),['nomcom']) ncchair = RoleFactory(group__acronym='nomcom2020',group__type_id='nomcom',name_id='chair').person self.assertEqual(get_dots(ncchair),['nomcom']) + + def test_send_merge_request(self): + empty_outbox() + message_count_before = Message.objects.count() + source = PersonFactory() + target = PersonFactory() + url = urlreverse('ietf.person.views.send_merge_request') + url = url + f'?source={source.pk}&target={target.pk}' + login_testing_unauthorized(self, 'secretary', url) + r = self.client.get(url) + initial = r.context['form'].initial + subject = 'Action requested: Merging possible duplicate IETF Datatracker accounts' + self.assertEqual(initial['to'], ', '.join([source.user.username, target.user.username])) + self.assertEqual(initial['subject'], subject) + self.assertEqual(initial['reply_to'], 'support@ietf.org') + self.assertEqual(r.status_code, 200) + r = self.client.post(url, data=initial) + self.assertEqual(r.status_code, 302) + self.assertEqual(len(outbox), 1) + self.assertIn(source.user.username, outbox[0]['To']) + message_count_after = Message.objects.count() + message = Message.objects.last() + self.assertEqual(message_count_after, message_count_before + 1) + self.assertIn(source.user.username, message.to) + + +class TaskTests(TestCase): + @mock.patch("ietf.person.tasks.log.log") + def test_purge_personal_api_key_events_task(self, mock_log): + now = timezone.now() + old_event = PersonApiKeyEventFactory(time=now - datetime.timedelta(days=1, minutes=1)) + young_event = PersonApiKeyEventFactory(time=now - datetime.timedelta(days=1, minutes=-1)) + purge_personal_api_key_events_task(keep_days=1) + self.assertFalse(PersonApiKeyEvent.objects.filter(pk=old_event.pk).exists()) + self.assertTrue(PersonApiKeyEvent.objects.filter(pk=young_event.pk).exists()) + self.assertTrue(mock_log.called) + self.assertIn("Deleted 1", mock_log.call_args[0][0]) diff --git a/ietf/person/urls.py b/ietf/person/urls.py index f37d8b46cf..f3eccd04b7 100644 --- a/ietf/person/urls.py +++ b/ietf/person/urls.py @@ -1,8 +1,12 @@ +# Copyright The IETF Trust 2009-2025, All Rights Reserved +# -*- coding: utf-8 -*- from ietf.person import views, ajax from ietf.utils.urls import url urlpatterns = [ - url(r'^merge/$', views.merge), + url(r'^merge/?$', views.merge), + url(r'^merge/submit/?$', views.merge_submit), + url(r'^merge/send_request/?$', views.send_merge_request), url(r'^search/(?P(person|email))/$', views.ajax_select2_search), url(r'^(?P[0-9]+)/email.json$', ajax.person_email_json), url(r'^(?P[^/]+)$', views.profile), diff --git a/ietf/person/utils.py b/ietf/person/utils.py index 942c2aaab2..5ed90591f9 100755 --- a/ietf/person/utils.py +++ 
b/ietf/person/utils.py @@ -3,27 +3,26 @@ import datetime -import os import pprint import sys -import syslog from django.contrib import admin from django.core.cache import cache from django.core.exceptions import ObjectDoesNotExist from django.db.models import Q +from django.http import Http404 import debug # pyflakes:ignore -from ietf.person.models import Person +from ietf.person.models import Person, Alias, Email +from ietf.utils import log from ietf.utils.mail import send_mail def merge_persons(request, source, target, file=sys.stdout, verbose=False): changes = [] # write log - syslog.openlog(str(os.path.basename(__file__)), syslog.LOG_PID, syslog.LOG_USER) - syslog.syslog("Merging person records {} => {}".format(source.pk,target.pk)) + log.log(f"Merging person records {source.pk} => {target.pk}") # handle primary emails for email in get_extra_primary(source,target): @@ -31,6 +30,20 @@ def merge_persons(request, source, target, file=sys.stdout, verbose=False): email.save() changes.append('EMAIL ACTION: {} no longer marked as primary'.format(email.address)) + # handle community list + for communitylist in source.communitylist_set.all(): + source.communitylist_set.remove(communitylist) + target.communitylist_set.add(communitylist) + + # handle feedback + for feedback in source.feedback_set.all(): + feedback.person = target + feedback.save() + # handle nominations + for nomination in source.nomination_set.all(): + nomination.person = target + nomination.save() + changes.append(handle_users(source, target)) reviewer_changes = handle_reviewer_settings(source, target) if reviewer_changes: @@ -103,8 +116,7 @@ def handle_users(source,target,check_only=False): if source.user and target.user: message = "DATATRACKER LOGIN ACTION: retaining login: {}, removing login: {}".format(target.user,source.user) if not check_only: - merge_users(source.user, target.user) - syslog.syslog('merge-person-records: deactivating user {}'.format(source.user.username)) + log.log(f"merge-person-records: deactivating user {source.user.username}") user = source.user source.user = None source.save() @@ -126,21 +138,6 @@ def move_related_objects(source, target, file, verbose=False): kwargs = { field_name:target } queryset.update(**kwargs) -def merge_users(source, target): - '''Move related objects from source user to target user''' - # handle community list - for communitylist in source.communitylist_set.all(): - source.communitylist_set.remove(communitylist) - target.communitylist_set.add(communitylist) - # handle feedback - for feedback in source.feedback_set.all(): - feedback.user = target - feedback.save() - # handle nominations - for nomination in source.nomination_set.all(): - nomination.user = target - nomination.save() - def dedupe_aliases(person): '''Check person for duplicate aliases and purge''' seen = [] @@ -248,3 +245,17 @@ def get_dots(person): if roles.filter(group__acronym__startswith='nomcom', name_id__in=('chair','member')).exists(): dots.append('nomcom') return dots + +def lookup_persons(email_or_name): + aliases = Alias.objects.filter(name__iexact=email_or_name) + persons = set(a.person for a in aliases) + + if '@' in email_or_name: + emails = Email.objects.filter(address__iexact=email_or_name) + persons.update(e.person for e in emails) + + persons = [p for p in persons if p and p.id] + if not persons: + raise Http404 + persons.sort(key=lambda p: p.id) + return persons diff --git a/ietf/person/views.py b/ietf/person/views.py index 23a8dea3d2..d0b5912431 100644 --- a/ietf/person/views.py +++ 
b/ietf/person/views.py @@ -1,23 +1,26 @@ -# Copyright The IETF Trust 2012-2020, All Rights Reserved +# Copyright The IETF Trust 2012-2025, All Rights Reserved # -*- coding: utf-8 -*- from io import StringIO, BytesIO from PIL import Image +from django.conf import settings from django.contrib import messages from django.db.models import Q from django.http import HttpResponse, Http404 -from django.shortcuts import render, get_object_or_404, redirect +from django.shortcuts import render, redirect +from django.template.loader import render_to_string from django.utils import timezone import debug # pyflakes:ignore from ietf.ietfauth.utils import role_required -from ietf.person.models import Email, Person, Alias +from ietf.person.models import Email, Person from ietf.person.fields import select2_id_name_json -from ietf.person.forms import MergeForm -from ietf.person.utils import handle_users, merge_persons +from ietf.person.forms import MergeForm, MergeRequestForm +from ietf.person.utils import handle_users, merge_persons, lookup_persons +from ietf.utils.mail import send_mail_text def ajax_select2_search(request, model_name): @@ -62,37 +65,22 @@ def ajax_select2_search(request, model_name): page = int(request.GET.get("p", 1)) - 1 except ValueError: page = 0 - - objs = objs.distinct()[page:page + 10] + PAGE_SIZE = 10 + first_item = page * PAGE_SIZE + objs = objs.distinct()[first_item:first_item + PAGE_SIZE] return HttpResponse(select2_id_name_json(objs), content_type='application/json') def profile(request, email_or_name): - aliases = Alias.objects.filter(name__iexact=email_or_name) - persons = set(a.person for a in aliases) - - if '@' in email_or_name: - emails = Email.objects.filter(address__iexact=email_or_name) - persons.update(e.person for e in emails) - - persons = [p for p in persons if p and p.id] - if not persons: - raise Http404 - persons.sort(key=lambda p: p.id) + persons = lookup_persons(email_or_name) return render(request, 'person/profile.html', {'persons': persons, 'today': timezone.now()}) def photo(request, email_or_name): - if '@' in email_or_name: - persons = [ get_object_or_404(Email, address=email_or_name).person, ] - else: - aliases = Alias.objects.filter(name=email_or_name) - persons = list(set([ a.person for a in aliases ])) - if not persons: - raise Http404("No such person") + persons = lookup_persons(email_or_name) if len(persons) > 1: - return HttpResponse(r"\r\n".join([p.email() for p in persons]), status=300) + raise Http404("No photo found") person = persons[0] if not person.photo: raise Http404("No photo found") @@ -113,16 +101,19 @@ def photo(request, email_or_name): @role_required("Secretariat") def merge(request): form = MergeForm() - method = 'get' + return render(request, 'person/merge.html', {'form': form}) + + +@role_required("Secretariat") +def merge_submit(request): change_details = '' warn_messages = [] source = None target = None if request.method == "GET": - form = MergeForm() if request.GET: - form = MergeForm(request.GET) + form = MergeForm(request.GET, readonly=True) if form.is_valid(): source = form.cleaned_data.get('source') target = form.cleaned_data.get('target') @@ -131,12 +122,9 @@ def merge(request): if source.user.last_login and target.user.last_login and source.user.last_login > target.user.last_login: warn_messages.append('WARNING: The most recently used login is being deleted!') change_details = handle_users(source, target, check_only=True) - method = 'post' - else: - method = 'get' if request.method == "POST": - form = 
MergeForm(request.POST) + form = MergeForm(request.POST, readonly=True) if form.is_valid(): source = form.cleaned_data.get('source') source_id = source.id @@ -151,11 +139,72 @@ def merge(request): messages.error(request, output) return redirect('ietf.secr.rolodex.views.view', id=target.pk) - return render(request, 'person/merge.html', { + return render(request, 'person/merge_submit.html', { 'form': form, - 'method': method, 'change_details': change_details, 'source': source, 'target': target, 'warn_messages': warn_messages, }) + + +@role_required("Secretariat") +def send_merge_request(request): + if request.method == 'GET': + merge_form = MergeForm(request.GET) + if merge_form.is_valid(): + source = merge_form.cleaned_data['source'] + target = merge_form.cleaned_data['target'] + to = [] + if source.email(): + to.append(source.email().address) + if target.email(): + to.append(target.email().address) + if source.user: + source_account = source.user.username + else: + source_account = source.email() + if target.user: + target_account = target.user.username + else: + target_account = target.email() + sender_name = request.user.person.name + subject = 'Action requested: Merging possible duplicate IETF Datatracker accounts' + context = { + 'source_account': source_account, + 'target_account': target_account, + 'sender_name': sender_name, + } + body = render_to_string('person/merge_request_email.txt', context) + initial = { + 'to': ', '.join(to), + 'frm': settings.DEFAULT_FROM_EMAIL, + 'reply_to': 'support@ietf.org', + 'subject': subject, + 'body': body, + 'by': request.user.person.pk, + } + form = MergeRequestForm(initial=initial) + else: + messages.error(request, "Error requesting merge email: " + merge_form.errors.as_text()) + return redirect("ietf.person.views.merge") + + if request.method == 'POST': + form = MergeRequestForm(request.POST) + if form.is_valid(): + extra = {"Reply-To": form.cleaned_data.get("reply_to")} + send_mail_text( + request, + form.cleaned_data.get("to"), + form.cleaned_data.get("frm"), + form.cleaned_data.get("subject"), + form.cleaned_data.get("body"), + extra=extra, + ) + + messages.success(request, "The merge confirmation email was sent.") + return redirect("ietf.person.views.merge") + + return render(request, "person/send_merge_request.html", { + "form": form, + }) diff --git a/ietf/review/mailarch.py b/ietf/review/mailarch.py index c34a6079ce..61abc83aa5 100644 --- a/ietf/review/mailarch.py +++ b/ietf/review/mailarch.py @@ -6,25 +6,18 @@ # mailarchive.ietf.org import base64 -import contextlib import datetime import email.utils import hashlib -import mailbox -import tarfile -import tempfile - -from urllib.parse import urlencode -from urllib.request import urlopen +import requests import debug # pyflakes:ignore -from pyquery import PyQuery from django.conf import settings from django.utils.encoding import force_bytes, force_str -from ietf.utils.mail import get_payload_text +from ietf.utils.log import log from ietf.utils.timezone import date_today @@ -43,7 +36,7 @@ def hash_list_message_id(list_name, msgid): sha.update(force_bytes(list_name)) return force_str(base64.urlsafe_b64encode(sha.digest()).rstrip(b"=")) -def construct_query_urls(doc, team, query=None): +def construct_query_data(doc, team, query=None): list_name = list_name_from_email(team.list_email) if not list_name: return None @@ -51,83 +44,48 @@ def construct_query_urls(doc, team, query=None): if not query: query = doc.name - encoded_query = "?" 
+ urlencode({ - "qdr": "c", # custom time frame - "start_date": (date_today() - datetime.timedelta(days=180)).isoformat(), - "email_list": list_name, - "q": "subject:({})".format(query), - "as": "1", # this is an advanced search - }) - - return { - "query": query, - "query_url": settings.MAILING_LIST_ARCHIVE_URL + "/arch/search/" + encoded_query, - "query_data_url": settings.MAILING_LIST_ARCHIVE_URL + "/arch/export/mbox/" + encoded_query, + query_data = { 'start_date': (date_today() - datetime.timedelta(days=180)).isoformat(), 'email_list': list_name, 'query_value': query, 'query': f'subject:({query})', 'limit': '30', } + return query_data def construct_message_url(list_name, msgid): return "{}/arch/msg/{}/{}".format(settings.MAILING_LIST_ARCHIVE_URL, list_name, hash_list_message_id(list_name, msgid)) -def retrieve_messages_from_mbox(mbox_fileobj): - """Return selected content in message from mbox from mailarch.""" - res = [] - with tempfile.NamedTemporaryFile(suffix=".mbox") as mbox_file: - # mailbox.mbox needs a path, so we need to put the contents - # into a file - mbox_data = mbox_fileobj.read() - mbox_file.write(mbox_data) - mbox_file.flush() - - mbox = mailbox.mbox(mbox_file.name, create=False) - for msg in mbox: - content = "" - - for part in msg.walk(): - if part.get_content_type() == "text/plain": - charset = part.get_content_charset() or "utf-8" - content += get_payload_text(part, default_charset=charset) - - # parse a couple of things for the front end - utcdate = None - d = email.utils.parsedate_tz(msg["Date"]) - if d: - utcdate = datetime.datetime.fromtimestamp(email.utils.mktime_tz(d), datetime.timezone.utc) - - res.append({ - "from": msg["From"], - "splitfrom": email.utils.parseaddr(msg["From"]), - "subject": msg["Subject"], - "content": content.replace("\r\n", "\n").replace("\r", "\n").strip("\n"), - "message_id": email.utils.unquote(msg["Message-ID"].strip()), - "url": email.utils.unquote(msg["Archived-At"].strip()), - "date": msg["Date"], - "utcdate": (utcdate.date().isoformat(), utcdate.time().isoformat()) if utcdate else ("", ""), - }) - mbox.close() - - return res - -def retrieve_messages(query_data_url): +def retrieve_messages(query_data): """Retrieve and return selected content from mailarch.""" - res = [] - - # This has not been rewritten to use requests.get() because get() does - # not handle file URLs out of the box, which we need for tesing - with contextlib.closing(urlopen(query_data_url, timeout=15)) as fileobj: - content_type = fileobj.info()["Content-type"] - if not content_type.startswith("application/x-tar"): - if content_type.startswith("text/html"): - r = fileobj.read(20000) - q = PyQuery(r) - div = q('div[class~="no-results"]') - if div: - raise KeyError("No results: %s -> %s" % (query_data_url, div.text(), )) - raise Exception("Export failed - this usually means no matches were found") - - with tarfile.open(fileobj=fileobj, mode='r|*') as tar: - for entry in tar: - if entry.isfile(): - mbox_fileobj = tar.extractfile(entry) - res.extend(retrieve_messages_from_mbox(mbox_fileobj)) - - return res + + headers = {'X-Api-Key': settings.MAILING_LIST_ARCHIVE_API_KEY} + try: + response = requests.post( + settings.MAILING_LIST_ARCHIVE_SEARCH_URL, + headers=headers, + json=query_data, + timeout=settings.DEFAULT_REQUESTS_TIMEOUT) + except requests.Timeout as exc: + log(f'POST request failed for [{settings.MAILING_LIST_ARCHIVE_SEARCH_URL}]: {exc}') + raise RuntimeError(f'Timeout retrieving [{settings.MAILING_LIST_ARCHIVE_SEARCH_URL}]') from exc + + results = [] + jresponse = response.json() + if
'results' not in jresponse or len(jresponse['results']) == 0: + raise KeyError(f'No results: {query_data["query"]}') + for msg in jresponse['results']: + # datetime is already UTC + dt = datetime.datetime.fromisoformat(msg['date']) + dt_utc = dt.replace(tzinfo=datetime.timezone.utc) + results.append({ + "from": msg["from"], + "splitfrom": email.utils.parseaddr(msg["from"]), + "subject": msg["subject"], + "content": msg["content"].replace("\r\n", "\n").replace("\r", "\n").strip("\n"), + "message_id": msg["message_id"], + "url": msg["url"], + "utcdate": (dt_utc.date().isoformat(), dt_utc.time().isoformat()), + }) + + return results diff --git a/ietf/review/policies.py b/ietf/review/policies.py index 2b97fda146..91398a1b24 100644 --- a/ietf/review/policies.py +++ b/ietf/review/policies.py @@ -7,7 +7,7 @@ from django.utils import timezone from simple_history.utils import bulk_update_with_history -from ietf.doc.models import DocumentAuthor, DocAlias +from ietf.doc.models import DocumentAuthor from ietf.doc.utils import extract_complete_replaces_ancestor_mapping_for_docs from ietf.group.models import Role from ietf.name.models import ReviewAssignmentStateName @@ -131,12 +131,15 @@ def _update_skip_next(self, rotation_pks, assignee_person): assignee_index = rotation_pks.index(assignee_person.pk) skipped = rotation_pks[0:assignee_index] skipped_settings = self.team.reviewersettings_set.filter(person__in=skipped) # list of PKs is valid here + changed = [] for ss in skipped_settings: - ss.skip_next = max(0, ss.skip_next - 1) # ensure we don't go negative - bulk_update_with_history(skipped_settings, + if ss.skip_next > 0: + ss.skip_next = max(0, ss.skip_next - 1) # ensure we don't go negative + ss._change_reason = "Skip count decremented" + changed.append(ss) + bulk_update_with_history(changed, ReviewerSettings, - ['skip_next'], - default_change_reason='skipped') + ['skip_next']) def _assignment_in_order(self, rotation_pks, assignee_person): """Is this an in-order assignment?""" @@ -262,12 +265,15 @@ def _filter_unavailable_reviewers(self, reviewers, review_req=None): def _clear_request_next_assignment(self, person): s = self._reviewer_settings_for(person) - s.request_assignment_next = False - s.save() + if s.request_assignment_next: + s.request_assignment_next = False + s._change_reason = "Clearing request next assignment" + s.save() def _add_skip(self, person): s = self._reviewer_settings_for(person) s.skip_next += 1 + s._change_reason = "Incrementing skip count" s.save() def _reviewer_settings_for(self, person): @@ -293,8 +299,6 @@ def __init__(self, email_queryset, review_req, rotation_list): def _collect_context(self): """Collect all relevant data about this team, document and review request.""" - self.doc_aliases = DocAlias.objects.filter(docs=self.doc).values_list("name", flat=True) - # This data is collected as a dict, keys being person IDs, values being numbers/objects. 
self.rotation_index = {p.pk: i for i, p in enumerate(self.rotation_list)} self.reviewer_settings = self._reviewer_settings_for_person_ids(self.possible_person_ids) @@ -360,8 +364,7 @@ def format_period(p): add_boolean_score(+1, email.person_id in self.wish_to_review, "wishes to review document") add_boolean_score(-1, email.person_id in self.connections, self.connections.get(email.person_id)) # reviewer is somehow connected: bad - add_boolean_score(-1, settings.filter_re and any( - re.search(settings.filter_re, n) for n in self.doc_aliases), "filter regexp matches") + add_boolean_score(-1, settings.filter_re and re.search(settings.filter_re, self.doc.name), "filter regexp matches") # minimum interval between reviews days_needed = self.days_needed_for_reviewers.get(email.person_id, 0) @@ -487,6 +490,7 @@ def set_wants_to_be_next(self, reviewer_person): # Instead, the "assign me next" flag is set. settings = self._reviewer_settings_for(reviewer_person) settings.request_assignment_next = True + settings._change_reason = "Setting request next assignment" settings.save() def _update_skip_next(self, rotation_pks, assignee_person): @@ -526,20 +530,22 @@ def _update_skip_next(self, rotation_pks, assignee_person): min_skip_next = min([rs.skip_next for rs in rotation_settings.values()]) next_reviewer_index = None + changed = [] for index, pk in enumerate(unfolded_rotation_pks): rs = rotation_settings.get(pk) if (rs is None) or (rs.skip_next == min_skip_next): next_reviewer_index = index break else: - rs.skip_next = max(0, rs.skip_next - 1) # ensure never negative + if rs.skip_next > 0: + rs.skip_next = max(0, rs.skip_next - 1) # ensure never negative + rs._change_reason = "Skip count decremented" + changed.append(rs) log.assertion('next_reviewer_index is not None') # some entry in the list must have the minimum value - - bulk_update_with_history(rotation_settings.values(), + bulk_update_with_history(changed, ReviewerSettings, - ['skip_next'], - default_change_reason='skipped') + ['skip_next']) next_reviewer_pk = unfolded_rotation_pks[next_reviewer_index] NextReviewerInTeam.objects.update_or_create( @@ -581,6 +587,7 @@ def set_wants_to_be_next(self, reviewer_person): # who rejected a review and no further action is needed. settings = self._reviewer_settings_for(reviewer_person) settings.request_assignment_next = True + settings._change_reason = "Setting request next assignment" settings.save() diff --git a/ietf/review/tasks.py b/ietf/review/tasks.py new file mode 100644 index 0000000000..5d8afa6943 --- /dev/null +++ b/ietf/review/tasks.py @@ -0,0 +1,43 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +# +# Celery task definitions +# +from celery import shared_task + +from ietf.review.utils import ( + review_assignments_needing_reviewer_reminder, email_reviewer_reminder, + review_assignments_needing_secretary_reminder, email_secretary_reminder, + send_unavailability_period_ending_reminder, send_reminder_all_open_reviews, + send_review_reminder_overdue_assignment, send_reminder_unconfirmed_assignments) +from ietf.utils.log import log +from ietf.utils.timezone import date_today, DEADLINE_TZINFO + + +@shared_task +def send_review_reminders_task(): + today = date_today(DEADLINE_TZINFO) + + for assignment in review_assignments_needing_reviewer_reminder(today): + email_reviewer_reminder(assignment) + log("Emailed reminder to {} for review of {} in {} (req. 
id {})".format(assignment.reviewer.address, assignment.review_request.doc_id, assignment.review_request.team.acronym, assignment.review_request.pk)) + + for assignment, secretary_role in review_assignments_needing_secretary_reminder(today): + email_secretary_reminder(assignment, secretary_role) + review_req = assignment.review_request + log("Emailed reminder to {} for review of {} in {} (req. id {})".format(secretary_role.email.address, review_req.doc_id, review_req.team.acronym, review_req.pk)) + + period_end_reminders_sent = send_unavailability_period_ending_reminder(today) + for msg in period_end_reminders_sent: + log(msg) + + overdue_reviews_reminders_sent = send_review_reminder_overdue_assignment(today) + for msg in overdue_reviews_reminders_sent: + log(msg) + + open_reviews_reminders_sent = send_reminder_all_open_reviews(today) + for msg in open_reviews_reminders_sent: + log(msg) + + unconfirmed_assignment_reminders_sent = send_reminder_unconfirmed_assignments(today) + for msg in unconfirmed_assignment_reminders_sent: + log(msg) diff --git a/ietf/review/tests.py b/ietf/review/tests.py index f9d55d9d14..5dc8f11e8e 100644 --- a/ietf/review/tests.py +++ b/ietf/review/tests.py @@ -1,9 +1,11 @@ # Copyright The IETF Trust 2019-2020, All Rights Reserved # -*- coding: utf-8 -*- import datetime +from unittest import mock import debug # pyflakes:ignore from pyquery import PyQuery + from ietf.group.factories import RoleFactory from ietf.doc.factories import WgDraftFactory from ietf.utils.mail import empty_outbox, get_payload_text, outbox @@ -13,6 +15,7 @@ from .factories import ReviewAssignmentFactory, ReviewRequestFactory, ReviewerSettingsFactory from .mailarch import hash_list_message_id from .models import ReviewerSettings, ReviewSecretarySettings, ReviewTeamSettings, UnavailablePeriod +from .tasks import send_review_reminders_task from .utils import (email_secretary_reminder, review_assignments_needing_secretary_reminder, email_reviewer_reminder, review_assignments_needing_reviewer_reminder, send_reminder_unconfirmed_assignments, send_review_reminder_overdue_assignment, @@ -550,3 +553,66 @@ def test_review_add_comment(self): # But can't have the comment we are goint to add. self.assertContains(r, 'This is a test.') + +class TaskTests(TestCase): + # hyaaa it's mockzilla + @mock.patch("ietf.review.tasks.date_today") + @mock.patch("ietf.review.tasks.review_assignments_needing_reviewer_reminder") + @mock.patch("ietf.review.tasks.email_reviewer_reminder") + @mock.patch("ietf.review.tasks.review_assignments_needing_secretary_reminder") + @mock.patch("ietf.review.tasks.email_secretary_reminder") + @mock.patch("ietf.review.tasks.send_unavailability_period_ending_reminder") + @mock.patch("ietf.review.tasks.send_reminder_all_open_reviews") + @mock.patch("ietf.review.tasks.send_review_reminder_overdue_assignment") + @mock.patch("ietf.review.tasks.send_reminder_unconfirmed_assignments") + def test_send_review_reminders_task( + self, + mock_send_reminder_unconfirmed_assignments, + mock_send_review_reminder_overdue_assignment, + mock_send_reminder_all_open_reviews, + mock_send_unavailability_period_ending_reminder, + mock_email_secretary_reminder, + mock_review_assignments_needing_secretary_reminder, + mock_email_reviewer_reminder, + mock_review_assignments_needing_reviewer_reminder, + mock_date_today, + ): + """Test that send_review_reminders calls functions correctly + + Does not test individual methods, just that they are called as expected. 
+ """ + mock_today = object() + assignment = ReviewAssignmentFactory() + secretary_role = RoleFactory(name_id="secr") + + mock_date_today.return_value = mock_today + mock_review_assignments_needing_reviewer_reminder.return_value = [assignment] + mock_review_assignments_needing_secretary_reminder.return_value = [[assignment, secretary_role]] + mock_send_unavailability_period_ending_reminder.return_value = ["pretending I sent a period end reminder"] + mock_send_review_reminder_overdue_assignment.return_value = ["pretending I sent an overdue reminder"] + mock_send_reminder_all_open_reviews.return_value = ["pretending I sent an open review reminder"] + mock_send_reminder_unconfirmed_assignments.return_value = ["pretending I sent an unconfirmed reminder"] + + send_review_reminders_task() + + self.assertEqual(mock_review_assignments_needing_reviewer_reminder.call_count, 1) + self.assertEqual(mock_review_assignments_needing_reviewer_reminder.call_args[0], (mock_today,)) + self.assertEqual(mock_email_reviewer_reminder.call_count, 1) + self.assertEqual(mock_email_reviewer_reminder.call_args[0], (assignment,)) + + self.assertEqual(mock_review_assignments_needing_secretary_reminder.call_count, 1) + self.assertEqual(mock_review_assignments_needing_secretary_reminder.call_args[0], (mock_today,)) + self.assertEqual(mock_email_secretary_reminder.call_count, 1) + self.assertEqual(mock_email_secretary_reminder.call_args[0], (assignment, secretary_role)) + + self.assertEqual(mock_send_unavailability_period_ending_reminder.call_count, 1) + self.assertEqual(mock_send_unavailability_period_ending_reminder.call_args[0], (mock_today,)) + + self.assertEqual(mock_send_review_reminder_overdue_assignment.call_count, 1) + self.assertEqual(mock_send_review_reminder_overdue_assignment.call_args[0], (mock_today,)) + + self.assertEqual(mock_send_reminder_all_open_reviews.call_count, 1) + self.assertEqual(mock_send_reminder_all_open_reviews.call_args[0], (mock_today,)) + + self.assertEqual(mock_send_reminder_unconfirmed_assignments.call_count, 1) + self.assertEqual(mock_send_reminder_unconfirmed_assignments.call_args[0], (mock_today,)) diff --git a/ietf/review/utils.py b/ietf/review/utils.py index a91bcbd62f..61494738d3 100644 --- a/ietf/review/utils.py +++ b/ietf/review/utils.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2016-2020, All Rights Reserved +# Copyright The IETF Trust 2016-2023, All Rights Reserved # -*- coding: utf-8 -*- @@ -50,6 +50,8 @@ def can_request_review_of_doc(user, doc): if not user.is_authenticated: return False + # This is in a strange place as it has nothing to do with the user + # but this utility is used in too many places to move this quickly. 
if doc.type_id == 'draft' and doc.get_state_slug() != 'active': return False @@ -390,7 +392,9 @@ def assign_review_request_to_reviewer(request, review_req, reviewer, add_skip=Fa # cannot reference reviewassignment_set relation until pk exists if review_req.pk is not None: reviewassignment_set = review_req.reviewassignment_set.filter(reviewer=reviewer) - if reviewassignment_set.exists() and not reviewassignment_set.filter(state_id='rejected').exists(): + if (reviewassignment_set.exists() and not + (reviewassignment_set.filter(state_id='rejected').exists() or + reviewassignment_set.filter(state_id='withdrawn').exists())): return # Note that assigning a review no longer unassigns other reviews diff --git a/ietf/secr/announcement/forms.py b/ietf/secr/announcement/forms.py index 3aacbfe622..91004ea270 100644 --- a/ietf/secr/announcement/forms.py +++ b/ietf/secr/announcement/forms.py @@ -14,93 +14,133 @@ # Globals # --------------------------------------------- -TO_LIST = ('IETF Announcement List ', - 'I-D Announcement List ', - 'RFP Announcement List ', - 'The IESG ', - 'Working Group Chairs ', - 'BOF Chairs ', - 'Other...') +TO_LIST = ( + "IETF Announcement List ", + "I-D Announcement List ", + "RFP Announcement List ", + "The IESG ", + "Working Group Chairs ", + "BOF Chairs ", + "Other...", +) # --------------------------------------------- # Helper Functions # --------------------------------------------- + def get_from_choices(user): - ''' + """ This function returns a choices tuple containing all the Announced From choices. Including leadership chairs and other entities. - ''' + """ addresses = [] - if has_role(user,'Secretariat'): - addresses = AnnouncementFrom.objects.values_list('address', flat=True).order_by('address').distinct() + if has_role(user, "Secretariat"): + addresses = ( + AnnouncementFrom.objects.values_list("address", flat=True) + .order_by("address") + .distinct() + ) else: for role in user.person.role_set.all(): - addresses.extend(AnnouncementFrom.objects.filter(name=role.name, group=role.group).values_list('address', flat=True).order_by('address')) + addresses.extend( + AnnouncementFrom.objects.filter(name=role.name, group=role.group) + .values_list("address", flat=True) + .order_by("address") + ) nomcom_choices = get_nomcom_choices(user) if nomcom_choices: addresses = list(addresses) + nomcom_choices - - return list(zip(addresses, addresses)) + + choices = list(zip(addresses, addresses)) + if len(choices) > 1: + choices.insert(0, ("", "(Choose an option)")) + return choices def get_nomcom_choices(user): - ''' + """ Returns the list of nomcom email addresses for given user - ''' - nomcoms = Role.objects.filter(name="chair", - group__acronym__startswith="nomcom", - group__state="active", - group__type="nomcom", - person=user.person) + """ + nomcoms = Role.objects.filter( + name="chair", + group__acronym__startswith="nomcom", + group__state="active", + group__type="nomcom", + person=user.person, + ) addresses = [] for nomcom in nomcoms: year = nomcom.group.acronym[-4:] - addresses.append('NomCom Chair %s ' % (year,year)) + addresses.append("NomCom Chair %s " % (year, year)) return addresses - + def get_to_choices(): - return list(zip(TO_LIST,TO_LIST)) + return list(zip(TO_LIST, TO_LIST)) # --------------------------------------------- # Forms # --------------------------------------------- + class AnnounceForm(forms.ModelForm): - nomcom = forms.ModelChoiceField(queryset=Group.objects.filter(acronym__startswith='nomcom',type='nomcom',state='active'),required=False) + nomcom 
= forms.ModelChoiceField( + queryset=Group.objects.filter( + acronym__startswith="nomcom", type="nomcom", state="active" + ), + required=False, + ) to_custom = MultiEmailField(required=False) class Meta: model = Message - fields = ('nomcom', 'to','to_custom','frm','cc','bcc','reply_to','subject','body') + fields = ( + "nomcom", + "to", + "to_custom", + "frm", + "cc", + "bcc", + "reply_to", + "subject", + "body", + ) + labels = {"frm": "From"} + help_texts = { + "to": "Select name OR select Other... and enter email below", + "cc": "Use comma separated lists for emails (Cc, Bcc, Reply To)", + } def __init__(self, *args, **kwargs): - if 'hidden' in kwargs: - self.hidden = kwargs.pop('hidden') + if "hidden" in kwargs: + self.hidden = kwargs.pop("hidden") else: self.hidden = False - user = kwargs.pop('user') + user = kwargs.pop("user") person = user.person super(AnnounceForm, self).__init__(*args, **kwargs) - self.fields['to'].widget = forms.Select(choices=get_to_choices()) - self.fields['to'].help_text = 'Select name OR select Other... and enter email below' - self.fields['cc'].help_text = 'Use comma separated lists for emails (Cc, Bcc, Reply To)' - self.fields['frm'].widget = forms.Select(choices=get_from_choices(user)) - self.fields['frm'].label = 'From' - self.fields['reply_to'].required = True - self.fields['nomcom'].label = 'NomCom message:' - nomcom_roles = person.role_set.filter(group__in=self.fields['nomcom'].queryset,name='chair') - secr_roles = person.role_set.filter(group__acronym='secretariat',name='secr') + self.fields["to"].widget = forms.Select(choices=get_to_choices()) + self.fields["frm"].widget = forms.Select(choices=get_from_choices(user)) + self.fields["reply_to"].required = True + # nomcom field is defined declaratively so label and help_text must be set here + self.fields["nomcom"].label = "NomCom message:" + self.fields["nomcom"].help_text = ( + "If this is a NomCom announcement specify which NomCom group here" + ) + nomcom_roles = person.role_set.filter( + group__in=self.fields["nomcom"].queryset, name="chair" + ) + secr_roles = person.role_set.filter(group__acronym="secretariat", name="secr") if nomcom_roles: - self.initial['nomcom'] = nomcom_roles[0].group.pk + self.initial["nomcom"] = nomcom_roles[0].group.pk if not nomcom_roles and not secr_roles: - self.fields['nomcom'].widget = forms.HiddenInput() - + self.fields["nomcom"].widget = forms.HiddenInput() + if self.hidden: for key in list(self.fields.keys()): self.fields[key].widget = forms.HiddenInput() @@ -110,25 +150,29 @@ def clean(self): data = self.cleaned_data if self.errors: return self.cleaned_data - if data['to'] == 'Other...' and not data['to_custom']: + if data["to"] == "Other..." 
and not data["to_custom"]: raise forms.ValidationError('You must enter a "To" email address') - for k in ['to', 'frm', 'cc',]: + for k in [ + "to", + "frm", + "cc", + ]: data[k] = unescape(data[k]) return data def save(self, *args, **kwargs): - user = kwargs.pop('user') + user = kwargs.pop("user") message = super(AnnounceForm, self).save(commit=False) message.by = user.person - if self.cleaned_data['to'] == 'Other...': - message.to = self.cleaned_data['to_custom'] - if kwargs['commit']: + if self.cleaned_data["to"] == "Other...": + message.to = self.cleaned_data["to_custom"] + if kwargs["commit"]: message.save() # handle nomcom message - nomcom = self.cleaned_data.get('nomcom',False) + nomcom = self.cleaned_data.get("nomcom", False) if nomcom: message.related_groups.add(nomcom) - return message \ No newline at end of file + return message diff --git a/ietf/secr/announcement/tests.py b/ietf/secr/announcement/tests.py index c50e997f97..f08e824397 100644 --- a/ietf/secr/announcement/tests.py +++ b/ietf/secr/announcement/tests.py @@ -6,7 +6,7 @@ from django.urls import reverse -import debug # pyflakes:ignore +import debug # pyflakes:ignore from ietf.utils.test_utils import TestCase from ietf.group.factories import RoleFactory @@ -17,97 +17,102 @@ from ietf.message.models import AnnouncementFrom from ietf.utils.mail import outbox, empty_outbox -SECR_USER='secretary' -WG_USER='' -AD_USER='' +SECR_USER = "secretary" +WG_USER = "" +AD_USER = "" + class SecrAnnouncementTestCase(TestCase): def setUp(self): super().setUp() - chair = RoleName.objects.get(slug='chair') - secr = RoleName.objects.get(slug='secr') - ietf = Group.objects.get(acronym='ietf') - iab = Group.objects.get(acronym='iab') - secretariat = Group.objects.get(acronym='secretariat') - AnnouncementFrom.objects.create(name=secr,group=secretariat,address='IETF Secretariat ') - AnnouncementFrom.objects.create(name=chair,group=ietf,address='IETF Chair ') - AnnouncementFrom.objects.create(name=chair,group=iab,address='IAB Chair ') + chair = RoleName.objects.get(slug="chair") + secr = RoleName.objects.get(slug="secr") + ietf = Group.objects.get(acronym="ietf") + iab = Group.objects.get(acronym="iab") + secretariat = Group.objects.get(acronym="secretariat") + AnnouncementFrom.objects.create( + name=secr, + group=secretariat, + address="IETF Secretariat ", + ) + AnnouncementFrom.objects.create( + name=chair, group=ietf, address="IETF Chair " + ) + AnnouncementFrom.objects.create( + name=chair, group=iab, address="IAB Chair " + ) def test_main(self): "Main Test" - url = reverse('ietf.secr.announcement.views.main') + url = reverse("ietf.secr.announcement.views.main") self.client.login(username="secretary", password="secretary+password") r = self.client.get(url) self.assertEqual(r.status_code, 200) - + def test_main_announce_from(self): - url = reverse('ietf.secr.announcement.views.main') + url = reverse("ietf.secr.announcement.views.main") # Secretariat self.client.login(username="secretary", password="secretary+password") r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) - self.assertEqual(len(q('#id_frm option')),3) + self.assertEqual(len(q("#id_frm option")), 4) # IAB Chair self.client.login(username="iab-chair", password="iab-chair+password") r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) - self.assertEqual(len(q('#id_frm option')),1) - self.assertTrue('' in q('#id_frm option').val()) + self.assertEqual(len(q("#id_frm option")), 1) + self.assertTrue("" in q("#id_frm 
option").val()) # IETF Chair self.client.login(username="ietf-chair", password="ietf-chair+password") r = self.client.get(url) self.assertEqual(r.status_code, 200) q = PyQuery(r.content) - self.assertEqual(len(q('#id_frm option')),1) - self.assertTrue('' in q('#id_frm option').val()) + self.assertEqual(len(q("#id_frm option")), 1) + self.assertTrue("" in q("#id_frm option").val()) + class UnauthorizedAnnouncementCase(TestCase): def test_unauthorized(self): "Unauthorized Test" - url = reverse('ietf.secr.announcement.views.main') - person = RoleFactory(name_id='chair',group__acronym='mars').person - self.client.login(username=person.user.username, password=person.user.username+"+password") + url = reverse("ietf.secr.announcement.views.main") + person = RoleFactory(name_id="chair", group__acronym="mars").person + self.client.login( + username=person.user.username, password=person.user.username + "+password" + ) r = self.client.get(url) self.assertEqual(r.status_code, 403) - + + class SubmitAnnouncementCase(TestCase): - def test_invalid_submit(self): - "Invalid Submit" - url = reverse('ietf.secr.announcement.views.main') - post_data = {'id_subject':''} - self.client.login(username="secretary", password="secretary+password") - r = self.client.post(url,post_data) - self.assertEqual(r.status_code, 200) - q = PyQuery(r.content) - self.assertTrue(len(q('form ul.errorlist')) > 0) - def test_valid_submit(self): "Valid Submit" nomcom_test_data() empty_outbox() - url = reverse('ietf.secr.announcement.views.main') - confirm_url = reverse('ietf.secr.announcement.views.confirm') - nomcom = Group.objects.get(type='nomcom') - post_data = {'nomcom': nomcom.pk, - 'to':'Other...', - 'to_custom':'rcross@amsl.com', - 'frm':'IETF Secretariat <ietf-secretariat@ietf.org>', - 'reply_to':'secretariat@ietf.org', - 'subject':'Test Subject', - 'body':'This is a test.'} + url = reverse("ietf.secr.announcement.views.main") + confirm_url = reverse("ietf.secr.announcement.views.confirm") + nomcom = Group.objects.get(type="nomcom") + post_data = { + "nomcom": nomcom.pk, + "to": "Other...", + "to_custom": "phil@example.com", + "frm": "IETF Secretariat <ietf-secretariat@ietf.org>", + "reply_to": "secretariat@ietf.org", + "subject": "Test Subject", + "body": "This is a test.", + } self.client.login(username="secretary", password="secretary+password") - response = self.client.post(url,post_data) - self.assertContains(response, 'Confirm Announcement') - response = self.client.post(confirm_url,post_data,follow=True) + response = self.client.post(url, post_data) + self.assertContains(response, "Confirm Announcement") + response = self.client.post(confirm_url, post_data, follow=True) self.assertRedirects(response, url) - self.assertEqual(len(outbox),1) - self.assertEqual(outbox[0]['subject'],'Test Subject') - self.assertEqual(outbox[0]['to'],'') - message = Message.objects.filter(by__user__username='secretary').last() - self.assertEqual(message.subject,'Test Subject') + self.assertEqual(len(outbox), 1) + self.assertEqual(outbox[0]["subject"], "Test Subject") + self.assertEqual(outbox[0]["to"], "") + message = Message.objects.filter(by__user__username="secretary").last() + self.assertEqual(message.subject, "Test Subject") self.assertTrue(nomcom in message.related_groups.all()) diff --git a/ietf/secr/announcement/urls.py b/ietf/secr/announcement/urls.py index 3c3c05a09c..dc534f64ae 100644 --- a/ietf/secr/announcement/urls.py +++ b/ietf/secr/announcement/urls.py @@ -1,8 +1,7 @@ - from ietf.secr.announcement import views from 
ietf.utils.urls import url urlpatterns = [ - url(r'^$', views.main), - url(r'^confirm/$', views.confirm), + url(r"^$", views.main), + url(r"^confirm/$", views.confirm), ] diff --git a/ietf/secr/announcement/views.py b/ietf/secr/announcement/views.py index 42de089c59..5617ae9e6f 100644 --- a/ietf/secr/announcement/views.py +++ b/ietf/secr/announcement/views.py @@ -18,86 +18,93 @@ # Helper Functions # ------------------------------------------------- def check_access(user): - ''' + """ This function takes a Django User object and returns true if the user has access to the Announcement app. - ''' + """ if hasattr(user, "person"): person = user.person if has_role(user, "Secretariat"): return True - + for role in person.role_set.all(): - if AnnouncementFrom.objects.filter(name=role.name,group=role.group): + if AnnouncementFrom.objects.filter(name=role.name, group=role.group): return True - if Role.objects.filter(name="chair", - group__acronym__startswith="nomcom", - group__state="active", - group__type="nomcom", - person=person): + if Role.objects.filter( + name="chair", + group__acronym__startswith="nomcom", + group__state="active", + group__type="nomcom", + person=person, + ): return True return False + # -------------------------------------------------- # STANDARD VIEW FUNCTIONS # -------------------------------------------------- # this seems to cause some kind of circular problem # @check_for_cancel(reverse('home')) @login_required -@check_for_cancel('../') +@check_for_cancel("../") def main(request): - ''' + """ Main view for Announcement tool. Authrozied users can fill out email details: header, body, etc and send. - ''' + """ if not check_access(request.user): - permission_denied(request, 'Restricted to: Secretariat, IAD, or chair of IETF, IAB, RSOC, RSE, IAOC, ISOC, NomCom.') + permission_denied( + request, + "Restricted to: Secretariat, IAD, or chair of IETF, IAB, RSOC, RSE, IAOC, ISOC, NomCom.", + ) - form = AnnounceForm(request.POST or None,user=request.user) + form = AnnounceForm(request.POST or None, user=request.user) if form.is_valid(): # recast as hidden form for next page of process form = AnnounceForm(request.POST, user=request.user, hidden=True) - if form.data['to'] == 'Other...': - to = form.data['to_custom'] + if form.data["to"] == "Other...": + to = form.data["to_custom"] else: - to = form.data['to'] + to = form.data["to"] - return render(request, 'announcement/confirm.html', { - 'message': form.data, - 'to': to, - 'form': form}, + return render( + request, + "announcement/confirm.html", + {"message": form.data, "to": to, "form": form}, ) - return render(request, 'announcement/main.html', { 'form': form} ) + return render(request, "announcement/index.html", {"form": form}) + @login_required -@check_for_cancel('../') +@check_for_cancel("../") def confirm(request): if not check_access(request.user): - permission_denied(request, 'Restricted to: Secretariat, IAD, or chair of IETF, IAB, RSOC, RSE, IAOC, ISOC, NomCom.') + permission_denied( + request, + "Restricted to: Secretariat, IAD, or chair of IETF, IAB, RSOC, RSE, IAOC, ISOC, NomCom.", + ) - if request.method == 'POST': + if request.method == "POST": form = AnnounceForm(request.POST, user=request.user) - if request.method == 'POST': - message = form.save(user=request.user,commit=True) - extra = {'Reply-To': message.get('reply_to') } - send_mail_text(None, - message.to, - message.frm, - message.subject, - message.body, - cc=message.cc, - bcc=message.bcc, - extra=extra, - ) - - messages.success(request, 'The announcement 
was sent.') - return redirect('ietf.secr.announcement.views.main') - - - - + if request.method == "POST": + message = form.save(user=request.user, commit=True) + extra = {"Reply-To": message.get("reply_to")} + send_mail_text( + None, + message.to, + message.frm, + message.subject, + message.body, + cc=message.cc, + bcc=message.bcc, + extra=extra, + ) + + messages.success(request, "The announcement was sent.") + return redirect("ietf.secr.announcement.views.main") diff --git a/ietf/secr/meetings/tests.py b/ietf/secr/meetings/tests.py index 0e51ff8ca2..08c792ce1e 100644 --- a/ietf/secr/meetings/tests.py +++ b/ietf/secr/meetings/tests.py @@ -3,14 +3,11 @@ import datetime -import os -import shutil from pyquery import PyQuery import debug # pyflakes:ignore -from django.conf import settings from django.urls import reverse from django.utils import timezone @@ -27,24 +24,6 @@ class SecrMeetingTestCase(TestCase): settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['AGENDA_PATH'] - def setUp(self): - super().setUp() - self.bluesheet_dir = self.tempdir('bluesheet') - self.bluesheet_path = os.path.join(self.bluesheet_dir,'blue_sheet.rtf') - self.saved_secr_blue_sheet_path = settings.SECR_BLUE_SHEET_PATH - settings.SECR_BLUE_SHEET_PATH = self.bluesheet_path - - # n.b., the bluesheet upload relies on SECR_PROCEEDINGS_DIR being the same - # as AGENDA_PATH. This is probably a bug, but may not be worth fixing if - # the secr app is on the way out. - self.saved_secr_proceedings_dir = settings.SECR_PROCEEDINGS_DIR - settings.SECR_PROCEEDINGS_DIR = settings.AGENDA_PATH - - def tearDown(self): - settings.SECR_PROCEEDINGS_DIR = self.saved_secr_proceedings_dir - settings.SECR_BLUE_SHEET_PATH = self.saved_secr_blue_sheet_path - shutil.rmtree(self.bluesheet_dir) - super().tearDown() def test_main(self): "Main Test" @@ -103,6 +82,10 @@ def test_add_meeting(self): [cn.slug for cn in new_meeting.group_conflict_types.all()], post_data['group_conflict_types'], ) + self.assertEqual( + new_meeting.session_request_lock_message, + "Session requests for this meeting have not yet opened.", + ) def test_add_meeting_default_conflict_types(self): """Add meeting should default to same conflict types as previous meeting""" @@ -416,4 +399,4 @@ def test_get_times(self): times = get_times(meeting,day) values = [ x[0] for x in times ] self.assertTrue(times) - self.assertTrue(timeslot.time.strftime('%H%M') in values) \ No newline at end of file + self.assertTrue(timeslot.time.strftime('%H%M') in values) diff --git a/ietf/secr/meetings/views.py b/ietf/secr/meetings/views.py index 8afcf5a11e..1f6f2f3297 100644 --- a/ietf/secr/meetings/views.py +++ b/ietf/secr/meetings/views.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2007-2023, All Rights Reserved +# Copyright The IETF Trust 2007-2025, All Rights Reserved # -*- coding: utf-8 -*- import datetime @@ -17,15 +17,15 @@ from ietf.ietfauth.utils import role_required from ietf.utils.mail import send_mail from ietf.meeting.forms import duration_string -from ietf.meeting.helpers import get_meeting, make_materials_directories, populate_important_dates +from ietf.meeting.helpers import make_materials_directories, populate_important_dates from ietf.meeting.models import Meeting, Session, Room, TimeSlot, SchedTimeSessAssignment, Schedule, SchedulingEvent from ietf.meeting.utils import add_event_info_to_session_qs +from ietf.meeting.views_session_request import get_initial_session from ietf.name.models import SessionStatusName from ietf.group.models import Group, GroupEvent from 
ietf.secr.meetings.forms import ( BaseMeetingRoomFormSet, MeetingModelForm, MeetingSelectForm, MeetingRoomForm, MiscSessionForm, TimeSlotForm, RegularSessionEditForm, MeetingRoomOptionsForm ) -from ietf.secr.sreq.views import get_initial_session from ietf.secr.utils.meeting import get_session, get_timeslot from ietf.mailtrigger.utils import gather_address_lists from ietf.utils.timezone import make_aware @@ -223,9 +223,8 @@ def add(request): ) meeting.schedule = schedule - # we want to carry session request lock status over from previous meeting - previous_meeting = get_meeting( int(meeting.number) - 1 ) - meeting.session_request_lock_message = previous_meeting.session_request_lock_message + # Create meeting with session requests locked + meeting.session_request_lock_message = "Session requests for this meeting have not yet opened." meeting.save() populate_important_dates(meeting) diff --git a/ietf/secr/rolodex/views.py b/ietf/secr/rolodex/views.py index 7dd8201f0c..9fd4a8b107 100644 --- a/ietf/secr/rolodex/views.py +++ b/ietf/secr/rolodex/views.py @@ -7,7 +7,6 @@ from ietf.ietfauth.utils import role_required from ietf.person.models import Person, Email, Alias -from ietf.person.utils import merge_users from ietf.secr.rolodex.forms import EditPersonForm, EmailForm, NameForm, NewPersonForm, SearchForm @@ -179,7 +178,6 @@ def edit(request, id): if 'user' in person_form.changed_data and person_form.initial['user']: try: source = User.objects.get(username__iexact=person_form.initial['user']) - merge_users(source, person_form.cleaned_data['user']) source.is_active = False source.save() except User.DoesNotExist: diff --git a/ietf/secr/sreq/forms.py b/ietf/secr/sreq/forms.py deleted file mode 100644 index 1100bc7c8a..0000000000 --- a/ietf/secr/sreq/forms.py +++ /dev/null @@ -1,332 +0,0 @@ -# Copyright The IETF Trust 2013-2022, All Rights Reserved -# -*- coding: utf-8 -*- - - -from django import forms -from django.template.defaultfilters import pluralize - -import debug # pyflakes:ignore - -from ietf.name.models import TimerangeName, ConstraintName -from ietf.group.models import Group -from ietf.meeting.forms import sessiondetailsformset_factory -from ietf.meeting.models import ResourceAssociation, Constraint -from ietf.person.fields import SearchablePersonsField -from ietf.person.models import Person -from ietf.utils.html import clean_text_field -from ietf.utils import log - -# ------------------------------------------------- -# Globals -# ------------------------------------------------- - -NUM_SESSION_CHOICES = (('','--Please select'),('1','1'),('2','2')) -SESSION_TIME_RELATION_CHOICES = (('', 'No preference'),) + Constraint.TIME_RELATION_CHOICES -JOINT_FOR_SESSION_CHOICES = (('1', 'First session'), ('2', 'Second session'), ('3', 'Third session'), ) - -# ------------------------------------------------- -# Helper Functions -# ------------------------------------------------- -def allowed_conflicting_groups(): - return Group.objects.filter(type__in=['wg', 'ag', 'rg', 'rag', 'program', 'edwg'], state__in=['bof', 'proposed', 'active']) - -def check_conflict(groups, source_group): - ''' - Takes a string which is a list of group acronyms. 
Checks that they are all active groups - ''' - # convert to python list (allow space or comma separated lists) - items = groups.replace(',',' ').split() - active_groups = allowed_conflicting_groups() - for group in items: - if group == source_group.acronym: - raise forms.ValidationError("Cannot declare a conflict with the same group: %s" % group) - - if not active_groups.filter(acronym=group): - raise forms.ValidationError("Invalid or inactive group acronym: %s" % group) - -# ------------------------------------------------- -# Forms -# ------------------------------------------------- - -class GroupSelectForm(forms.Form): - group = forms.ChoiceField() - - def __init__(self,*args,**kwargs): - choices = kwargs.pop('choices') - super(GroupSelectForm, self).__init__(*args,**kwargs) - self.fields['group'].widget.choices = choices - - -class NameModelMultipleChoiceField(forms.ModelMultipleChoiceField): - def label_from_instance(self, name): - return name.desc - - -class SessionForm(forms.Form): - num_session = forms.ChoiceField(choices=NUM_SESSION_CHOICES) - # session fields are added in __init__() - session_time_relation = forms.ChoiceField(choices=SESSION_TIME_RELATION_CHOICES, required=False) - attendees = forms.IntegerField() - # FIXME: it would cleaner to have these be - # ModelMultipleChoiceField, and just customize the widgetry, that - # way validation comes for free (applies to this CharField and the - # constraints dynamically instantiated in __init__()) - joint_with_groups = forms.CharField(max_length=255,required=False) - joint_with_groups_selector = forms.ChoiceField(choices=[], required=False) # group select widget for prev field - joint_for_session = forms.ChoiceField(choices=JOINT_FOR_SESSION_CHOICES, required=False) - comments = forms.CharField(max_length=200,required=False) - third_session = forms.BooleanField(required=False) - resources = forms.MultipleChoiceField(widget=forms.CheckboxSelectMultiple,required=False) - bethere = SearchablePersonsField(label="Must be present", required=False) - timeranges = NameModelMultipleChoiceField(widget=forms.CheckboxSelectMultiple, required=False, - queryset=TimerangeName.objects.all()) - adjacent_with_wg = forms.ChoiceField(required=False) - send_notifications = forms.BooleanField(label="Send notification emails?", required=False, initial=False) - - def __init__(self, group, meeting, data=None, *args, **kwargs): - self.hidden = kwargs.pop('hidden', False) - self.notifications_optional = kwargs.pop('notifications_optional', False) - - self.group = group - formset_class = sessiondetailsformset_factory(max_num=3 if group.features.acts_like_wg else 50) - self.session_forms = formset_class(group=self.group, meeting=meeting, data=data) - super(SessionForm, self).__init__(data=data, *args, **kwargs) - if not self.notifications_optional: - self.fields['send_notifications'].widget = forms.HiddenInput() - - # Allow additional sessions for non-wg-like groups - if not self.group.features.acts_like_wg: - self.fields['num_session'].choices = ((n, str(n)) for n in range(1, 51)) - - self.fields['comments'].widget = forms.Textarea(attrs={'rows':'3','cols':'65'}) - - other_groups = list(allowed_conflicting_groups().exclude(pk=group.pk).values_list('acronym', 'acronym').order_by('acronym')) - self.fields['adjacent_with_wg'].choices = [('', '--No preference')] + other_groups - group_acronym_choices = [('','--Select WG(s)')] + other_groups - self.fields['joint_with_groups_selector'].choices = group_acronym_choices - - # Set up constraints for the meeting - 
self._wg_field_data = [] - for constraintname in meeting.group_conflict_types.all(): - # two fields for each constraint: a CharField for the group list and a selector to add entries - constraint_field = forms.CharField(max_length=255, required=False) - constraint_field.widget.attrs['data-slug'] = constraintname.slug - constraint_field.widget.attrs['data-constraint-name'] = str(constraintname).title() - self._add_widget_class(constraint_field.widget, 'wg_constraint') - - selector_field = forms.ChoiceField(choices=group_acronym_choices, required=False) - selector_field.widget.attrs['data-slug'] = constraintname.slug # used by onchange handler - self._add_widget_class(selector_field.widget, 'wg_constraint_selector') - - cfield_id = 'constraint_{}'.format(constraintname.slug) - cselector_id = 'wg_selector_{}'.format(constraintname.slug) - # keep an eye out for field name conflicts - log.assertion('cfield_id not in self.fields') - log.assertion('cselector_id not in self.fields') - self.fields[cfield_id] = constraint_field - self.fields[cselector_id] = selector_field - self._wg_field_data.append((constraintname, cfield_id, cselector_id)) - - # Show constraints that are not actually used by the meeting so these don't get lost - self._inactive_wg_field_data = [] - inactive_cnames = ConstraintName.objects.filter( - is_group_conflict=True # Only collect group conflicts... - ).exclude( - meeting=meeting # ...that are not enabled for this meeting... - ).filter( - constraint__source=group, # ...but exist for this group... - constraint__meeting=meeting, # ... at this meeting. - ).distinct() - - for inactive_constraint_name in inactive_cnames: - field_id = 'delete_{}'.format(inactive_constraint_name.slug) - self.fields[field_id] = forms.BooleanField(required=False, label='Delete this conflict', help_text='Delete this inactive conflict?') - constraints = group.constraint_source_set.filter(meeting=meeting, name=inactive_constraint_name) - self._inactive_wg_field_data.append( - (inactive_constraint_name, - ' '.join([c.target.acronym for c in constraints]), - field_id) - ) - - self.fields['joint_with_groups_selector'].widget.attrs['onchange'] = "document.form_post.joint_with_groups.value=document.form_post.joint_with_groups.value + ' ' + this.options[this.selectedIndex].value; return 1;" - self.fields["resources"].choices = [(x.pk,x.desc) for x in ResourceAssociation.objects.filter(name__used=True).order_by('name__order') ] - - if self.hidden: - # replace all the widgets to start... - for key in list(self.fields.keys()): - self.fields[key].widget = forms.HiddenInput() - # re-replace a couple special cases - self.fields['resources'].widget = forms.MultipleHiddenInput() - self.fields['timeranges'].widget = forms.MultipleHiddenInput() - # and entirely replace bethere - no need to support searching if input is hidden - self.fields['bethere'] = forms.ModelMultipleChoiceField( - widget=forms.MultipleHiddenInput, required=False, - queryset=Person.objects.all(), - ) - - def wg_constraint_fields(self): - """Iterates over wg constraint fields - - Intended for use in the template. 
- """ - for cname, cfield_id, cselector_id in self._wg_field_data: - yield cname, self[cfield_id], self[cselector_id] - - def wg_constraint_count(self): - """How many wg constraints are there?""" - return len(self._wg_field_data) - - def wg_constraint_field_ids(self): - """Iterates over wg constraint field IDs""" - for cname, cfield_id, _ in self._wg_field_data: - yield cname, cfield_id - - def inactive_wg_constraints(self): - for cname, value, field_id in self._inactive_wg_field_data: - yield cname, value, self[field_id] - - def inactive_wg_constraint_count(self): - return len(self._inactive_wg_field_data) - - def inactive_wg_constraint_field_ids(self): - """Iterates over wg constraint field IDs""" - for cname, _, field_id in self._inactive_wg_field_data: - yield cname, field_id - - @staticmethod - def _add_widget_class(widget, new_class): - """Add a new class, taking care in case some already exist""" - existing_classes = widget.attrs.get('class', '').split() - widget.attrs['class'] = ' '.join(existing_classes + [new_class]) - - def _join_conflicts(self, cleaned_data, slugs): - """Concatenate constraint fields from cleaned data into a single list""" - conflicts = [] - for cname, cfield_id, _ in self._wg_field_data: - if cname.slug in slugs and cfield_id in cleaned_data: - groups = cleaned_data[cfield_id] - # convert to python list (allow space or comma separated lists) - items = groups.replace(',',' ').split() - conflicts.extend(items) - return conflicts - - def _validate_duplicate_conflicts(self, cleaned_data): - """Validate that no WGs appear in more than one constraint that does not allow duplicates - - Raises ValidationError - """ - # Only the older constraints (conflict, conflic2, conflic3) need to be mutually exclusive. - all_conflicts = self._join_conflicts(cleaned_data, ['conflict', 'conflic2', 'conflic3']) - seen = [] - duplicated = [] - errors = [] - for c in all_conflicts: - if c not in seen: - seen.append(c) - elif c not in duplicated: # only report once - duplicated.append(c) - errors.append(forms.ValidationError('%s appears in conflicts more than once' % c)) - return errors - - def clean_joint_with_groups(self): - groups = self.cleaned_data['joint_with_groups'] - check_conflict(groups, self.group) - return groups - - def clean_comments(self): - return clean_text_field(self.cleaned_data['comments']) - - def clean_bethere(self): - bethere = self.cleaned_data["bethere"] - if bethere: - extra = set( - Person.objects.filter( - role__group=self.group, role__name__in=["chair", "ad"] - ) - & bethere - ) - if extra: - extras = ", ".join(e.name for e in extra) - raise forms.ValidationError( - ( - f"Please remove the following person{pluralize(len(extra))}, the system " - f"tracks their availability due to their role{pluralize(len(extra))}: {extras}." 
- ) - ) - return bethere - - def clean_send_notifications(self): - return True if not self.notifications_optional else self.cleaned_data['send_notifications'] - - def is_valid(self): - return super().is_valid() and self.session_forms.is_valid() - - def clean(self): - super(SessionForm, self).clean() - self.session_forms.clean() - - data = self.cleaned_data - - # Validate the individual conflict fields - for _, cfield_id, _ in self._wg_field_data: - try: - check_conflict(data[cfield_id], self.group) - except forms.ValidationError as e: - self.add_error(cfield_id, e) - - # Skip remaining tests if individual field tests had errors, - if self.errors: - return data - - # error if conflicts contain disallowed dupes - for error in self._validate_duplicate_conflicts(data): - self.add_error(None, error) - - # Verify expected number of session entries are present - num_sessions_with_data = len(self.session_forms.forms_to_keep) - num_sessions_expected = -1 - try: - num_sessions_expected = int(data.get('num_session', '')) - except ValueError: - self.add_error('num_session', 'Invalid value for number of sessions') - if num_sessions_with_data < num_sessions_expected: - self.add_error('num_session', 'Must provide data for all sessions') - - # if default (empty) option is selected, cleaned_data won't include num_session key - if num_sessions_expected != 2 and num_sessions_expected is not None: - if data.get('session_time_relation'): - self.add_error( - 'session_time_relation', - forms.ValidationError('Time between sessions can only be used when two sessions are requested.') - ) - - joint_session = data.get('joint_for_session', '') - if joint_session != '': - joint_session = int(joint_session) - if joint_session > num_sessions_with_data: - self.add_error( - 'joint_for_session', - forms.ValidationError( - f'Session {joint_session} can not be the joint session, the session has not been requested.' 
- ) - ) - - return data - - @property - def media(self): - # get media for our formset - return super().media + self.session_forms.media + forms.Media(js=('secr/js/session_form.js',)) - - -# Used for totally virtual meetings during COVID-19 to omit the expected -# number of attendees since there were no room size limitations -# -# class VirtualSessionForm(SessionForm): -# '''A SessionForm customized for special virtual meeting requirements''' -# attendees = forms.IntegerField(required=False) - - -class ToolStatusForm(forms.Form): - message = forms.CharField(widget=forms.Textarea(attrs={'rows':'3','cols':'80'}), strip=False) - diff --git a/ietf/secr/sreq/urls.py b/ietf/secr/sreq/urls.py deleted file mode 100644 index 7e0db8117a..0000000000 --- a/ietf/secr/sreq/urls.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright The IETF Trust 2007-2019, All Rights Reserved - -from django.conf import settings - -from ietf.secr.sreq import views -from ietf.utils.urls import url - -urlpatterns = [ - url(r'^$', views.main), - url(r'^status/$', views.tool_status), - url(r'^%(acronym)s/$' % settings.URL_REGEXPS, views.view), - url(r'^(?P[A-Za-z0-9_\-\+]+)/%(acronym)s/view/$' % settings.URL_REGEXPS, views.view), - url(r'^%(acronym)s/approve/$' % settings.URL_REGEXPS, views.approve), - url(r'^%(acronym)s/cancel/$' % settings.URL_REGEXPS, views.cancel), - url(r'^%(acronym)s/confirm/$' % settings.URL_REGEXPS, views.confirm), - url(r'^%(acronym)s/edit/$' % settings.URL_REGEXPS, views.edit), - url(r'^%(acronym)s/new/$' % settings.URL_REGEXPS, views.new), - url(r'^%(acronym)s/no_session/$' % settings.URL_REGEXPS, views.no_session), - url(r'^(?P[A-Za-z0-9_\-\+]+)/%(acronym)s/edit/$' % settings.URL_REGEXPS, views.edit), -] diff --git a/ietf/secr/static/css/custom.css b/ietf/secr/static/css/custom.css index 8816b3f13d..8a622cba5d 100644 --- a/ietf/secr/static/css/custom.css +++ b/ietf/secr/static/css/custom.css @@ -319,11 +319,6 @@ input.draft-file-input { width: 4em; } -.draft-container #id_internal_comments { - height: 4em; - width: 40em; -} - .draft-container #id_abstract { height: 15em; width: 40em; @@ -842,4 +837,4 @@ td, th, li, h2 { thead th { font-size: 12px; -} \ No newline at end of file +} diff --git a/ietf/secr/static/js/dynamic_inlines.js b/ietf/secr/static/js/dynamic_inlines.js index a1abc2d0e5..d0753a3a63 100644 --- a/ietf/secr/static/js/dynamic_inlines.js +++ b/ietf/secr/static/js/dynamic_inlines.js @@ -1,71 +1,71 @@ -/* Following functions based off code written by Arne Brodowski -http://www.arnebrodowski.de/blog/507-Add-and-remove-Django-Admin-Inlines-with-JavaScript.html - -2012-02-01 customized for new Rolodex. Email formset doesn't have an id field, rather a "address" -field as primary key. Also for some reason the "active" boolean field doesn't get saved properly -if the checkbox input has an empty "value" argument. 
-*/ -import $ from 'jquery'; - -function increment_form_ids(el, to, name) { - var from = to-1 - $(':input', $(el)).each(function(i,e){ - var old_name = $(e).attr('name') - var old_id = $(e).attr('id') - $(e).attr('name', old_name.replace(from, to)) - $(e).attr('id', old_id.replace(from, to)) - if ($(e).attr('type') != 'checkbox') { - $(e).val('') - } - }) -} - -function add_inline_form(name) { - if (name=="email") { - var first = $('#id_'+name+'-0-address').parents('.inline-related') - } - else { - var first = $('#id_'+name+'-0-id').parents('.inline-related') - } - // check to see if this is a stacked or tabular inline - if (first.hasClass("tabular")) { - var field_table = first.parent().find('table > tbody') - const children = field_table.children('tr.dynamic-inline') - var count = children.length - const last = $(children[count-1]) - var copy = last.clone(true) - copy.removeClass("row1 row2") - copy.find("input[name$='address']").attr("readonly", false) - copy.addClass("row"+((count % 2) ? 2 : 1)) - copy.insertAfter(last) - increment_form_ids($(copy), count, name) - } - else { - var last = $(first).parent().children('.last-related') - var copy = $(last).clone(true) - var count = $(first).parent().children('.inline-related').length - $(last).removeClass('last-related') - var header = $('h3', copy) - header.html(header.html().replace("#"+count, "#"+(count+1))) - $(last).after(copy) - increment_form_ids($(first).parents('.inline-group').children('.last-related'), count, name) - } - $('input#id_'+name+'-TOTAL_FORMS').val(count+1) - return false; -} - -// Add all the "Add Another" links to the bottom of each inline group -$(function() { - var html_template = '' - $('.inline-group').each(function(i) { - //prefix is in the name of the input fields before the "-" - var prefix = $("input[type='hidden'][name!='csrfmiddlewaretoken']", this).attr("name").split("-")[0]; - $(this).append(html_template.replace("{{prefix}}", prefix)); - $('#addlink-' + prefix).on('click', () => add_inline_form(prefix)); - }) -}) +/* Following functions based off code written by Arne Brodowski +http://www.arnebrodowski.de/blog/507-Add-and-remove-Django-Admin-Inlines-with-JavaScript.html + +2012-02-01 customized for new Rolodex. Email formset doesn't have an id field, rather a "address" +field as primary key. Also for some reason the "active" boolean field doesn't get saved properly +if the checkbox input has an empty "value" argument. +*/ +import $ from 'jquery'; + +function increment_form_ids(el, to, name) { + var from = to-1 + $(':input', $(el)).each(function(i,e){ + var old_name = $(e).attr('name') + var old_id = $(e).attr('id') + $(e).attr('name', old_name.replace(from, to)) + $(e).attr('id', old_id.replace(from, to)) + if ($(e).attr('type') != 'checkbox') { + $(e).val('') + } + }) +} + +function add_inline_form(name) { + if (name=="email") { + var first = $('#id_'+name+'-0-address').parents('.inline-related') + } + else { + var first = $('#id_'+name+'-0-id').parents('.inline-related') + } + // check to see if this is a stacked or tabular inline + if (first.hasClass("tabular")) { + var field_table = first.parent().find('table > tbody') + const children = field_table.children('tr.dynamic-inline') + var count = children.length + const last = $(children[count-1]) + var copy = last.clone(true) + copy.removeClass("row1 row2") + copy.find("input[name$='address']").attr("readonly", false) + copy.addClass("row"+((count % 2) ? 
2 : 1)) + copy.insertAfter(last) + increment_form_ids($(copy), count, name) + } + else { + var last = $(first).parent().children('.last-related') + var copy = $(last).clone(true) + var count = $(first).parent().children('.inline-related').length + $(last).removeClass('last-related') + var header = $('h3', copy) + header.html(header.html().replace("#"+count, "#"+(count+1))) + $(last).after(copy) + increment_form_ids($(first).parents('.inline-group').children('.last-related'), count, name) + } + $('input#id_'+name+'-TOTAL_FORMS').val(count+1) + return false; +} + +// Add all the "Add Another" links to the bottom of each inline group +$(function() { + var html_template = '' + $('.inline-group').each(function(i) { + //prefix is in the name of the input fields before the "-" + var prefix = $("input[type='hidden'][name!='csrfmiddlewaretoken']", this).attr("name").split("-")[0]; + $(this).append(html_template.replace("{{prefix}}", prefix)); + $('#addlink-' + prefix).on('click', () => add_inline_form(prefix)); + }) +}) diff --git a/ietf/secr/telechat/tests.py b/ietf/secr/telechat/tests.py index e4661b767d..91ccde2187 100644 --- a/ietf/secr/telechat/tests.py +++ b/ietf/secr/telechat/tests.py @@ -13,6 +13,7 @@ IndividualDraftFactory, ConflictReviewFactory) from ietf.doc.models import BallotDocEvent, BallotType, BallotPositionDocEvent, State, Document from ietf.doc.utils import update_telechat, create_ballot_if_not_open +from ietf.meeting.factories import MeetingFactory from ietf.utils.test_utils import TestCase from ietf.utils.timezone import date_today, datetime_today from ietf.iesg.models import TelechatDate @@ -25,6 +26,26 @@ def augment_data(): TelechatDate.objects.create(date=date_today()) +class SecrUrlTests(TestCase): + def test_urls(self): + MeetingFactory(type_id='ietf', date=date_today()) + + # check public options + response = self.client.get("/secr/") + self.assertEqual(response.status_code, 200) + q = PyQuery(response.content) + links = q('div.secr-menu a') + self.assertEqual(len(links), 1) + self.assertEqual(PyQuery(links[0]).text(), 'Announcements') + + # check secretariat only options + self.client.login(username="secretary", password="secretary+password") + response = self.client.get("/secr/") + self.assertEqual(response.status_code, 200) + q = PyQuery(response.content) + links = q('div.secr-menu a') + self.assertEqual(len(links), 4) + class SecrTelechatTestCase(TestCase): def test_main(self): "Main Test" @@ -67,10 +88,8 @@ def test_doc_detail_draft(self): def test_doc_detail_draft_with_downref(self): ad = Person.objects.get(user__username="ad") draft = WgDraftFactory(ad=ad, intended_std_level_id='ps', states=[('draft-iesg','pub-req'),]) - rfc = IndividualRfcFactory.create(stream_id='irtf', other_aliases=['rfc6666',], - states=[('draft','rfc'),('draft-iesg','pub')], std_level_id='inf', ) - draft.relateddocument_set.create(target=rfc.docalias.get(name='rfc6666'), - relationship_id='refnorm') + rfc = IndividualRfcFactory.create(stream_id='irtf', rfc_number=6666, std_level_id='inf') + draft.relateddocument_set.create(target=rfc, relationship_id='refnorm') create_ballot_if_not_open(None, draft, ad, 'approve') d = get_next_telechat_date() date = d.strftime('%Y-%m-%d') @@ -237,7 +256,7 @@ def test_doc_detail_post_update_state_action_holder_automation(self): self.assertEqual(response.status_code,302) draft = Document.objects.get(name=draft.name) self.assertEqual(draft.get_state('draft-iesg').slug,'defer') - self.assertCountEqual(draft.action_holders.all(), [draft.ad] + draft.authors()) + 
self.assertCountEqual(draft.action_holders.all(), [draft.ad] + draft.author_persons()) self.assertEqual(draft.docevent_set.filter(type='changed_action_holders').count(), 1) # Removing need-rev should remove authors @@ -254,7 +273,7 @@ def test_doc_detail_post_update_state_action_holder_automation(self): # Setting to approved should remove all action holders # noinspection DjangoOrm - draft.action_holders.add(*(draft.authors())) # add() with through model ok in Django 2.2+ + draft.action_holders.add(*(draft.author_persons())) # add() with through model ok in Django 2.2+ response = self.client.post(url,{ 'submit': 'update_state', 'state': State.objects.get(type_id='draft-iesg', slug='approved').pk, diff --git a/ietf/secr/telechat/urls.py b/ietf/secr/telechat/urls.py index 0f2ff4aace..08c51eab5f 100644 --- a/ietf/secr/telechat/urls.py +++ b/ietf/secr/telechat/urls.py @@ -11,5 +11,4 @@ url(r'^(?P[0-9\-]+)/management/$', views.management), url(r'^(?P[0-9\-]+)/minutes/$', views.minutes), url(r'^(?P[0-9\-]+)/roll-call/$', views.roll_call), - url(r'^new/$', views.new), ] diff --git a/ietf/secr/telechat/views.py b/ietf/secr/telechat/views.py index f13a082f29..c39aecf748 100644 --- a/ietf/secr/telechat/views.py +++ b/ietf/secr/telechat/views.py @@ -17,7 +17,7 @@ from ietf.person.models import Person from ietf.doc.lastcall import request_last_call from ietf.doc.mails import email_state_changed -from ietf.iesg.models import TelechatDate, TelechatAgendaItem, Telechat +from ietf.iesg.models import TelechatDate, TelechatAgendaItem from ietf.iesg.agenda import agenda_data, get_doc_section from ietf.ietfauth.utils import role_required from ietf.secr.telechat.forms import BallotForm, ChangeStateForm, DateSelectForm, TELECHAT_TAGS @@ -175,7 +175,7 @@ def doc_detail(request, date, name): This view displays the ballot information for the document, and lets the user make changes to ballot positions and document state. ''' - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) if not is_doc_on_telechat(doc, date): messages.warning(request, 'Dcoument: {name} is not on the Telechat agenda for {date}'.format( name=doc.name, @@ -313,7 +313,7 @@ def doc_detail(request, date, name): # if this is a conflict review document add referenced document if doc.type_id == 'conflrev': - conflictdoc = doc.relateddocument_set.get(relationship__slug='conflrev').target.document + conflictdoc = doc.relateddocument_set.get(relationship__slug='conflrev').target else: conflictdoc = None @@ -342,7 +342,7 @@ def doc_navigate(request, date, name, nav): nav - [next|previous] which direction the user wants to navigate in the list of docs The view retrieves the appropriate document and redirects to the doc view. 
''' - doc = get_object_or_404(Document, docalias__name=name) + doc = get_object_or_404(Document, name=name) agenda = agenda_data(date=date) target = name @@ -419,18 +419,6 @@ def minutes(request, date): 'da_docs': da_docs}, ) -@role_required('Secretariat') -def new(request): - ''' - This view creates a new telechat agenda and redirects to the default view - ''' - if request.method == 'POST': - date = request.POST['date'] - # create legacy telechat record - Telechat.objects.create(telechat_date=date) - - messages.success(request,'New Telechat Agenda created') - return redirect('ietf.secr.telechat.views.doc', date=date) @role_required('Secretariat') def roll_call(request, date): diff --git a/ietf/secr/templates/announcement/confirm.html b/ietf/secr/templates/announcement/confirm.html index ddf2a6de6e..0e1f72c54b 100644 --- a/ietf/secr/templates/announcement/confirm.html +++ b/ietf/secr/templates/announcement/confirm.html @@ -1,22 +1,17 @@ -{% extends "base_site.html" %} +{# Copyright The IETF Trust 2024, All Rights Reserved #} +{% extends "base.html" %} {% load static %} - +{% load ietf_filters %} +{% load django_bootstrap5 %} {% block title %}Announcement{% endblock %} - -{% block extrahead %}{{ block.super }} - -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Announcement -{% endblock %} - {% block content %} +

    Announcement

    +

    Confirm Announcement

    -
    {% csrf_token %} + {% csrf_token %}
     To: {{ to }}
    @@ -29,15 +24,13 @@ 

    Confirm Announcement

    {{ message.body }}
    - {{ form }} -
    -
      -
    • -
    • -
    • -
    -
    + {% bootstrap_form form %} +
    + + + +
    diff --git a/ietf/secr/templates/announcement/index.html b/ietf/secr/templates/announcement/index.html new file mode 100644 index 0000000000..ad7226e3bc --- /dev/null +++ b/ietf/secr/templates/announcement/index.html @@ -0,0 +1,31 @@ +{# Copyright The IETF Trust 2024, All Rights Reserved #} +{% extends "base.html" %} +{% load static %} +{% load ietf_filters %} +{% load django_bootstrap5 %} +{% block title %}Announcement{% endblock %} +{% block content %} +

    Announcement

    + {% if form.non_field_errors %}
    {{ form.non_field_errors }}
    {% endif %} + +
    + {% csrf_token %} + {% bootstrap_field form.nomcom layout='horizontal' %} + {% bootstrap_field form.to layout='horizontal' %} + {% bootstrap_field form.to_custom layout='horizontal' %} + {% bootstrap_field form.frm layout='horizontal' %} + {% bootstrap_field form.cc layout='horizontal' %} + {% bootstrap_field form.bcc layout='horizontal' %} + {% bootstrap_field form.reply_to layout='horizontal' %} + {% bootstrap_field form.subject layout='horizontal' %} + {% bootstrap_field form.body layout='horizontal' %} + + + Cancel +
    + +{% endblock %} +{% block js %} + +{% endblock %} \ No newline at end of file diff --git a/ietf/secr/templates/announcement/main.html b/ietf/secr/templates/announcement/main.html deleted file mode 100644 index c88b4a2406..0000000000 --- a/ietf/secr/templates/announcement/main.html +++ /dev/null @@ -1,36 +0,0 @@ -{% extends "base_site.html" %} - -{% block title %}Announcement{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Announcement -{% endblock %} - -{% block content %} - -
    -

    Announcement

    - -
    {% csrf_token %} - - - {% if form.non_field_errors %}{{ form.non_field_errors }}{% endif %} - {% for field in form.visible_fields %} - - - - - {% endfor %} - -
    {{ field.label_tag }}{% if field.field.required %} *{% endif %}{{ field.errors }}{{ field }}{% if field.help_text %}
    {{ field.help_text }}{% endif %}
    -
    -
      -
    • -
    • -
    -
    - -
    -
    - -{% endblock %} diff --git a/ietf/secr/templates/includes/activities.html b/ietf/secr/templates/includes/activities.html deleted file mode 100644 index 3e79c9aed4..0000000000 --- a/ietf/secr/templates/includes/activities.html +++ /dev/null @@ -1,23 +0,0 @@ -

    Activities Log

    - diff --git a/ietf/secr/templates/includes/buttons_next_cancel.html b/ietf/secr/templates/includes/buttons_next_cancel.html deleted file mode 100644 index 95d25f55bc..0000000000 --- a/ietf/secr/templates/includes/buttons_next_cancel.html +++ /dev/null @@ -1,6 +0,0 @@ -
    -
      -
    • -
    • -
    -
    diff --git a/ietf/secr/templates/includes/buttons_submit_cancel.html b/ietf/secr/templates/includes/buttons_submit_cancel.html deleted file mode 100644 index df40c98255..0000000000 --- a/ietf/secr/templates/includes/buttons_submit_cancel.html +++ /dev/null @@ -1,6 +0,0 @@ -
    -
      -
    • -
    • -
    -
    diff --git a/ietf/secr/templates/includes/sessions_footer.html b/ietf/secr/templates/includes/sessions_footer.html deleted file mode 100755 index 2a26440047..0000000000 --- a/ietf/secr/templates/includes/sessions_footer.html +++ /dev/null @@ -1,5 +0,0 @@ - \ No newline at end of file diff --git a/ietf/secr/templates/includes/sessions_request_form.html b/ietf/secr/templates/includes/sessions_request_form.html deleted file mode 100755 index cd36fe45f1..0000000000 --- a/ietf/secr/templates/includes/sessions_request_form.html +++ /dev/null @@ -1,139 +0,0 @@ -* Required Field -
    {% csrf_token %} - {{ form.session_forms.management_form }} - {% if form.non_field_errors %} - {{ form.non_field_errors }} - {% endif %} - - - - - - {% if group.features.acts_like_wg %} - - {% if not is_virtual %} - - {% endif %} - - {% else %}{# else not group.features.acts_like_wg #} - {% for session_form in form.session_forms %} - - {% endfor %} - {% endif %} - - - - - - - - - - {% if not is_virtual %} - - - - - - - - - - - - - - - - - - - - - - - {% endif %} - - - - - - {% if form.notifications_optional %} - - - - - {% endif %} - -
    Working Group Name:{{ group.name }} ({{ group.acronym }})
    Area Name:{% if group.parent %}{{ group.parent.name }} ({{ group.parent.acronym }}){% endif %}
    Number of Sessions:*{{ form.num_session.errors }}{{ form.num_session }}
    Session 1:*{% include 'meeting/session_details_form.html' with form=form.session_forms.0 hide_onsite_tool_prompt=True only %}
    Session 2:*{% include 'meeting/session_details_form.html' with form=form.session_forms.1 hide_onsite_tool_prompt=True only %}
    Time between two sessions:{{ form.session_time_relation.errors }}{{ form.session_time_relation }}
    Additional Session Request:{{ form.third_session }} Check this box to request an additional session.
    - Additional slot may be available after agenda scheduling has closed and with the approval of an Area Director.
    -
    - Third Session: - {% include 'meeting/session_details_form.html' with form=form.session_forms.2 hide_onsite_tool_prompt=True only %} -
    -
    Session {{ forloop.counter }}:*{% include 'meeting/session_details_form.html' with form=session_form only %}
    Number of Attendees:{% if not is_virtual %}*{% endif %}{{ form.attendees.errors }}{{ form.attendees }}
    Participants who must be present: - {{ form.bethere.errors }} - {{ form.bethere }} -

    - Do not include Area Directors and WG Chairs; the system already tracks their availability. -

    -
    Conflicts to Avoid: - - - - - - - {% for cname, cfield, cselector in form.wg_constraint_fields %} - - {% if forloop.first %}{% endif %} - - - - {% empty %}{# shown if there are no constraint fields #} - - {% endfor %} - {% if form.inactive_wg_constraints %} - {% for cname, value, field in form.inactive_wg_constraints %} - - {% if forloop.first %} - - {% endif %} - - - - {% endfor %} - {% endif %} - - - - - -
    Other WGs that included {{ group.name }} in their conflict lists:{{ session_conflicts.inbound|default:"None" }}
    WG Sessions:
    You may select multiple WGs within each category
    {{ cname|title }}{{ cselector }} -
    - {{ cfield.errors }}{{ cfield }} -
    No constraints are enabled for this meeting.
    - Disabled for this meeting - {{ cname|title }}
    {{ field }} {{ field.label }}
    BOF Sessions:If the sessions can not be found in the fields above, please enter free form requests in the Special Requests field below.
    -
    Resources requested: - {{ form.resources.errors }} {{ form.resources }} -
    Times during which this WG can not meet:
    Please explain any selections in Special Requests below.
    {{ form.timeranges.errors }}{{ form.timeranges }}
    - Plan session adjacent with another WG:
    - (Immediately before or after another WG, no break in between, in the same room.) -
    {{ form.adjacent_with_wg.errors }}{{ form.adjacent_with_wg }}
    - Joint session with:
    - (To request one session for multiple WGs together.) -
    {{ form.joint_with_groups_selector }} -
    - {{ form.joint_with_groups.errors }}{{ form.joint_with_groups }} -
    - Of the sessions requested by this WG, the joint session, if applicable, is: - {{ form.joint_for_session.errors }}{{ form.joint_for_session }}
    Special Requests:
     
    i.e. restrictions on meeting times / days, etc.
    (limit 200 characters)
    {{ form.comments.errors }}{{ form.comments }}
    {{ form.send_notifications.label }}{{ form.send_notifications.errors }}{{ form.send_notifications }}
    - -
    -
      -
    • -
    • -
    -
    -
    \ No newline at end of file diff --git a/ietf/secr/templates/includes/sessions_request_view.html b/ietf/secr/templates/includes/sessions_request_view.html deleted file mode 100644 index bc6aef0611..0000000000 --- a/ietf/secr/templates/includes/sessions_request_view.html +++ /dev/null @@ -1,73 +0,0 @@ -{% load ams_filters %} - - - - - - {% if form %} - {% include 'includes/sessions_request_view_formset.html' with formset=form.session_forms group=group session=session only %} - {% else %} - {% include 'includes/sessions_request_view_session_set.html' with session_set=sessions group=group session=session only %} - {% endif %} - - - - - - - - - - {% if not is_virtual %} - - - - - {% endif %} - - - - - - - - - {% if not is_virtual %} - - - - - - - - - {% endif %} - - {% if form and form.notifications_optional %} - - - - - {% endif %} - -
    Working Group Name:{{ group.name }} ({{ group.acronym }})
    Area Name:{{ group.parent }}
    Number of Sessions Requested:{% if session.third_session %}3{% else %}{{ session.num_session }}{% endif %}
    Number of Attendees:{{ session.attendees }}
    Conflicts to Avoid: - {% if session_conflicts.outbound %} - - - {% for conflict in session_conflicts.outbound %} - - {% endfor %} - -
    {{ conflict.name|title }}: {{ conflict.groups }}
    - {% else %}None{% endif %} -
    Other WGs that included {{ group }} in their conflict list:{% if session_conflicts.inbound %}{{ session_conflicts.inbound }}{% else %}None so far{% endif %}
    Resources requested:{% if session.resources %}
      {% for resource in session.resources %}
    • {{ resource.desc }}
    • {% endfor %}
    {% else %}None so far{% endif %}
    Participants who must be present:{% if session.bethere %}
      {% for person in session.bethere %}
    • {{ person }}
    • {% endfor %}
    {% else %}None{% endif %}
    Can not meet on:{% if session.timeranges_display %}{{ session.timeranges_display|join:', ' }}{% else %}No constraints{% endif %}
    Adjacent with WG:{{ session.adjacent_with_wg|default:'No preference' }}
    Joint session: - {% if session.joint_with_groups %} - {{ session.joint_for_session_display }} with: {{ session.joint_with_groups }} - {% else %} - Not a joint session - {% endif %} -
    Special Requests:{{ session.comments }}
    - {{ form.send_notifications.label}} - - {% if form.cleaned_data.send_notifications %}Yes{% else %}No{% endif %} -
    \ No newline at end of file diff --git a/ietf/secr/templates/includes/sessions_request_view_formset.html b/ietf/secr/templates/includes/sessions_request_view_formset.html deleted file mode 100644 index 80cad8d829..0000000000 --- a/ietf/secr/templates/includes/sessions_request_view_formset.html +++ /dev/null @@ -1,32 +0,0 @@ -{% load ams_filters %}{# keep this in sync with sessions_request_view_session_set.html #} -{% for sess_form in formset %}{% if sess_form.cleaned_data and not sess_form.cleaned_data.DELETE %} - - Session {{ forloop.counter }}: - -
    -
    Length
    -
    {{ sess_form.cleaned_data.requested_duration.total_seconds|display_duration }}
    - {% if sess_form.cleaned_data.name %} -
    Name
    -
    {{ sess_form.cleaned_data.name }}
    {% endif %} - {% if sess_form.cleaned_data.purpose.slug != 'regular' %} -
    Purpose
    -
    - {{ sess_form.cleaned_data.purpose }} - {% if sess_form.cleaned_data.purpose.timeslot_types|length > 1 %}({{ sess_form.cleaned_data.type }} - ){% endif %} -
    -
    Onsite tool?
    -
    {{ sess_form.cleaned_data.has_onsite_tool|yesno }}
    - {% endif %} -
    - - - {% if group.features.acts_like_wg and forloop.counter == 2 and not is_virtual %} - - Time between sessions: - {% if session.session_time_relation_display %}{{ session.session_time_relation_display }}{% else %}No - preference{% endif %} - - {% endif %} -{% endif %}{% endfor %} \ No newline at end of file diff --git a/ietf/secr/templates/includes/sessions_request_view_session_set.html b/ietf/secr/templates/includes/sessions_request_view_session_set.html deleted file mode 100644 index a434b9d22b..0000000000 --- a/ietf/secr/templates/includes/sessions_request_view_session_set.html +++ /dev/null @@ -1,32 +0,0 @@ -{% load ams_filters %}{# keep this in sync with sessions_request_view_formset.html #} -{% for sess in session_set %} - - Session {{ forloop.counter }}: - -
    -
    Length
    -
    {{ sess.requested_duration.total_seconds|display_duration }}
    - {% if sess.name %} -
    Name
    -
    {{ sess.name }}
    {% endif %} - {% if sess.purpose.slug != 'regular' %} -
    Purpose
    -
    - {{ sess.purpose }} - {% if sess.purpose.timeslot_types|length > 1 %}({{ sess.type }} - ){% endif %} -
    -
    Onsite tool?
    -
    {{ sess.has_onsite_tool|yesno }}
    - {% endif %} -
    - - - {% if group.features.acts_like_wg and forloop.counter == 2 and not is_virtual %} - - Time between sessions: - {% if session.session_time_relation_display %}{{ session.session_time_relation_display }}{% else %}No - preference{% endif %} - - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/ietf/secr/templates/index.html b/ietf/secr/templates/index.html new file mode 100644 index 0000000000..9ea7021279 --- /dev/null +++ b/ietf/secr/templates/index.html @@ -0,0 +1,31 @@ +{# Copyright The IETF Trust 2007-2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static %} +{% load ietf_filters %} +{% block title %}Secretariat Dashboard{% endblock %} +{% block content %} +

    Secretariat Dashboard

    +
    + {% if user|has_role:"Secretariat" %} +

    IESG

    + + +

    IDs and WGs Process

    + + +

    Meetings and Proceedings

    + + {% else %} + + {% endif %} +
    +{% endblock %} \ No newline at end of file diff --git a/ietf/secr/templates/main.html b/ietf/secr/templates/main.html deleted file mode 100644 index 42d6e8f6a1..0000000000 --- a/ietf/secr/templates/main.html +++ /dev/null @@ -1,69 +0,0 @@ -{% extends "base_site.html" %} -{% load ietf_filters %} - -{% block content %} -
    - - {% if user|has_role:"Secretariat" %} - - - - - - - - - - - - - - - {% else %} - - - - - - - - - - - - - - - {% endif %} - -
    -{% endblock %} \ No newline at end of file diff --git a/ietf/secr/templates/sreq/confirm.html b/ietf/secr/templates/sreq/confirm.html deleted file mode 100755 index 025375af32..0000000000 --- a/ietf/secr/templates/sreq/confirm.html +++ /dev/null @@ -1,57 +0,0 @@ -{% extends "base_site.html" %} -{% load static %} - -{% block title %}Sessions - Confirm{% endblock %} - -{% block extrastyle %} - -{% endblock %} - -{% block extrahead %}{{ block.super }} - - {{ form.media }} -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions - » New - » Session Request Confirmation -{% endblock %} - -{% block content %} - -
    -

    Sessions - Confirm

    - - {% include "includes/sessions_request_view.html" %} - - {% if group.features.acts_like_wg and form.session_forms.forms_to_keep|length > 2 %} -
    -

    - - Note: Your request for a third session must be approved by an area director before - being submitted to agenda@ietf.org. Click "Submit" below to email an approval - request to the area directors. - -

    -
    - {% endif %} - -
    - {% csrf_token %} - {{ form }} - {{ form.session_forms.management_form }} - {% for sf in form.session_forms %} - {% include 'meeting/session_details_form.html' with form=sf hidden=True only %} - {% endfor %} - {% include "includes/buttons_submit_cancel.html" %} -
    - -
    - -{% endblock %} \ No newline at end of file diff --git a/ietf/secr/templates/sreq/edit.html b/ietf/secr/templates/sreq/edit.html deleted file mode 100755 index f6e62104b0..0000000000 --- a/ietf/secr/templates/sreq/edit.html +++ /dev/null @@ -1,39 +0,0 @@ -{% extends "base_site.html" %} -{% load static %} -{% block title %}Sessions - Edit{% endblock %} - -{% block extrahead %}{{ block.super }} - - - {{ form.media }} - -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions - » {{ group.acronym }} - » Edit -{% endblock %} - -{% block instructions %} - Instructions -{% endblock %} - -{% block content %} -
    -

    IETF {{ meeting.number }}: Edit Session Request

    - -
    -{% endblock %} - -{% block footer-extras %} - {% include "includes/sessions_footer.html" %} -{% endblock %} \ No newline at end of file diff --git a/ietf/secr/templates/sreq/locked.html b/ietf/secr/templates/sreq/locked.html deleted file mode 100755 index c27cf578ed..0000000000 --- a/ietf/secr/templates/sreq/locked.html +++ /dev/null @@ -1,30 +0,0 @@ -{% extends "base_site.html" %} -{% load static %} - -{% block title %}Sessions{% endblock %} - -{% block extrahead %}{{ block.super }} - -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions (Locked) -{% endblock %} - -{% block content %} -

    » View list of timeslot requests

    -
    -

    Sessions - Status

    - -

    {{ message }}

    - -
    -
      -
    • -
    -
    - - -
    - -{% endblock %} \ No newline at end of file diff --git a/ietf/secr/templates/sreq/main.html b/ietf/secr/templates/sreq/main.html deleted file mode 100755 index a6695cd4f3..0000000000 --- a/ietf/secr/templates/sreq/main.html +++ /dev/null @@ -1,65 +0,0 @@ -{% extends "base_site.html" %} -{% load ietf_filters %} -{% load static %} - -{% block title %}Sessions{% endblock %} - -{% block extrahead %}{{ block.super }} - -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions -{% endblock %} -{% block instructions %} - Instructions -{% endblock %} - -{% block content %} -

    » View list of timeslot requests

    -
    -

    - Sessions Request Tool: IETF {{ meeting.number }} - {% if user|has_role:"Secretariat" %} - {% if is_locked %} - Tool Status: Locked - {% else %} - Tool Status: Unlocked - {% endif %} - {% endif %} -

    - -
    - -
    - -{% endblock %} - -{% block footer-extras %} - {% include "includes/sessions_footer.html" %} -{% endblock %} \ No newline at end of file diff --git a/ietf/secr/templates/sreq/new.html b/ietf/secr/templates/sreq/new.html deleted file mode 100755 index 3f46e6f897..0000000000 --- a/ietf/secr/templates/sreq/new.html +++ /dev/null @@ -1,43 +0,0 @@ -{% extends "base_site.html" %} -{% load static %} - -{% block title %}Sessions- New{% endblock %} - -{% block extrahead %}{{ block.super }} - - - {{ form.media }} - -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions - » New Session Request -{% endblock %} - -{% block instructions %} - Instructions -{% endblock %} - -{% block content %} -
    -

    IETF {{ meeting.number }}: New Session Request

    - - {% include "includes/sessions_request_form.html" %} - -
    - -{% endblock %} - -{% block footer-extras %} - {% include "includes/sessions_footer.html" %} -{% endblock %} \ No newline at end of file diff --git a/ietf/secr/templates/sreq/session_cancel_notification.txt b/ietf/secr/templates/sreq/session_cancel_notification.txt deleted file mode 100644 index 3e6dd43f69..0000000000 --- a/ietf/secr/templates/sreq/session_cancel_notification.txt +++ /dev/null @@ -1,4 +0,0 @@ -{% load ams_filters %} - -A request to cancel a meeting session has just been submitted by {{ requester }}. - diff --git a/ietf/secr/templates/sreq/session_request_notification.txt b/ietf/secr/templates/sreq/session_request_notification.txt deleted file mode 100644 index a41f202447..0000000000 --- a/ietf/secr/templates/sreq/session_request_notification.txt +++ /dev/null @@ -1,5 +0,0 @@ -{% load ams_filters %} - -{% filter wordwrap:78 %}{{ header }} meeting session request has just been submitted by {{ requester }}.{% endfilter %} - -{% include "includes/session_info.txt" %} diff --git a/ietf/secr/templates/sreq/tool_status.html b/ietf/secr/templates/sreq/tool_status.html deleted file mode 100755 index b91e73a129..0000000000 --- a/ietf/secr/templates/sreq/tool_status.html +++ /dev/null @@ -1,42 +0,0 @@ -{% extends "base_site.html" %} -{% load static %} - -{% block title %}Sessions{% endblock %} - -{% block extrahead %}{{ block.super }} - -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions - » Session Status -{% endblock %} - -{% block content %} - -
    -

    Sessions - Status

    -

    Enter the message that you would like displayed to the WG Chair when this tool is locked.

    -
    {% csrf_token %} - - - - {{ form.as_table }} - -
    -
    -
      - {% if is_locked %} -
    • - {% else %} -
    • - {% endif %} -
    • -
    -
    - -
    - -
    - -{% endblock %} diff --git a/ietf/secr/templates/sreq/view.html b/ietf/secr/templates/sreq/view.html deleted file mode 100644 index 9a0a3b01c1..0000000000 --- a/ietf/secr/templates/sreq/view.html +++ /dev/null @@ -1,55 +0,0 @@ -{% extends "base_site.html" %} -{% load static %} - -{% block title %}Sessions - View{% endblock %} - -{% block extrahead %}{{ block.super }} - -{% endblock %} - -{% block extrastyle %} - -{% endblock %} - -{% block breadcrumbs %}{{ block.super }} - » Sessions - » {{ group.acronym }} -{% endblock %} - -{% block instructions %} - Instructions -{% endblock %} - -{% block content %} - -
    -

    Sessions - View (meeting: {{ meeting.number }})

    - - {% include "includes/sessions_request_view.html" %} - -
    - - {% include "includes/activities.html" %} - -
    -
      -
    • - {% if show_approve_button %} -
    • - {% endif %} -
    • -
    • -
    -
    -
    - -{% endblock %} - -{% block footer-extras %} - {% include "includes/sessions_footer.html" %} -{% endblock %} diff --git a/ietf/secr/templates/telechat/doc.html b/ietf/secr/templates/telechat/doc.html index 9d37db4cb0..6727e157f5 100644 --- a/ietf/secr/templates/telechat/doc.html +++ b/ietf/secr/templates/telechat/doc.html @@ -85,13 +85,13 @@

    Ballot Writeup

    {% if downrefs %}

    Downward References

    {% for ref in downrefs %} -

    Add {{ref.target.document.canonical_name}} - ({{ref.target.document.std_level}} - {{ref.target.document.stream.desc}}) +

    Add {{ref.target.name}} + ({{ref.target.std_level}} - {{ref.target.stream.desc}} stream) to downref registry.
    - {% if not ref.target.document.std_level %} + {% if not ref.target.std_level %} +++ Warning: The standards level has not been set yet!!!
    {% endif %} - {% if not ref.target.document.stream %} + {% if not ref.target.stream %} +++ Warning: document stream has not been set yet!!!
    {% endif %} {% endfor %}

    diff --git a/ietf/secr/templates/telechat/group.html b/ietf/secr/templates/telechat/group.html index 890c451e83..4e04f0e16e 100644 --- a/ietf/secr/templates/telechat/group.html +++ b/ietf/secr/templates/telechat/group.html @@ -3,7 +3,7 @@ Does anyone have an objection to the creation of this working group being sent for EXTERNAL REVIEW?

    External Review APPROVED; "The Secretariat will send a Working Group Review announcement with a copy to new-work and place it back on the agenda for the next telechat."

    External Review NOT APPROVED; -
    +
    The Secretariat will wait for instructions from
    The IESG decides the document needs more time in INTERNAL REVIEW. The Secretariat will put it back on the agenda for the next teleconference in the same category.
    The IESG has made changes since the charter was seen in INTERNAL REVIEW, and decides to send it back to INTERNAL REVIEW the charter again. diff --git a/ietf/secr/urls.py b/ietf/secr/urls.py index 0ce14a449a..ab21046654 100644 --- a/ietf/secr/urls.py +++ b/ietf/secr/urls.py @@ -1,11 +1,22 @@ +# Copyright The IETF Trust 2025, All Rights Reserved + +from django.conf import settings from django.urls import re_path, include from django.views.generic import TemplateView +from django.views.generic.base import RedirectView urlpatterns = [ - re_path(r'^$', TemplateView.as_view(template_name='main.html')), + re_path(r'^$', TemplateView.as_view(template_name='index.html'), name='ietf.secr'), re_path(r'^announcement/', include('ietf.secr.announcement.urls')), re_path(r'^meetings/', include('ietf.secr.meetings.urls')), re_path(r'^rolodex/', include('ietf.secr.rolodex.urls')), - re_path(r'^sreq/', include('ietf.secr.sreq.urls')), + # remove these redirects after 125 + re_path(r'^sreq/$', RedirectView.as_view(url='/meeting/session/request/', permanent=True)), + re_path(r'^sreq/%(acronym)s/$' % settings.URL_REGEXPS, RedirectView.as_view(url='/meeting/session/request/%(acronym)s/view/', permanent=True)), + re_path(r'^sreq/%(acronym)s/edit/$' % settings.URL_REGEXPS, RedirectView.as_view(url='/meeting/session/request/%(acronym)s/edit/', permanent=True)), + re_path(r'^sreq/%(acronym)s/new/$' % settings.URL_REGEXPS, RedirectView.as_view(url='/meeting/session/request/%(acronym)s/new/', permanent=True)), + re_path(r'^sreq/(?P[A-Za-z0-9_\-\+]+)/%(acronym)s/view/$' % settings.URL_REGEXPS, RedirectView.as_view(url='/meeting/%(num)s/session/request/%(acronym)s/view/', permanent=True)), + re_path(r'^sreq/(?P[A-Za-z0-9_\-\+]+)/%(acronym)s/edit/$' % settings.URL_REGEXPS, RedirectView.as_view(url='/meeting/%(num)s/session/request/%(acronym)s/edit/', permanent=True)), + # --------------------------------- re_path(r'^telechat/', include('ietf.secr.telechat.urls')), ] diff --git a/ietf/secr/utils/document.py b/ietf/secr/utils/document.py index 0a34512a17..361bf836df 100644 --- a/ietf/secr/utils/document.py +++ b/ietf/secr/utils/document.py @@ -13,15 +13,6 @@ def get_full_path(doc): return None return os.path.join(doc.get_file_path(), doc.uploaded_filename) -def get_rfc_num(doc): - qs = doc.docalias.filter(name__startswith='rfc') - return qs[0].name[3:] if qs else None - -def is_draft(doc): - if doc.docalias.filter(name__startswith='rfc'): - return False - else: - return True def get_start_date(doc): ''' diff --git a/ietf/secr/utils/group.py b/ietf/secr/utils/group.py deleted file mode 100644 index a4c1c0f98a..0000000000 --- a/ietf/secr/utils/group.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright The IETF Trust 2013-2020, All Rights Reserved -# -*- coding: utf-8 -*- - - -# Python imports -import io -import os - -# Django imports -from django.conf import settings -from django.core.exceptions import ObjectDoesNotExist - -# Datatracker imports -from ietf.group.models import Group -from ietf.ietfauth.utils import has_role - - - - -def current_nomcom(): - qs = Group.objects.filter(acronym__startswith='nomcom',state__slug="active").order_by('-time') - if qs.count(): - return qs[0] - else: - return None - -def get_charter_text(group): - ''' - Takes a group object and returns the text or the group's charter as a string - ''' - charter = group.charter - path = os.path.join(settings.CHARTER_PATH, '%s-%s.txt' % (charter.canonical_name(), charter.rev)) - f = io.open(path,'r') - text = f.read() - f.close() - - return text - -def 
get_my_groups(user,conclude=False): - ''' - Takes a Django user object (from request) - Returns a list of groups the user has access to. Rules are as follows - secretariat - has access to all groups - area director - has access to all groups in their area - wg chair or secretary - has access to their own group - chair of irtf has access to all irtf groups - - If user=None than all groups are returned. - concluded=True means include concluded groups. Need this to upload materials for groups - after they've been concluded. it happens. - ''' - my_groups = set() - states = ['bof','proposed','active'] - if conclude: - states.extend(['conclude','bof-conc']) - - all_groups = Group.objects.filter(type__features__has_meetings=True, state__in=states).order_by('acronym') - if user == None or has_role(user,'Secretariat'): - return all_groups - - try: - person = user.person - except ObjectDoesNotExist: - return list() - - for group in all_groups: - if group.role_set.filter(person=person,name__in=('chair','secr','ad')): - my_groups.add(group) - continue - if group.parent and group.parent.role_set.filter(person=person,name__in=('ad','chair')): - my_groups.add(group) - continue - - return list(my_groups) diff --git a/ietf/settings.py b/ietf/settings.py index 34076f3299..50e069ff1a 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2007-2023, All Rights Reserved +# Copyright The IETF Trust 2007-2026, All Rights Reserved # -*- coding: utf-8 -*- @@ -9,31 +9,42 @@ import os import sys import datetime +import pathlib import warnings from hashlib import sha384 from typing import Any, Dict, List, Tuple # pyflakes:ignore +from django.http import UnreadablePostError +# DeprecationWarnings are suppressed by default, enable them warnings.simplefilter("always", DeprecationWarning) -warnings.filterwarnings("ignore", message="pkg_resources is deprecated as an API") -warnings.filterwarnings("ignore", "Log out via GET requests is deprecated") # happens in oidc_provider -warnings.filterwarnings("ignore", module="tastypie", message="The django.utils.datetime_safe module is deprecated.") -warnings.filterwarnings("ignore", module="oidc_provider", message="The django.utils.timezone.utc alias is deprecated.") + +# Warnings that must be resolved for Django 5.x +warnings.filterwarnings("ignore", "Log out via GET requests is deprecated") # caused by oidc_provider +warnings.filterwarnings("ignore", message="The django.utils.timezone.utc alias is deprecated.", module="oidc_provider") +warnings.filterwarnings("ignore", message="The django.utils.datetime_safe module is deprecated.", module="tastypie") warnings.filterwarnings("ignore", message="The USE_DEPRECATED_PYTZ setting,") # https://github.com/ietf-tools/datatracker/issues/5635 +warnings.filterwarnings("ignore", message="The is_dst argument to make_aware\\(\\)") # caused by django-filters when USE_DEPRECATED_PYTZ is true warnings.filterwarnings("ignore", message="The USE_L10N setting is deprecated.") # https://github.com/ietf-tools/datatracker/issues/5648 warnings.filterwarnings("ignore", message="django.contrib.auth.hashers.CryptPasswordHasher is deprecated.") # https://github.com/ietf-tools/datatracker/issues/5663 -warnings.filterwarnings("ignore", message="'urllib3\\[secure\\]' extra is deprecated") -warnings.filterwarnings("ignore", message="The logout\\(\\) view is superseded by") + +# Other DeprecationWarnings +warnings.filterwarnings("ignore", message="pkg_resources is deprecated as an API", module="pyang.plugin") 
warnings.filterwarnings("ignore", message="Report.file_reporters will no longer be available in Coverage.py 4.2", module="coverage.report") -warnings.filterwarnings("ignore", message="Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated", module="bleach") -warnings.filterwarnings("ignore", message="HTTPResponse.getheader\\(\\) is deprecated", module='selenium.webdriver') -try: - import syslog - syslog.openlog(str("datatracker"), syslog.LOG_PID, syslog.LOG_USER) -except ImportError: - pass +warnings.filterwarnings("ignore", message="currentThread\\(\\) is deprecated", module="coverage.pytracer") +warnings.filterwarnings("ignore", message="co_lnotab is deprecated", module="coverage.parser") +warnings.filterwarnings("ignore", message="datetime.datetime.utcnow\\(\\) is deprecated", module="botocore.auth") +warnings.filterwarnings("ignore", message="datetime.datetime.utcnow\\(\\) is deprecated", module="oic.utils.time_util") +warnings.filterwarnings("ignore", message="datetime.datetime.utcfromtimestamp\\(\\) is deprecated", module="oic.utils.time_util") +warnings.filterwarnings("ignore", message="datetime.datetime.utcfromtimestamp\\(\\) is deprecated", module="pytz.tzinfo") +warnings.filterwarnings("ignore", message="'instantiateVariableFont' is deprecated", module="weasyprint") + -BASE_DIR = os.path.dirname(os.path.abspath(__file__)) -sys.path.append(os.path.abspath(BASE_DIR + "/..")) +base_path = pathlib.Path(__file__).resolve().parent +BASE_DIR = str(base_path) + +project_path = base_path.parent +PROJECT_DIR = str(project_path) +sys.path.append(PROJECT_DIR) from ietf import __version__ import debug @@ -66,6 +77,26 @@ 'django.contrib.auth.hashers.CryptPasswordHasher', ] + +PASSWORD_POLICY_MIN_LENGTH = 12 +PASSWORD_POLICY_ENFORCE_AT_LOGIN = False # should turn this on for prod + +AUTH_PASSWORD_VALIDATORS = [ + { + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", + "OPTIONS": { + "min_length": PASSWORD_POLICY_MIN_LENGTH, + } + }, + { + "NAME": "ietf.ietfauth.password_validation.StrongPasswordValidator", + }, +] +# In dev environments, settings_local overrides the password validators. Save +# a handle to the original value so settings_test can restore it so tests match +# production. +ORIG_AUTH_PASSWORD_VALIDATORS = AUTH_PASSWORD_VALIDATORS + ALLOWED_HOSTS = [".ietf.org", ".ietf.org.", "209.208.19.216", "4.31.198.44", "127.0.0.1", "localhost", ] # Server name of the tools server @@ -125,6 +156,10 @@ # In the future (relative to 4.2), the default will become 'django.db.models.BigAutoField.' DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' +# OIDC configuration +_SITE_URL = os.environ.get("OIDC_SITE_URL", None) +if _SITE_URL is not None: + SITE_URL = _SITE_URL if SERVER_MODE == 'production': MEDIA_ROOT = '/a/www/www6s/lib/dt/media/' @@ -169,7 +204,7 @@ STATIC_URL = "/static/" STATIC_ROOT = os.path.abspath(BASE_DIR + "/../static/") else: - STATIC_URL = "https://www.ietf.org/lib/dt/%s/"%__version__ + STATIC_URL = "https://static.ietf.org/dt/%s/"%__version__ STATIC_ROOT = "/a/www/www6s/lib/dt/%s/"%__version__ # List of finder classes that know how to find static files in @@ -184,162 +219,133 @@ # Server-side static.ietf.org URL (used in pdfized) STATIC_IETF_ORG_INTERNAL = STATIC_IETF_ORG +ENABLE_BLOBSTORAGE = True + +# "standard" retry mode is used, which does exponential backoff with a base factor of 2 +# and a cap of 20. 
+BLOBSTORAGE_MAX_ATTEMPTS = 5 # boto3 default is 3 (for "standard" retry mode) +BLOBSTORAGE_CONNECT_TIMEOUT = 10 # seconds; boto3 default is 60 +BLOBSTORAGE_READ_TIMEOUT = 10 # seconds; boto3 default is 60 + +# Caching for agenda data in seconds +AGENDA_CACHE_TIMEOUT_DEFAULT = 8 * 24 * 60 * 60 # 8 days +AGENDA_CACHE_TIMEOUT_CURRENT_MEETING = 6 * 60 # 6 minutes + + WSGI_APPLICATION = "ietf.wsgi.application" AUTHENTICATION_BACKENDS = ( 'ietf.ietfauth.backends.CaseInsensitiveModelBackend', ) -FILE_UPLOAD_PERMISSIONS = 0o644 +FILE_UPLOAD_PERMISSIONS = 0o644 -# ------------------------------------------------------------------------ -# Django/Python Logging Framework Modifications +FIRST_V3_RFC = 8650 -# Filter out "Invalid HTTP_HOST" emails -# Based on http://www.tiwoc.de/blog/2013/03/django-prevent-email-notification-on-suspiciousoperation/ -from django.core.exceptions import SuspiciousOperation -def skip_suspicious_operations(record): - if record.exc_info: - exc_value = record.exc_info[1] - if isinstance(exc_value, SuspiciousOperation): - return False - return True -# Filter out UreadablePostError: -from django.http import UnreadablePostError +# +# Logging config +# + +# Callback to filter out UnreadablePostError: def skip_unreadable_post(record): if record.exc_info: - exc_type, exc_value = record.exc_info[:2] # pylint: disable=unused-variable + exc_type, exc_value = record.exc_info[:2] # pylint: disable=unused-variable if isinstance(exc_value, UnreadablePostError): return False return True -# Copied from DEFAULT_LOGGING as of Django 1.10.5 on 22 Feb 2017, and modified -# to incorporate html logging, invalid http_host filtering, and more. -# Changes from the default has comments. - -# The Python logging flow is as follows: -# (see https://docs.python.org/2.7/howto/logging.html#logging-flow) -# -# Init: get a Logger: logger = logging.getLogger(name) -# -# Logging call, e.g. 
logger.error(level, msg, *args, exc_info=(...), extra={...}) -# --> Logger (discard if level too low for this logger) -# (create log record from level, msg, args, exc_info, extra) -# --> Filters (discard if any filter attach to logger rejects record) -# --> Handlers (discard if level too low for handler) -# --> Filters (discard if any filter attached to handler rejects record) -# --> Formatter (format log record and emit) -# - LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - # - 'loggers': { - 'django': { - 'handlers': ['debug_console', 'mail_admins'], - 'level': 'INFO', + "version": 1, + "disable_existing_loggers": False, + "loggers": { + "celery": { + "handlers": ["console"], + "level": "INFO", }, - 'django.request': { - 'handlers': ['debug_console'], - 'level': 'ERROR', + "datatracker": { + "handlers": ["console"], + "level": "INFO", }, - 'django.server': { - 'handlers': ['django.server'], - 'level': 'INFO', + "django": { + "handlers": ["console", "mail_admins"], + "level": "INFO", }, - 'django.security': { - 'handlers': ['debug_console', ], - 'level': 'INFO', + "django.request": {"level": "ERROR"}, # only log 5xx, ignore 4xx + "django.security": { + # SuspiciousOperation errors - log to console only + "handlers": ["console"], + "propagate": False, # no further handling please }, - 'oidc_provider': { - 'handlers': ['debug_console', ], - 'level': 'DEBUG', - }, - }, - # - # No logger filters - # - 'handlers': { - 'console': { - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'plain', + "django.server": { + # Only used by Django's runserver development server + "handlers": ["django.server"], + "level": "INFO", }, - 'syslog': { - 'level': 'DEBUG', - 'class': 'logging.handlers.SysLogHandler', - 'facility': 'user', - 'formatter': 'plain', - 'address': '/dev/log', + "oidc_provider": { + "handlers": ["console"], + "level": "DEBUG", }, - 'debug_console': { - # Active only when DEBUG=True - 'level': 'DEBUG', - 'filters': ['require_debug_true'], - 'class': 'logging.StreamHandler', - 'formatter': 'plain', + }, + "handlers": { + "console": { + "level": "DEBUG", + "class": "logging.StreamHandler", + "formatter": "plain", + }, + "debug_console": { + "level": "DEBUG", + "filters": ["require_debug_true"], + "class": "logging.StreamHandler", + "formatter": "plain", }, - 'django.server': { - 'level': 'INFO', - 'class': 'logging.StreamHandler', - 'formatter': 'django.server', + "django.server": { + "level": "INFO", + "class": "logging.StreamHandler", + "formatter": "django.server", }, - 'mail_admins': { - 'level': 'ERROR', - 'filters': [ - 'require_debug_false', - 'skip_suspicious_operations', # custom - 'skip_unreadable_posts', # custom + "mail_admins": { + "level": "ERROR", + "filters": [ + "require_debug_false", + "skip_unreadable_posts", ], - 'class': 'django.utils.log.AdminEmailHandler', - 'include_html': True, # non-default - } + "class": "django.utils.log.AdminEmailHandler", + "include_html": True, + }, }, - # # All these are used by handlers - 'filters': { - 'require_debug_false': { - '()': 'django.utils.log.RequireDebugFalse', - }, - 'require_debug_true': { - '()': 'django.utils.log.RequireDebugTrue', + "filters": { + "require_debug_false": { + "()": "django.utils.log.RequireDebugFalse", }, - # custom filter, function defined above: - 'skip_suspicious_operations': { - '()': 'django.utils.log.CallbackFilter', - 'callback': skip_suspicious_operations, + "require_debug_true": { + "()": "django.utils.log.RequireDebugTrue", }, # custom filter, function defined 
above: - 'skip_unreadable_posts': { - '()': 'django.utils.log.CallbackFilter', - 'callback': skip_unreadable_post, + "skip_unreadable_posts": { + "()": "django.utils.log.CallbackFilter", + "callback": skip_unreadable_post, }, }, - # And finally the formatters - 'formatters': { - 'django.server': { - '()': 'django.utils.log.ServerFormatter', - 'format': '[%(server_time)s] %(message)s', + "formatters": { + "django.server": { + "()": "django.utils.log.ServerFormatter", + "format": "[%(server_time)s] %(message)s", }, - 'plain': { - 'style': '{', - 'format': '{levelname}: {name}:{lineno}: {message}', + "plain": { + "style": "{", + "format": "{levelname}: {name}:{lineno}: {message}", + }, + "json": { + "class": "ietf.utils.jsonlogger.DatatrackerJsonFormatter", + "style": "{", + "format": ( + "{asctime}{levelname}{message}{name}{pathname}{lineno}{funcName}" + "{process}{status_code}" + ), }, }, } -# This should be overridden by settings_local for any logger where debug (or -# other) custom log settings are wanted. Use "ietf/manage.py showloggers -l" -# to show registered loggers. The content here should match the levels above -# and is shown as an example: -UTILS_LOGGER_LEVELS: Dict[str, str] = { -# 'django': 'INFO', -# 'django.server': 'INFO', -} - -# End logging -# ------------------------------------------------------------------------ - X_FRAME_OPTIONS = 'SAMEORIGIN' CSRF_TRUSTED_ORIGINS = [ @@ -373,6 +379,7 @@ def skip_unreadable_post(record): ], 'OPTIONS': { 'context_processors': [ + 'ietf.context_processors.traceparent_id', 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.debug', # makes 'sql_queries' available in templates 'django.template.context_processors.i18n', @@ -405,42 +412,45 @@ def skip_unreadable_post(record): MIDDLEWARE = [ - 'django.middleware.csrf.CsrfViewMiddleware', - 'corsheaders.middleware.CorsMiddleware', # see docs on CORS_REPLACE_HTTPS_REFERER before using it - 'django.middleware.common.CommonMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.http.ConditionalGetMiddleware', - 'simple_history.middleware.HistoryRequestMiddleware', + "ietf.middleware.add_otel_traceparent_header", + "django.middleware.csrf.CsrfViewMiddleware", + "corsheaders.middleware.CorsMiddleware", # see docs on CORS_REPLACE_HTTPS_REFERER before using it + "django.middleware.common.CommonMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "ietf.middleware.is_authenticated_header_middleware", + "django.middleware.http.ConditionalGetMiddleware", + "simple_history.middleware.HistoryRequestMiddleware", # comment in this to get logging of SQL insert and update statements: - #'ietf.middleware.sql_log_middleware', - 'ietf.middleware.SMTPExceptionMiddleware', - 'ietf.middleware.Utf8ExceptionMiddleware', - 'ietf.middleware.redirect_trailing_period_middleware', - 'django_referrer_policy.middleware.ReferrerPolicyMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', - 'django.middleware.security.SecurityMiddleware', - # 'csp.middleware.CSPMiddleware', - 'ietf.middleware.unicode_nfkc_normalization_middleware', + #"ietf.middleware.sql_log_middleware", + "ietf.middleware.SMTPExceptionMiddleware", + "ietf.middleware.Utf8ExceptionMiddleware", + 
"ietf.middleware.redirect_trailing_period_middleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", + "django.middleware.security.SecurityMiddleware", + "ietf.middleware.unicode_nfkc_normalization_middleware", ] ROOT_URLCONF = 'ietf.urls' -DJANGO_VITE_ASSETS_PATH = os.path.join(BASE_DIR, 'static/dist-neue') +# Configure django_vite +DJANGO_VITE: dict = {"default": {}} if DEBUG: - DJANGO_VITE_MANIFEST_PATH = os.path.join(BASE_DIR, 'static/dist-neue/manifest.json') + DJANGO_VITE["default"]["manifest_path"] = os.path.join( + BASE_DIR, 'static/dist-neue/manifest.json' + ) # Additional locations of static files (in addition to each app's static/ dir) STATICFILES_DIRS = ( - DJANGO_VITE_ASSETS_PATH, + os.path.join(BASE_DIR, "static/dist-neue"), # for django_vite os.path.join(BASE_DIR, 'static/dist'), os.path.join(BASE_DIR, 'secr/static/dist'), ) INSTALLED_APPS = [ # Django apps - 'django.contrib.admin', + 'ietf.admin', # replaces django.contrib.admin 'django.contrib.admindocs', 'django.contrib.auth', 'django.contrib.contenttypes', @@ -455,14 +465,21 @@ def skip_unreadable_post(record): 'django_vite', 'django_bootstrap5', 'django_celery_beat', + 'django_celery_results', 'corsheaders', 'django_markup', + 'django_filters', 'oidc_provider', + 'drf_spectacular', + 'drf_standardized_errors', + 'rest_framework', + 'rangefilter', 'simple_history', 'tastypie', 'widget_tweaks', # IETF apps 'ietf.api', + 'ietf.blobdb', 'ietf.community', 'ietf.dbtemplate', 'ietf.doc', @@ -483,6 +500,7 @@ def skip_unreadable_post(record): 'ietf.release', 'ietf.review', 'ietf.stats', + 'ietf.status', 'ietf.submit', 'ietf.sync', 'ietf.utils', @@ -490,7 +508,6 @@ def skip_unreadable_post(record): 'ietf.secr.announcement', 'ietf.secr.meetings', 'ietf.secr.rolodex', - 'ietf.secr.sreq', 'ietf.secr.telechat', ] @@ -530,8 +547,6 @@ def skip_unreadable_post(record): CORS_ALLOW_METHODS = ( 'GET', 'OPTIONS', ) CORS_URLS_REGEX = r'^(/api/.*|.*\.json|.*/json/?)$' -# Setting for django_referrer_policy.middleware.ReferrerPolicyMiddleware -REFERRER_POLICY = 'strict-origin-when-cross-origin' # django.middleware.security.SecurityMiddleware SECURE_BROWSER_XSS_FILTER = True @@ -544,6 +559,7 @@ def skip_unreadable_post(record): #SECURE_SSL_REDIRECT = True # Relax the COOP policy to allow Meetecho authentication pop-up SECURE_CROSS_ORIGIN_OPENER_POLICY = "unsafe-none" +SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin" # Override this in your settings_local with the IP addresses relevant for you: INTERNAL_IPS = ( @@ -552,17 +568,83 @@ def skip_unreadable_post(record): '::1', ) +# django-rest-framework configuration +REST_FRAMEWORK = { + "DEFAULT_AUTHENTICATION_CLASSES": [ + "ietf.api.authentication.ApiKeyAuthentication", + "rest_framework.authentication.SessionAuthentication", + ], + "DEFAULT_PERMISSION_CLASSES": [ + "ietf.api.permissions.HasApiKey", + ], + "DEFAULT_RENDERER_CLASSES": [ + "rest_framework.renderers.JSONRenderer", + ], + "DEFAULT_PARSER_CLASSES": [ + "rest_framework.parsers.JSONParser", + ], + "DEFAULT_SCHEMA_CLASS": "drf_standardized_errors.openapi.AutoSchema", + "EXCEPTION_HANDLER": "drf_standardized_errors.handler.exception_handler", +} + +# DRF OpenApi schema settings +SPECTACULAR_SETTINGS = { + "TITLE": "Datatracker API", + "DESCRIPTION": "Datatracker API", + "VERSION": "1.0.0", + "SCHEMA_PATH_PREFIX": "/api/", + "COMPONENT_SPLIT_REQUEST": True, + "COMPONENT_NO_READ_ONLY_REQUIRED": True, + "SERVERS": [ + {"url": "http://localhost:8000", "description": "local dev server"}, + {"url": 
"https://datatracker.ietf.org", "description": "production server"}, + ], + # The following settings are needed for drf-standardized-errors + "ENUM_NAME_OVERRIDES": { + "ValidationErrorEnum": "drf_standardized_errors.openapi_serializers.ValidationErrorEnum.choices", + "ClientErrorEnum": "drf_standardized_errors.openapi_serializers.ClientErrorEnum.choices", + "ServerErrorEnum": "drf_standardized_errors.openapi_serializers.ServerErrorEnum.choices", + "ErrorCode401Enum": "drf_standardized_errors.openapi_serializers.ErrorCode401Enum.choices", + "ErrorCode403Enum": "drf_standardized_errors.openapi_serializers.ErrorCode403Enum.choices", + "ErrorCode404Enum": "drf_standardized_errors.openapi_serializers.ErrorCode404Enum.choices", + "ErrorCode405Enum": "drf_standardized_errors.openapi_serializers.ErrorCode405Enum.choices", + "ErrorCode406Enum": "drf_standardized_errors.openapi_serializers.ErrorCode406Enum.choices", + "ErrorCode415Enum": "drf_standardized_errors.openapi_serializers.ErrorCode415Enum.choices", + "ErrorCode429Enum": "drf_standardized_errors.openapi_serializers.ErrorCode429Enum.choices", + "ErrorCode500Enum": "drf_standardized_errors.openapi_serializers.ErrorCode500Enum.choices", + }, + "POSTPROCESSING_HOOKS": ["drf_standardized_errors.openapi_hooks.postprocess_schema_enums"], +} + +# DRF Standardized Errors settings +DRF_STANDARDIZED_ERRORS = { + # enable the standardized errors when DEBUG=True for unhandled exceptions. + # By default, this is set to False so you're able to view the traceback in + # the terminal and get more information about the exception. + "ENABLE_IN_DEBUG_FOR_UNHANDLED_EXCEPTIONS": False, + # ONLY the responses that correspond to these status codes will appear + # in the API schema. + "ALLOWED_ERROR_STATUS_CODES": [ + "400", + # "401", + # "403", + "404", + # "405", + # "406", + # "415", + # "429", + # "500", + ], + +} + # no slash at end IDTRACKER_BASE_URL = "https://datatracker.ietf.org" RFCDIFF_BASE_URL = "https://author-tools.ietf.org/iddiff" IDNITS_BASE_URL = "https://author-tools.ietf.org/api/idnits" +IDNITS3_BASE_URL = "https://author-tools.ietf.org/idnits3/results" IDNITS_SERVICE_URL = "https://author-tools.ietf.org/idnits" -# Content security policy configuration (django-csp) -# (In current production, the Content-Security-Policy header is completely set by nginx configuration, but -# we try to keep this in sync to avoid confusion) -CSP_DEFAULT_SRC = ("'self'", "'unsafe-inline'", f"data: {IDTRACKER_BASE_URL} http://ietf.org/ https://www.ietf.org/ https://analytics.ietf.org/ https://static.ietf.org") - # The name of the method to use to invoke the test suite TEST_RUNNER = 'ietf.utils.test_runner.IetfTestRunner' @@ -572,8 +654,6 @@ def skip_unreadable_post(record): TEST_DIFF_FAILURE_DIR = "/tmp/test/failure/" -TEST_GHOSTDRIVER_LOG_PATH = "ghostdriver.log" - # These are regexes TEST_URL_COVERAGE_EXCLUDE = [ r"^\^admin/", @@ -600,9 +680,10 @@ def skip_unreadable_post(record): "ietf/utils/test_runner.py", "ietf/name/generate_fixtures.py", "ietf/review/import_from_review_tool.py", - "ietf/stats/backfill_data.py", "ietf/utils/patch.py", "ietf/utils/test_data.py", + "ietf/utils/jstest.py", + "ietf/utils/coverage.py", ] # These are code line regex patterns @@ -616,12 +697,15 @@ def skip_unreadable_post(record): ] # These are filename globs. They are used by test_parse_templates() and -# get_template_paths() +# get_template_paths(). Globs are applied via pathlib.Path().match, using +# the path to the template from the project root. 
TEST_TEMPLATE_IGNORE = [ - ".*", # dot-files - "*~", # tilde temp-files - "#*", # files beginning with a hashmark - "500.html" # isn't loaded by regular loader, but checked by test_500_page() + ".*", # dot-files + "*~", # tilde temp-files + "#*", # files beginning with a hashmark + "500.html", # isn't loaded by regular loader, but checked by test_500_page() + "ietf/templates/admin/meeting/RegistrationTicket/change_list.html", + "ietf/templates/admin/meeting/Registration/change_list.html", ] TEST_COVERAGE_MAIN_FILE = os.path.join(BASE_DIR, "../release-coverage.json") @@ -629,8 +713,8 @@ def skip_unreadable_post(record): TEST_CODE_COVERAGE_CHECKER = None if SERVER_MODE != 'production': - import coverage - TEST_CODE_COVERAGE_CHECKER = coverage.Coverage(source=[ BASE_DIR ], cover_pylib=False, omit=TEST_CODE_COVERAGE_EXCLUDE_FILES) + from ietf.utils.coverage import CoverageManager + TEST_CODE_COVERAGE_CHECKER = CoverageManager() TEST_CODE_COVERAGE_REPORT_PATH = "coverage/" TEST_CODE_COVERAGE_REPORT_URL = os.path.join(STATIC_URL, TEST_CODE_COVERAGE_REPORT_PATH, "index.html") @@ -666,6 +750,63 @@ def skip_unreadable_post(record): "schedule_name": r"(?P[A-Za-z0-9-:_]+)", } +STORAGES: dict[str, Any] = { + "default": {"BACKEND": "django.core.files.storage.FileSystemStorage"}, + "staticfiles": {"BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage"}, +} + +# Storages for artifacts stored as blobs +ARTIFACT_STORAGE_NAMES: list[str] = [ + "active-draft", + "agenda", + "bibxml-ids", + "bluesheets", + "bofreq", + "charter", + "chatlog", + "conflrev", + "draft", + "floorplan", + "indexes", + "liai-att", + "meetinghostlogo", + "minutes", + "narrativeminutes", + "photo", + "polls", + "procmaterials", + "review", + "rfc", + "slides", + "staging", + "statchg", + "statement", +] +for storagename in ARTIFACT_STORAGE_NAMES: + STORAGES[storagename] = { + "BACKEND": "ietf.doc.storage.StoredObjectBlobdbStorage", + "OPTIONS": {"bucket_name": storagename}, + } + +# Buckets / doc types of meeting materials the CF worker is allowed to serve. This +# differs from the list in Session.meeting_related() by the omission of "recording" +MATERIALS_TYPES_SERVED_BY_WORKER = [ + "agenda", + "bluesheets", + "chatlog", + "minutes", + "narrativeminutes", + "polls", + "procmaterials", + "slides", +] + +# Other storages +STORAGES["red_bucket"] = { + "BACKEND": "django.core.files.storage.InMemoryStorage", + "OPTIONS": {"location": "red_bucket"}, +} + # Override this in settings_local.py if needed # *_PATH variables ends with a slash/ . 
@@ -674,26 +815,33 @@ def skip_unreadable_post(record): INTERNET_DRAFT_PDF_PATH = '/a/www/ietf-datatracker/pdf/' RFC_PATH = '/a/www/ietf-ftp/rfc/' CHARTER_PATH = '/a/ietfdata/doc/charter/' +CHARTER_COPY_PATH = '/a/www/ietf-ftp/ietf' # copy 1wg-charters files here if set +CHARTER_COPY_OTHER_PATH = '/a/ftp/ietf' +CHARTER_COPY_THIRD_PATH = '/a/ftp/charter' +GROUP_SUMMARY_PATH = '/a/www/ietf-ftp/ietf' BOFREQ_PATH = '/a/ietfdata/doc/bofreq/' CONFLICT_REVIEW_PATH = '/a/ietfdata/doc/conflict-review' STATUS_CHANGE_PATH = '/a/ietfdata/doc/status-change' AGENDA_PATH = '/a/www/www6s/proceedings/' MEETINGHOST_LOGO_PATH = AGENDA_PATH # put these in the same place as other proceedings files -IPR_DOCUMENT_PATH = '/a/www/ietf-ftp/ietf/IPR/' -IESG_WG_EVALUATION_DIR = "/a/www/www6/iesg/evaluation" # Move drafts to this directory when they expire INTERNET_DRAFT_ARCHIVE_DIR = '/a/ietfdata/doc/draft/collection/draft-archive/' -# The following directory contains linked copies of all drafts, but don't -# write anything to this directory -- its content is maintained by ghostlinkd: +# The following directory contains copies of all drafts - it used to be +# a set of hardlinks maintained by ghostlinkd, but is now explicitly written to INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/a/ietfdata/doc/draft/archive' MEETING_RECORDINGS_DIR = '/a/www/audio' DERIVED_DIR = '/a/ietfdata/derived' +FTP_DIR = '/a/ftp' +ALL_ID_DOWNLOAD_DIR = '/a/www/www6s/download' +NFS_METRICS_TMP_DIR = '/a/tmp' DOCUMENT_FORMAT_ALLOWLIST = ["txt", "ps", "pdf", "xml", "html", ] # Mailing list info URL for lists hosted on the IETF servers -MAILING_LIST_INFO_URL = "https://www.ietf.org/mailman/listinfo/%(list_addr)s" +MAILING_LIST_INFO_URL = "https://mailman3.%(domain)s/mailman3/lists/%(list_addr)s.%(domain)s" MAILING_LIST_ARCHIVE_URL = "https://mailarchive.ietf.org" +MAILING_LIST_ARCHIVE_SEARCH_URL = "https://mailarchive.ietf.org/api/v1/message/search/" +MAILING_LIST_ARCHIVE_API_KEY = "changeme" # Liaison Statement Tool settings (one is used in DOC_HREFS below) LIAISON_UNIVERSAL_FROM = 'Liaison Statement Management Tool ' @@ -705,7 +853,7 @@ def skip_unreadable_post(record): DOC_HREFS = { "charter": "https://www.ietf.org/charter/{doc.name}-{doc.rev}.txt", "draft": "https://www.ietf.org/archive/id/{doc.name}-{doc.rev}.txt", - "rfc": "https://www.rfc-editor.org/rfc/rfc{doc.rfcnum}.txt", + "rfc": "https://www.rfc-editor.org/rfc/rfc{doc.rfc_number}.txt", "slides": "https://www.ietf.org/slides/{doc.name}-{doc.rev}", "procmaterials": "https://www.ietf.org/procmaterials/{doc.name}-{doc.rev}", "conflrev": "https://www.ietf.org/cr/{doc.name}-{doc.rev}.txt", @@ -737,16 +885,14 @@ def skip_unreadable_post(record): SESSION_REQUEST_FROM_EMAIL = 'IETF Meeting Session Request Tool ' SECRETARIAT_SUPPORT_EMAIL = "support@ietf.org" -SECRETARIAT_ACTION_EMAIL = "ietf-action@ietf.org" -SECRETARIAT_INFO_EMAIL = "ietf-info@ietf.org" +SECRETARIAT_ACTION_EMAIL = SECRETARIAT_SUPPORT_EMAIL +SECRETARIAT_INFO_EMAIL = SECRETARIAT_SUPPORT_EMAIL # Put real password in settings_local.py IANA_SYNC_PASSWORD = "secret" IANA_SYNC_CHANGES_URL = "https://datatracker.iana.org:4443/data-tracker/changes" IANA_SYNC_PROTOCOLS_URL = "https://www.iana.org/protocols/" -RFC_TEXT_RSYNC_SOURCE="ftp.rfc-editor.org::rfcs-text-only" - RFC_EDITOR_SYNC_PASSWORD="secret" RFC_EDITOR_SYNC_NOTIFICATION_URL = "https://www.rfc-editor.org/parser/parser.php" RFC_EDITOR_GROUP_NOTIFICATION_EMAIL = "webmaster@rfc-editor.org" @@ -754,10 +900,11 @@ def skip_unreadable_post(record): RFC_EDITOR_QUEUE_URL = 
"https://www.rfc-editor.org/queue2.xml" RFC_EDITOR_INDEX_URL = "https://www.rfc-editor.org/rfc/rfc-index.xml" RFC_EDITOR_ERRATA_JSON_URL = "https://www.rfc-editor.org/errata.json" -RFC_EDITOR_ERRATA_URL = "https://www.rfc-editor.org/errata_search.php?rfc={rfc_number}" RFC_EDITOR_INLINE_ERRATA_URL = "https://www.rfc-editor.org/rfc/inline-errata/rfc{rfc_number}.html" +RFC_EDITOR_ERRATA_BASE_URL = "https://www.rfc-editor.org/errata/" RFC_EDITOR_INFO_BASE_URL = "https://www.rfc-editor.org/info/" + # NomCom Tool settings ROLODEX_URL = "" NOMCOM_PUBLIC_KEYS_DIR = '/a/www/nomcom/public_keys/' @@ -819,7 +966,8 @@ def skip_unreadable_post(record): # Max time to allow for validation before a submission is subject to cancellation IDSUBMIT_MAX_VALIDATION_TIME = datetime.timedelta(minutes=20) -IDSUBMIT_MANUAL_STAGING_DIR = '/tmp/' +# Age at which a submission expires if not posted +IDSUBMIT_EXPIRATION_AGE = datetime.timedelta(days=14) IDSUBMIT_FILE_TYPES = ( 'txt', @@ -859,6 +1007,7 @@ def skip_unreadable_post(record): MEETING_DOC_LOCAL_HREFS = { "agenda": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}", "minutes": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}", + "narrativeminutes": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}", "slides": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}", "chatlog": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}", "polls": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}", @@ -870,6 +1019,7 @@ def skip_unreadable_post(record): MEETING_DOC_CDN_HREFS = { "agenda": "https://www.ietf.org/proceedings/{meeting.number}/agenda/{doc.name}-{doc.rev}", "minutes": "https://www.ietf.org/proceedings/{meeting.number}/minutes/{doc.name}-{doc.rev}", + "narrativeminutes": "https://www.ietf.org/proceedings/{meeting.number}/narrative-minutes/{doc.name}-{doc.rev}", "slides": "https://www.ietf.org/proceedings/{meeting.number}/slides/{doc.name}-{doc.rev}", "recording": "{doc.external_url}", "bluesheets": "https://www.ietf.org/proceedings/{meeting.number}/bluesheets/{doc.uploaded_filename}", @@ -881,6 +1031,7 @@ def skip_unreadable_post(record): MEETING_DOC_OLD_HREFS = { "agenda": "/meeting/{meeting.number}/materials/{doc.name}", "minutes": "/meeting/{meeting.number}/materials/{doc.name}", + "narrativeminutes" : "/meeting/{meeting.number}/materials/{doc.name}", "slides": "/meeting/{meeting.number}/materials/{doc.name}", "recording": "{doc.external_url}", "bluesheets": "https://www.ietf.org/proceedings/{meeting.number}/bluesheets/{doc.uploaded_filename}", @@ -890,6 +1041,7 @@ def skip_unreadable_post(record): MEETING_DOC_GREFS = { "agenda": "/meeting/{meeting.number}/materials/{doc.name}", "minutes": "/meeting/{meeting.number}/materials/{doc.name}", + "narrativeminutes": "/meeting/{meeting.number}/materials/{doc.name}", "slides": "/meeting/{meeting.number}/materials/{doc.name}", "recording": "{doc.external_url}", "bluesheets": "https://www.ietf.org/proceedings/{meeting.number}/bluesheets/{doc.uploaded_filename}", @@ -903,6 +1055,7 @@ def skip_unreadable_post(record): MEETING_VALID_UPLOAD_EXTENSIONS = { 'agenda': ['.txt','.html','.htm', '.md', ], 'minutes': ['.txt','.html','.htm', '.md', '.pdf', ], + 'narrativeminutes': ['.txt','.html','.htm', '.md', '.pdf', ], 'slides': ['.doc','.docx','.pdf','.ppt','.pptx','.txt', ], # Note the removal of .zip 'bluesheets': ['.pdf', '.txt', ], 'procmaterials':['.pdf', ], @@ -912,6 +1065,7 @@ def skip_unreadable_post(record): MEETING_VALID_UPLOAD_MIME_TYPES = { 'agenda': ['text/plain', 
'text/html', 'text/markdown', 'text/x-markdown', ], 'minutes': ['text/plain', 'text/html', 'application/pdf', 'text/markdown', 'text/x-markdown', ], + 'narrativeminutes': ['text/plain', 'text/html', 'application/pdf', 'text/markdown', 'text/x-markdown', ], 'slides': [], 'bluesheets': ['application/pdf', 'text/plain', ], 'procmaterials':['application/pdf', ], @@ -966,15 +1120,12 @@ def skip_unreadable_post(record): # ============================================================================== -RSYNC_BINARY = '/usr/bin/rsync' YANGLINT_BINARY = '/usr/bin/yanglint' DE_GFM_BINARY = '/usr/bin/de-gfm.ruby2.5' # Account settings DAYS_TO_EXPIRE_REGISTRATION_LINK = 3 MINUTES_TO_EXPIRE_RESET_PASSWORD_LINK = 60 -HTPASSWD_COMMAND = "/usr/bin/htpasswd" -HTPASSWD_FILE = "/www/htpasswd" # Generation of pdf files GHOSTSCRIPT_COMMAND = "/usr/bin/gs" @@ -985,13 +1136,19 @@ def skip_unreadable_post(record): # Timezone files for iCalendar TZDATA_ICS_PATH = BASE_DIR + '/../vzic/zoneinfo/' -SECR_BLUE_SHEET_PATH = '/a/www/ietf-datatracker/documents/blue_sheet.rtf' -SECR_BLUE_SHEET_URL = IDTRACKER_BASE_URL + '/documents/blue_sheet.rtf' -SECR_INTERIM_LISTING_DIR = '/a/www/www6/meeting/interim' -SECR_MAX_UPLOAD_SIZE = 40960000 -SECR_PROCEEDINGS_DIR = '/a/www/www6s/proceedings/' -SECR_PPT2PDF_COMMAND = ['/usr/bin/soffice','--headless','--convert-to','pdf:writer_globaldocument_pdf_Export','--outdir'] -STATS_REGISTRATION_ATTENDEES_JSON_URL = 'https://registration.ietf.org/{number}/attendees/' +DATATRACKER_MAX_UPLOAD_SIZE = 40960000 +PPT2PDF_COMMAND = [ + "/usr/bin/soffice", + "--headless", # no GUI + "--safe-mode", # use a new libreoffice profile every time (ensures no reliance on accumulated profile config) + "--norestore", # don't attempt to restore files after a previous crash (ensures that one crash won't block future conversions until UI intervention) + "--convert-to", "pdf:writer_globaldocument_pdf_Export", + "--outdir" +] + +REGISTRATION_PARTICIPANTS_API_URL = 'https://registration.ietf.org/api/v1/participants-dt/' +REGISTRATION_PARTICIPANTS_API_KEY = 'changeme' + PROCEEDINGS_VERSION_CHANGES = [ 0, # version 1 97, # version 2: meeting 97 and later (was number was NEW_PROCEEDINGS_START) @@ -1011,7 +1168,6 @@ def skip_unreadable_post(record): # CHAT_ARCHIVE_URL_PATTERN = 'https://www.ietf.org/jabber/logs/{chat_room_name}?C=M;O=D' PYFLAKES_DEFAULT_ARGS= ["ietf", ] -VULTURE_DEFAULT_ARGS= ["ietf", ] # Automatic Scheduling # @@ -1058,16 +1214,6 @@ def skip_unreadable_post(record): TEST_DATA_DIR = os.path.abspath(BASE_DIR + "/../test/data") -# Path to the email alias lists. 
Used by ietf.utils.aliases
-DRAFT_ALIASES_PATH = os.path.join(TEST_DATA_DIR, "draft-aliases")
-DRAFT_VIRTUAL_PATH = os.path.join(TEST_DATA_DIR, "draft-virtual")
-DRAFT_VIRTUAL_DOMAIN = "virtual.ietf.org"
-
-GROUP_ALIASES_PATH = os.path.join(TEST_DATA_DIR, "group-aliases")
-GROUP_VIRTUAL_PATH = os.path.join(TEST_DATA_DIR, "group-virtual")
-GROUP_VIRTUAL_DOMAIN = "virtual.ietf.org"
-
-POSTCONFIRM_PATH = "/a/postconfirm/wrapper"
 
 USER_PREFERENCE_DEFAULTS = {
     "expires_soon" : "14",
@@ -1083,21 +1229,23 @@ def skip_unreadable_post(record):
     "@ietf.org$",
 ]
 
+# Configuration for django-markup
 MARKUP_SETTINGS = {
     'restructuredtext': {
         'settings_overrides': {
+            'report_level': 3, # error (3) or severe (4) only
             'initial_header_level': 3,
             'doctitle_xform': False,
             'footnote_references': 'superscript',
             'trim_footnote_reference_space': True,
             'default_reference_context': 'view',
+            'raw_enabled': False, # critical for security
+            'file_insertion_enabled': False, # critical for security
             'link_base': ''
         }
     }
 }
 
-MAILMAN_LIB_DIR = '/usr/lib/mailman'
-
 # This is the number of seconds required between subscribing to an ietf
 # mailing list and datatracker account creation being accepted
 LIST_ACCOUNT_DELAY = 60*60*25 # 25 hours
@@ -1123,7 +1271,7 @@ def skip_unreadable_post(record):
 except ImportError:
     pass
 
-STATS_NAMES_LIMIT = 25
+STATS_TIMELINE_CACHE_TIMEOUT = 86400
 
 UTILS_MEETING_CONFERENCE_DOMAINS = ['webex.com', 'zoom.us', 'jitsi.org', 'meetecho.com', 'gather.town', ]
 UTILS_TEST_RANDOM_STATE_FILE = '.factoryboy_random_state'
@@ -1155,6 +1303,19 @@ def skip_unreadable_post(record):
 CELERY_BROKER_URL = 'amqp://mq/'
 CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
 CELERY_BEAT_SYNC_EVERY = 1 # update DB after every event
+CELERY_BEAT_CRON_STARTING_DEADLINE = 1800 # seconds after a missed deadline before abandoning a cron task
+CELERY_BROKER_CONNECTION_RETRY_ON_STARTUP = True # the default, but setting it squelches a warning
+# Use a result backend so we can chain tasks. Results are stored in a Django cache
+# (CELERY_CACHE_BACKEND points at the "celery-results" cache defined in CACHES below)
+# rather than the rpc backend, and remain readable until they expire after
+# CELERY_RESULT_EXPIRES.
+CELERY_RESULT_BACKEND = 'django-cache' # use a Django cache for results
+CELERY_CACHE_BACKEND = 'celery-results' # which Django cache to use
+CELERY_RESULT_EXPIRES = datetime.timedelta(minutes=5) # how long are results valid? (Default is 1 day)
+CELERY_TASK_IGNORE_RESULT = True # ignore results unless specifically enabled for a task
+CELERY_TASK_ROUTES = {
+    "ietf.blobdb.tasks.pybob_the_blob_replicator_task": {"queue": "blobdb"}
+}
 
 # Meetecho API setup: Uncomment this and provide real credentials to enable
 # Meetecho conference creation for interim session requests
@@ -1164,12 +1325,22 @@ def skip_unreadable_post(record):
 # 'client_id': 'datatracker',
 # 'client_secret': 'some secret',
 # 'request_timeout': 3.01, # python-requests doc recommend slightly > a multiple of 3 seconds
+# # How many minutes before/after session to enable slide update API. Defaults to 15.
Set to None to disable, +# # or < 0 to _always_ send updates (useful for debugging) +# 'slides_notify_time': 15, +# 'debug': False, # if True, API calls will be echoed as debug instead of sent (only works for slides for now) # } # Meetecho URLs - instantiate with url.format(session=some_session) MEETECHO_ONSITE_TOOL_URL = "https://meetings.conf.meetecho.com/onsite{session.meeting.number}/?session={session.pk}" MEETECHO_VIDEO_STREAM_URL = "https://meetings.conf.meetecho.com/ietf{session.meeting.number}/?session={session.pk}" MEETECHO_AUDIO_STREAM_URL = "https://mp3.conf.meetecho.com/ietf{session.meeting.number}/{session.pk}.m3u" +MEETECHO_SESSION_RECORDING_URL = "https://meetecho-player.ietf.org/playout/?session={session_label}" + +# Errata system api configuration +# settings should provide +# ERRATA_METADATA_NOTIFICATION_URL +# ERRATA_METADATA_NOTIFICATION_API_KEY # Put the production SECRET_KEY in settings_local.py, and also any other # sensitive or site-specific changes. DO NOT commit settings_local.py to svn. @@ -1190,87 +1361,144 @@ def skip_unreadable_post(record): MIDDLEWARE += DEV_MIDDLEWARE TEMPLATES[0]['OPTIONS']['context_processors'] += DEV_TEMPLATE_CONTEXT_PROCESSORS -if 'CACHES' not in locals(): - if SERVER_MODE == 'production': +if "CACHES" not in locals(): + if SERVER_MODE == "production": + MEMCACHED_HOST = os.environ.get("MEMCACHED_SERVICE_HOST", "127.0.0.1") + MEMCACHED_PORT = os.environ.get("MEMCACHED_SERVICE_PORT", "11211") CACHES = { - 'default': { - 'BACKEND': 'ietf.utils.cache.LenientMemcacheCache', - 'LOCATION': '127.0.0.1:11211', - 'VERSION': __version__, - 'KEY_PREFIX': 'ietf:dt', - 'KEY_FUNCTION': lambda key, key_prefix, version: ( + "default": { + "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", + "VERSION": __version__, + "KEY_PREFIX": "ietf:dt", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" ), }, - 'sessions': { - 'BACKEND': 'ietf.utils.cache.LenientMemcacheCache', - 'LOCATION': '127.0.0.1:11211', + "agenda": { + "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", # No release-specific VERSION setting. - 'KEY_PREFIX': 'ietf:dt', + "KEY_PREFIX": "ietf:dt:agenda", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( + f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" + ), + }, + "proceedings": { + "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", + # No release-specific VERSION setting. + "KEY_PREFIX": "ietf:dt:proceedings", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( + f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" + ), + }, + "sessions": { + "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", + # No release-specific VERSION setting. 
+ "KEY_PREFIX": "ietf:dt", }, - 'htmlized': { - 'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', - 'LOCATION': '/a/cache/datatracker/htmlized', - 'OPTIONS': { - 'MAX_ENTRIES': 100000, # 100,000 + "htmlized": { + "BACKEND": "django.core.cache.backends.filebased.FileBasedCache", + "LOCATION": "/a/cache/datatracker/htmlized", + "OPTIONS": { + "MAX_ENTRIES": 100000, # 100,000 }, }, - 'pdfized': { - 'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', - 'LOCATION': '/a/cache/datatracker/pdfized', - 'OPTIONS': { - 'MAX_ENTRIES': 100000, # 100,000 + "pdfized": { + "BACKEND": "django.core.cache.backends.filebased.FileBasedCache", + "LOCATION": "/a/cache/datatracker/pdfized", + "OPTIONS": { + "MAX_ENTRIES": 100000, # 100,000 }, }, - 'slowpages': { - 'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', - 'LOCATION': '/a/cache/datatracker/slowpages', - 'OPTIONS': { - 'MAX_ENTRIES': 5000, + "slowpages": { + "BACKEND": "django.core.cache.backends.filebased.FileBasedCache", + "LOCATION": "/a/cache/datatracker/slowpages", + "OPTIONS": { + "MAX_ENTRIES": 5000, }, }, + "celery-results": { + "BACKEND": "django.core.cache.backends.memcached.PyMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", + "KEY_PREFIX": "ietf:celery", + }, } else: CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', + "default": { + "BACKEND": "django.core.cache.backends.dummy.DummyCache", #'BACKEND': 'ietf.utils.cache.LenientMemcacheCache', #'LOCATION': '127.0.0.1:11211', #'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', - 'VERSION': __version__, - 'KEY_PREFIX': 'ietf:dt', + "VERSION": __version__, + "KEY_PREFIX": "ietf:dt", + }, + "agenda": { + "BACKEND": "django.core.cache.backends.dummy.DummyCache", + # "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + # "LOCATION": "127.0.0.1:11211", + # No release-specific VERSION setting. + "KEY_PREFIX": "ietf:dt:agenda", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( + f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" + ), }, - 'sessions': { - 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + "proceedings": { + "BACKEND": "django.core.cache.backends.dummy.DummyCache", + # "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + # "LOCATION": "127.0.0.1:11211", + # No release-specific VERSION setting. 
+ "KEY_PREFIX": "ietf:dt:proceedings", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( + f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" + ), }, - 'htmlized': { - 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', + "sessions": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + }, + "htmlized": { + "BACKEND": "django.core.cache.backends.dummy.DummyCache", #'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', - 'LOCATION': '/var/cache/datatracker/htmlized', - 'OPTIONS': { - 'MAX_ENTRIES': 1000, + "LOCATION": "/var/cache/datatracker/htmlized", + "OPTIONS": { + "MAX_ENTRIES": 1000, }, }, - 'pdfized': { - 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', + "pdfized": { + "BACKEND": "django.core.cache.backends.dummy.DummyCache", #'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', - 'LOCATION': '/var/cache/datatracker/pdfized', - 'OPTIONS': { - 'MAX_ENTRIES': 1000, + "LOCATION": "/var/cache/datatracker/pdfized", + "OPTIONS": { + "MAX_ENTRIES": 1000, }, }, - 'slowpages': { - 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', + "slowpages": { + "BACKEND": "django.core.cache.backends.dummy.DummyCache", #'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', - 'LOCATION': '/var/cache/datatracker/', - 'OPTIONS': { - 'MAX_ENTRIES': 5000, + "LOCATION": "/var/cache/datatracker/", + "OPTIONS": { + "MAX_ENTRIES": 5000, }, }, + "celery-results": { + "BACKEND": "django.core.cache.backends.memcached.PyMemcacheCache", + "LOCATION": "app:11211", + "KEY_PREFIX": "ietf:celery", + }, } PUBLISH_IPR_STATES = ['posted', 'removed', 'removed_objfalse'] +ADVERTISE_VERSIONS = ["markdown", "pyang", "rfc2html", "xml2rfc"] + # We provide a secret key only for test and development modes. It's # absolutely vital that django fails to start in production mode unless a # secret key has been provided elsewhere, not in this file which is @@ -1288,11 +1516,17 @@ def skip_unreadable_post(record): NOMCOM_APP_SECRET = b'\x9b\xdas1\xec\xd5\xa0SI~\xcb\xd4\xf5t\x99\xc4i\xd7\x9f\x0b\xa9\xe8\xfeY\x80$\x1e\x12tN:\x84' ALLOWED_HOSTS = ['*',] - + try: # see https://github.com/omarish/django-cprofile-middleware - import django_cprofile_middleware # pyflakes:ignore - MIDDLEWARE = MIDDLEWARE + ['django_cprofile_middleware.middleware.ProfilerMiddleware', ] + import django_cprofile_middleware # pyflakes:ignore + + MIDDLEWARE = MIDDLEWARE + [ + "django_cprofile_middleware.middleware.ProfilerMiddleware", + ] + DJANGO_CPROFILE_MIDDLEWARE_REQUIRE_STAFF = ( + False # Do not use this setting for a public site! 
+ ) except ImportError: pass @@ -1302,3 +1536,8 @@ def skip_unreadable_post(record): CSRF_TRUSTED_ORIGINS += ['http://localhost:8000', 'http://127.0.0.1:8000', 'http://[::1]:8000'] SESSION_COOKIE_SECURE = False SESSION_COOKIE_SAMESITE = 'Lax' + + +YOUTUBE_DOMAINS = ['www.youtube.com', 'youtube.com', 'youtu.be', 'm.youtube.com', 'youtube-nocookie.com', 'www.youtube-nocookie.com'] + +IETF_DOI_PREFIX = "10.17487" diff --git a/ietf/settings_test.py b/ietf/settings_test.py index 3f69f0ae38..e7ebc13eb2 100755 --- a/ietf/settings_test.py +++ b/ietf/settings_test.py @@ -9,9 +9,12 @@ # ./manage.py test --settings=settings_test doc.ChangeStateTestCase # -import os +import atexit +import os +import shutil +import tempfile from ietf.settings import * # pyflakes:ignore -from ietf.settings import TEST_CODE_COVERAGE_CHECKER, BASE_DIR, PHOTOS_DIRNAME +from ietf.settings import ORIG_AUTH_PASSWORD_VALIDATORS, STORAGES import debug # pyflakes:ignore debug.debug = True @@ -45,14 +48,26 @@ def __getitem__(self, item): }, } -if TEST_CODE_COVERAGE_CHECKER and not TEST_CODE_COVERAGE_CHECKER._started: # pyflakes:ignore +# test with a single DB - do not use a DB router +BLOBDB_DATABASE = "default" +DATABASE_ROUTERS = [] # type: ignore + +if TEST_CODE_COVERAGE_CHECKER: # pyflakes:ignore TEST_CODE_COVERAGE_CHECKER.start() # pyflakes:ignore -NOMCOM_PUBLIC_KEYS_DIR=os.path.abspath("tmp-nomcom-public-keys-dir") +def tempdir_with_cleanup(**kwargs): + """Utility to create a temporary dir and arrange cleanup""" + _dir = tempfile.mkdtemp(**kwargs) + atexit.register(shutil.rmtree, _dir) + return _dir + + +NOMCOM_PUBLIC_KEYS_DIR = tempdir_with_cleanup(suffix="-nomcom-public-keys-dir") -MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), 'test/media/') # pyflakes:ignore -MEDIA_URL = '/test/media/' -PHOTOS_DIR = MEDIA_ROOT + PHOTOS_DIRNAME # pyflakes:ignore +MEDIA_ROOT = tempdir_with_cleanup(suffix="-media") +PHOTOS_DIRNAME = "photo" +PHOTOS_DIR = os.path.join(MEDIA_ROOT, PHOTOS_DIRNAME) +os.mkdir(PHOTOS_DIR) # Undo any developer-dependent middleware when running the tests MIDDLEWARE = [ c for c in MIDDLEWARE if not c in DEV_MIDDLEWARE ] # pyflakes:ignore @@ -60,3 +75,52 @@ def __getitem__(self, item): TEMPLATES[0]['OPTIONS']['context_processors'] = [ p for p in TEMPLATES[0]['OPTIONS']['context_processors'] if not p in DEV_TEMPLATE_CONTEXT_PROCESSORS ] # pyflakes:ignore REQUEST_PROFILE_STORE_ANONYMOUS_SESSIONS = False + +# Override loggers with a safer set in case things go to the log during testing. Specifically, +# make sure there are no syslog loggers that might send things to a real syslog. 
+LOGGING["loggers"] = { # pyflakes:ignore + 'django': { + 'handlers': ['debug_console'], + 'level': 'INFO', + }, + 'django.request': { + 'handlers': ['debug_console'], + 'level': 'ERROR', + }, + 'django.server': { + 'handlers': ['django.server'], + 'level': 'INFO', + }, + 'django.security': { + 'handlers': ['debug_console', ], + 'level': 'INFO', + }, + 'oidc_provider': { + 'handlers': ['debug_console', ], + 'level': 'DEBUG', + }, + 'datatracker': { + 'handlers': ['debug_console'], + 'level': 'INFO', + }, + 'celery': { + 'handlers': ['debug_console'], + 'level': 'INFO', + }, +} + +# Restore AUTH_PASSWORD_VALIDATORS if they were reset in settings_local +try: + AUTH_PASSWORD_VALIDATORS = ORIG_AUTH_PASSWORD_VALIDATORS +except NameError: + pass + +# Use InMemoryStorage for red bucket and r2-rfc storages +STORAGES["red_bucket"] = { + "BACKEND": "django.core.files.storage.InMemoryStorage", + "OPTIONS": {"location": "red_bucket"}, +} +STORAGES["r2-rfc"] = { + "BACKEND": "django.core.files.storage.InMemoryStorage", + "OPTIONS": {"location": "r2-rfc"}, +} diff --git a/ietf/settings_testcrawl.py b/ietf/settings_testcrawl.py index a1b5ce8946..edb978757a 100644 --- a/ietf/settings_testcrawl.py +++ b/ietf/settings_testcrawl.py @@ -27,9 +27,14 @@ 'MAX_ENTRIES': 10000, }, }, + 'agenda': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + }, + 'proceedings': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + }, 'sessions': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', - # No version-specific VERSION setting. }, 'htmlized': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', diff --git a/ietf/static/css/custom-bs-import.scss b/ietf/static/css/custom-bs-import.scss index e124b56875..644efdcf10 100644 --- a/ietf/static/css/custom-bs-import.scss +++ b/ietf/static/css/custom-bs-import.scss @@ -46,6 +46,9 @@ $h4-font-size: $font-size-base * 1.4; $h5-font-size: $font-size-base * 1.2; $h6-font-size: $font-size-base; +// Default is gray-800, which is the same as the range slider background. 
+$light-bg-subtle-dark: mix($gray-800, $black); + @import "bootstrap/scss/maps"; @import "bootstrap/scss/mixins"; @import "bootstrap/scss/utilities"; diff --git a/ietf/static/css/datepicker.scss b/ietf/static/css/datepicker.scss index 88f9e835fd..b193ccda3a 100644 --- a/ietf/static/css/datepicker.scss +++ b/ietf/static/css/datepicker.scss @@ -4,3 +4,29 @@ $dp-cell-focus-background-color: $dropdown-link-hover-bg !default; @import "vanillajs-datepicker/sass/datepicker-bs5"; + +[data-bs-theme="dark"] .datepicker-picker { + .datepicker-header, + .datepicker-controls .btn, + .datepicker-main, + .datepicker-footer { + background-color: $gray-800; + } + + .datepicker-cell:hover { + background-color: $gray-700; + } + + .datepicker-cell.day.focused { + background-color: $gray-600; + } + + .datepicker-cell.day.selected.focused { + background-color: $blue; + } + + .datepicker-controls .btn:hover { + background-color:$gray-700; + color: $gray-400; + } +} diff --git a/ietf/static/css/document_html.scss b/ietf/static/css/document_html.scss index 0b5a70f124..47ef8d64b4 100644 --- a/ietf/static/css/document_html.scss +++ b/ietf/static/css/document_html.scss @@ -320,6 +320,11 @@ tbody.meta tr { background-color: $danger; } +.badge-generic { + color: white; + background-color: $danger; +} + #toc-nav { width: inherit; overscroll-behavior-y: none; // Prevent overscrolling from scrolling the main content diff --git a/ietf/static/css/document_html_txt.scss b/ietf/static/css/document_html_txt.scss index b0fec7c4d6..a5991056c9 100644 --- a/ietf/static/css/document_html_txt.scss +++ b/ietf/static/css/document_html_txt.scss @@ -344,7 +344,7 @@ div:is(.artwork, .sourcecode) pre { flex: 0 0 content; margin: 0; max-width: 72ch; - overflow: auto; + overflow: auto clip; } div:is(.artwork, .sourcecode) .pilcrow { flex: 0 0 1ch; diff --git a/ietf/static/css/highcharts.scss b/ietf/static/css/highcharts.scss index c556f4e118..d2f5d5e0e7 100644 --- a/ietf/static/css/highcharts.scss +++ b/ietf/static/css/highcharts.scss @@ -1 +1,6 @@ @import "npm:highcharts/css/highcharts.css"; +@import "custom-bs-import"; + +.highcharts-container { + font-family: $font-family-sans-serif; +} diff --git a/ietf/static/css/ietf.scss b/ietf/static/css/ietf.scss index ca960c0378..b8c701eae1 100644 --- a/ietf/static/css/ietf.scss +++ b/ietf/static/css/ietf.scss @@ -46,6 +46,11 @@ $bootstrap-icons-font-src: url("npm:bootstrap-icons/font/fonts/bootstrap-icons.w url("npm:bootstrap-icons/font/fonts/bootstrap-icons.woff") format("woff"); @import "bootstrap-icons/font/bootstrap-icons"; +// Disable contextual alternates (calt) +body { + font-feature-settings: "calt" off; +} + // Leave room for fixed-top navbar... 
body.navbar-offset { padding-top: 60px; @@ -229,16 +234,20 @@ th, .group-menu .dropdown-menu { height: auto; width: auto; - max-height: 35em; + max-height: 95vh; overflow-x: hidden; overflow-y: auto; } // Helper to constrain the size of the main logo .ietflogo { - width: 75%; + width: 100%; max-width: 300px; } +.ietflogo > img { + min-width: 100px; + width: 100%; +} // Make revision numbers pagination items fixed-width .revision-list { @@ -506,23 +515,23 @@ td.position-empty { tr.position-notready-row, tr.position-discuss-row, tr.position-block-row { - background-color: shade-color($color-discuss, 85%); + background-color: shade-color($color-discuss, 65%); } tr.position-yes-row { - background-color: shade-color($color-yes, 75%); + background-color: shade-color($color-yes, 65%); } tr.position-noobj-row { - background-color: shade-color($color-noobj, 75%); + background-color: shade-color($color-noobj, 65%); } tr.position-abstain-row { - background-color: shade-color($color-abstain, 85%); + background-color: shade-color($color-abstain, 65%); } tr.position-recuse-row { - background-color: shade-color($color-recuse, 85%); + background-color: shade-color($color-recuse, 65%); } } @@ -691,8 +700,8 @@ td.position-empty { } .edit-meeting-schedule .session.other-session-selected { - outline: var(--bs-info) solid 0.2em; - /* width matches margin on .session */ + outline: 0.3em solid var(--bs-info); + box-shadow: 0 0 1em var(--bs-info); z-index: 2; /* render above timeslot outlines */ } @@ -1183,3 +1192,49 @@ td.position-empty { } } } + +blockquote { + padding-left: 1rem; + border-left: solid 1px var(--bs-body-color); +} + +iframe.status { + background-color:transparent; + border:none; + width:100%; + height:3.5em; +} + +.overflow-shadows { + transition: box-shadow 0.5s; +} + +.overflow-shadows--both { + box-shadow: inset 0px 21px 18px -20px var(--bs-body-color), + inset 0px -21px 18px -20px var(--bs-body-color); +} + +.overflow-shadows--top-only { + box-shadow: inset 0px 21px 18px -20px var(--bs-body-color); +} + +.overflow-shadows--bottom-only { + box-shadow: inset 0px -21px 18px -20px var(--bs-body-color); +} + +#navbar-doc-search-wrapper { + position: relative; +} + +#navbar-doc-search-results { + max-height: 400px; + overflow-y: auto; + min-width: auto; + left: 0; + right: 0; + + .dropdown-item { + white-space: normal; + overflow-wrap: break-word; + } +} diff --git a/ietf/static/js/add_session_recordings.js b/ietf/static/js/add_session_recordings.js new file mode 100644 index 0000000000..c1c5932a48 --- /dev/null +++ b/ietf/static/js/add_session_recordings.js @@ -0,0 +1,30 @@ +// Copyright The IETF Trust 2024-2025, All Rights Reserved +document.addEventListener('DOMContentLoaded', () => { + const form = document.getElementById('delete_recordings_form') + const dialog = document.getElementById('delete_confirm_dialog') + const dialog_link = document.getElementById('delete_confirm_link') + const dialog_submit = document.getElementById('delete_confirm_submit') + const dialog_cancel = document.getElementById('delete_confirm_cancel') + + dialog.style.maxWidth = '30vw' + + form.addEventListener('submit', (e) => { + e.preventDefault() + dialog_submit.value = e.submitter.value + const recording_link = e.submitter.closest('tr').querySelector('a') + dialog_link.setAttribute('href', recording_link.getAttribute('href')) + dialog_link.textContent = recording_link.textContent + dialog.showModal() + }) + + dialog_cancel.addEventListener('click', (e) => { + e.preventDefault() + dialog.close() + }) + + 
document.addEventListener('keydown', (e) => {
+    if (dialog.open && e.key === 'Escape') {
+      dialog.close()
+    }
+  })
+})
diff --git a/ietf/static/js/announcement.js b/ietf/static/js/announcement.js
new file mode 100644
index 0000000000..95465120fa
--- /dev/null
+++ b/ietf/static/js/announcement.js
@@ -0,0 +1,57 @@
+const announcementApp = (function() {
+  'use strict';
+  return {
+    // functions for Announcement
+    checkToField: function() {
+      document.documentElement.scrollTop = 0; // For most browsers
+      const toField = document.getElementById('id_to');
+      const toCustomInput = document.getElementById('id_to_custom');
+      const toCustomDiv = toCustomInput.closest('div.row');
+
+      if (toField.value === 'Other...') {
+        toCustomDiv.style.display = 'flex'; // Show the custom field
+      } else {
+        toCustomDiv.style.display = 'none'; // Hide the custom field
+        toCustomInput.value = ''; // Optionally clear the input value if hidden
+      }
+    }
+  };
+})();
+
+// Extra care is required to ensure the back button
+// works properly for the optional to_custom field.
+// Take the case when a user selects "Other..." for
+// "To" field. The "To custom" field appears and they
+// enter a new address there.
+// In Chrome, when the form is submitted and then the user
+// uses the back button (or browser back), the page loads
+// from bfcache then the javascript DOMContentLoaded event
+// handler is run, hiding the empty to_custom field, THEN the
+// browser autofills the form fields. Because to_custom
+// is now hidden it does not get a value. This is a very
+// bad experience for the user because the to_custom field
+// was unexpectedly cleared and hidden. If they notice this
+// they would need to know to first select another "To"
+// option, then select "Other..." again just to get the
+// to_custom field visible so they can re-enter the custom
+// address.
+// The solution is to use setTimeout to run checkToField
+// after a short delay, giving the browser time to autofill
+// the form fields before it checks to see if the to_custom
+// field is empty and hides it.
+
+document.addEventListener('DOMContentLoaded', function() {
+  // Run the visibility check after allowing cache to populate values
+  setTimeout(announcementApp.checkToField, 300);
+
+  const toField = document.getElementById('id_to');
+  toField.addEventListener('change', announcementApp.checkToField);
+});
+
+// Handle back/forward navigation with pageshow
+window.addEventListener('pageshow', function(event) {
+  if (event.persisted) {
+    // Then apply visibility logic after cache restoration
+    setTimeout(announcementApp.checkToField, 300);
+  }
+});
\ No newline at end of file
diff --git a/ietf/static/js/attendees-chart.js b/ietf/static/js/attendees-chart.js
new file mode 100644
index 0000000000..fed3b1289c
--- /dev/null
+++ b/ietf/static/js/attendees-chart.js
@@ -0,0 +1,58 @@
+(function () {
+  var raw = document.getElementById('attendees-chart-data');
+  if (!raw) return;
+  var chartData = JSON.parse(raw.textContent);
+  var chart = null;
+  var currentBreakdown = 'type';
+
+  // Override the global transparent background set by highcharts.js so the
+  // export menu and fullscreen view use the page background color.
+ var container = document.getElementById('attendees-pie-chart'); + var bodyBg = getComputedStyle(document.body).backgroundColor; + container.style.setProperty('--highcharts-background-color', bodyBg); + + function renderChart(breakdown) { + var seriesData = chartData[breakdown].map(function (item) { + return { name: item[0], y: item[1] }; + }); + if (chart) chart.destroy(); + chart = Highcharts.chart(container, { + chart: { type: 'pie', height: 400 }, + title: { text: null }, + tooltip: { pointFormat: '{point.name}: {point.y} ({point.percentage:.1f}%)' }, + plotOptions: { + pie: { + dataLabels: { + enabled: true, + format: '{point.name}
    {point.y} ({point.percentage:.1f}%)', + }, + showInLegend: false, + } + }, + series: [{ name: 'Attendees', data: seriesData }], + }); + } + + var modal = document.getElementById('attendees-chart-modal'); + + // Render (or re-render) the chart each time the modal becomes fully visible, + // so Highcharts can measure the container dimensions correctly. + modal.addEventListener('shown.bs.modal', function () { + renderChart(currentBreakdown); + }); + + // Release the chart when the modal closes to avoid stale renders. + modal.addEventListener('hidden.bs.modal', function () { + if (chart) { + chart.destroy(); + chart = null; + } + }); + + document.querySelectorAll('[name="attendees-breakdown"]').forEach(function (radio) { + radio.addEventListener('change', function () { + currentBreakdown = this.value; + renderChart(currentBreakdown); + }); + }); +})(); diff --git a/ietf/static/js/complete-review.js b/ietf/static/js/complete-review.js index a359dac237..3a58ba9700 100644 --- a/ietf/static/js/complete-review.js +++ b/ietf/static/js/complete-review.js @@ -24,6 +24,8 @@ $(document) .before(mailArchiveSearchTemplate); var mailArchiveSearch = form.find(".mail-archive-search"); + const isReviewer = mailArchiveSearch.data('isReviewer'); + const searchMailArchiveUrl = mailArchiveSearch.data('searchMailArchiveUrl'); var retrievingData = null; @@ -190,4 +192,4 @@ $(document) form.find("[name=review_submission][value=link]") .trigger("click"); } - }); \ No newline at end of file + }); diff --git a/ietf/static/js/custom_striped.js b/ietf/static/js/custom_striped.js new file mode 100644 index 0000000000..480ad7cf82 --- /dev/null +++ b/ietf/static/js/custom_striped.js @@ -0,0 +1,16 @@ +// Copyright The IETF Trust 2025, All Rights Reserved + +document.addEventListener('DOMContentLoaded', () => { + // add stripes + const firstRow = document.querySelector('.custom-stripe .row') + if (firstRow) { + const parent = firstRow.parentElement; + const allRows = Array.from(parent.children).filter(child => child.classList.contains('row')) + allRows.forEach((row, index) => { + row.classList.remove('bg-light') + if (index % 2 === 1) { + row.classList.add('bg-light') + } + }) + } +}) diff --git a/ietf/static/js/document_html.js b/ietf/static/js/document_html.js index 6e8861739a..3e609f3965 100644 --- a/ietf/static/js/document_html.js +++ b/ietf/static/js/document_html.js @@ -117,4 +117,83 @@ document.addEventListener("DOMContentLoaded", function (event) { } }); } + + // Rewrite these CSS properties so that the values are available for restyling. 
+ document.querySelectorAll("svg [style]").forEach(el => { + // Push these CSS properties into their own attributes + const SVG_PRESENTATION_ATTRS = new Set([ + 'alignment-baseline', 'baseline-shift', 'clip', 'clip-path', 'clip-rule', + 'color', 'color-interpolation', 'color-interpolation-filters', + 'color-rendering', 'cursor', 'direction', 'display', 'dominant-baseline', + 'fill', 'fill-opacity', 'fill-rule', 'filter', 'flood-color', + 'flood-opacity', 'font-family', 'font-size', 'font-size-adjust', + 'font-stretch', 'font-style', 'font-variant', 'font-weight', + 'image-rendering', 'letter-spacing', 'lighting-color', 'marker-end', + 'marker-mid', 'marker-start', 'mask', 'opacity', 'overflow', 'paint-order', + 'pointer-events', 'shape-rendering', 'stop-color', 'stop-opacity', + 'stroke', 'stroke-dasharray', 'stroke-dashoffset', 'stroke-linecap', + 'stroke-linejoin', 'stroke-miterlimit', 'stroke-opacity', 'stroke-width', + 'text-anchor', 'text-decoration', 'text-rendering', 'unicode-bidi', + 'vector-effect', 'visibility', 'word-spacing', 'writing-mode', + ]); + + // Simple CSS splitter: respects quoted strings and parens so semicolons + // inside url(...) or "..." don't get treated as declaration boundaries. + function parseDeclarations(styleText) { + const decls = []; + let buf = ''; + let inStr = false; + let strChar = ''; + let escaped = false; + let depth = 0; + + for (const ch of styleText) { + if (inStr) { + if (escaped) { + escaped = false; + } else if (ch === '\\') { + escaped = true; + } else if (ch === strChar) { + inStr = false; + } + } else if (ch === '"' || ch === "'") { + inStr = true; + strChar = ch; + } else if (ch === '(') { + depth++; + } else if (ch === ')') { + depth--; + } else if (ch === ';' && depth === 0) { + const trimmed = buf.trim(); + if (trimmed) { + decls.push(trimmed); + } + buf = ''; + continue; + } + buf += ch; + } + const trimmed = buf.trim(); + if (trimmed) { + decls.push(trimmed); + } + return decls; + } + + const remainder = []; + for (const decl of parseDeclarations(el.getAttribute('style'))) { + const [prop, val] = decl.split(":", 2).map(v => v.trim()); + if (val && !/!important$/.test(val) && SVG_PRESENTATION_ATTRS.has(prop)) { + el.setAttribute(prop, val); + } else { + remainder.push(decl); + } + } + + if (remainder.length > 0) { + el.setAttribute('style', remainder.join('; ')); + } else { + el.removeAttribute('style'); + } + }); }); diff --git a/ietf/static/js/edit-meeting-schedule.js b/ietf/static/js/edit-meeting-schedule.js index 3f0afe0841..2a73a8c29d 100644 --- a/ietf/static/js/edit-meeting-schedule.js +++ b/ietf/static/js/edit-meeting-schedule.js @@ -50,6 +50,7 @@ $(function () { let sessionPurposeInputs = schedEditor.find('.session-purpose-toggles input'); let timeSlotGroupInputs = schedEditor.find("#timeslot-group-toggles-modal .modal-body .individual-timeslots input"); let sessionParentInputs = schedEditor.find(".session-parent-toggles input"); + let sessionParentToggleAll = schedEditor.find(".session-parent-toggles .session-parent-toggle-all") const classes_to_hide = '.hidden-timeslot-group,.hidden-timeslot-type'; // hack to work around lack of position sticky support in old browsers, see https://caniuse.com/#feat=css-sticky @@ -769,6 +770,17 @@ $(function () { sessionParentInputs.on("click", updateSessionParentToggling); updateSessionParentToggling(); + // Toggle _all_ session parents + function toggleAllSessionParents() { + if (sessionParentInputs.filter(":checked").length < sessionParentInputs.length) { + 
sessionParentInputs.prop("checked", true); + } else { + sessionParentInputs.prop("checked", false); + } + updateSessionParentToggling(); + } + sessionParentToggleAll.on("click", toggleAllSessionParents); + // Toggling timeslot types function updateTimeSlotTypeToggling() { const checkedTypes = jQuery.map(timeSlotTypeInputs.filter(":checked"), elt => elt.value); @@ -1020,4 +1032,4 @@ $(function () { .on("mouseleave", ".other-session", function () { sessions.filter("#session" + this.dataset.othersessionid).removeClass("highlight"); }); -}); \ No newline at end of file +}); diff --git a/ietf/static/js/highcharts.js b/ietf/static/js/highcharts.js index 268f96e50f..6c3b68051f 100644 --- a/ietf/static/js/highcharts.js +++ b/ietf/static/js/highcharts.js @@ -6,6 +6,8 @@ import Highcharts_Export_Data from "highcharts/modules/export-data"; import Highcharts_Accessibility from "highcharts/modules/accessibility"; import Highcharts_Sunburst from "highcharts/modules/sunburst"; +document.documentElement.style.setProperty("--highcharts-background-color", "transparent"); + Highcharts_Exporting(Highcharts); Highcharts_Offline_Exporting(Highcharts); Highcharts_Export_Data(Highcharts); @@ -27,7 +29,7 @@ window.Highcharts = Highcharts; window.group_stats = function (url, chart_selector) { $.getJSON(url, function (data) { $(chart_selector) - .each(function (i, e) { + .each(function (_, e) { const dataset = e.dataset.dataset; if (!dataset) { console.log("dataset data attribute not set"); diff --git a/ietf/static/js/highstock.js b/ietf/static/js/highstock.js index 15ddb5ef9c..05b1250ed0 100644 --- a/ietf/static/js/highstock.js +++ b/ietf/static/js/highstock.js @@ -5,6 +5,8 @@ import Highcharts_Offline_Exporting from "highcharts/modules/offline-exporting"; import Highcharts_Export_Data from "highcharts/modules/export-data"; import Highcharts_Accessibility from"highcharts/modules/accessibility"; +document.documentElement.style.setProperty("--highcharts-background-color", "transparent"); + Highcharts_Exporting(Highcharts); Highcharts_Offline_Exporting(Highcharts); Highcharts_Export_Data(Highcharts); diff --git a/ietf/static/js/ietf.js b/ietf/static/js/ietf.js index 215d80553c..09fa324e42 100644 --- a/ietf/static/js/ietf.js +++ b/ietf/static/js/ietf.js @@ -13,7 +13,7 @@ import "bootstrap/js/dist/scrollspy"; import "bootstrap/js/dist/tab"; // import "bootstrap/js/dist/toast"; import "bootstrap/js/dist/tooltip"; - +import { debounce } from 'lodash-es'; import jquery from "jquery"; window.$ = window.jQuery = jquery; @@ -57,7 +57,7 @@ $(document) var text = $(this) .text(); // insert some at strategic places - var newtext = text.replace(/([@._+])/g, "$1"); + var newtext = text.replace(/(\S)([@._+])(\S)/g, "$1$2$3"); if (newtext === text) { return; } @@ -91,6 +91,55 @@ $(document) // }); }); +function overflowShadows(el) { + function handleScroll(){ + const canScrollUp = el.scrollTop > 0 + const canScrollDown = el.offsetHeight + el.scrollTop < el.scrollHeight + el.classList.toggle("overflow-shadows--both", canScrollUp && canScrollDown) + el.classList.toggle("overflow-shadows--top-only", canScrollUp && !canScrollDown) + el.classList.toggle("overflow-shadows--bottom-only", !canScrollUp && canScrollDown) + } + + el.addEventListener("scroll", handleScroll, {passive: true}) + handleScroll() + + const observer = new IntersectionObserver(handleScroll) + observer.observe(el) // el won't have scrollTop etc when hidden, so we need to recalculate when it's revealed + + return () => { + el.removeEventListener("scroll", handleScroll) + 
observer.unobserve(el) + } +} + +function ensureDropdownOnscreen(elm) { + const handlePlacement = () => { + if(!(elm instanceof HTMLElement)) { + return + } + const rect = elm.getBoundingClientRect() + const BUFFER_PX = 5 // additional distance from bottom of viewport + const existingStyleTop = parseInt(elm.style.top, 10) + const offscreenBy = Math.round(window.innerHeight - (rect.top + rect.height) - BUFFER_PX) + if(existingStyleTop === offscreenBy) { + console.log(`Already set top to ${offscreenBy}. Ignoring`) + // already set, nothing to do + return + } + if(offscreenBy < 0) { + elm.style.top = `${offscreenBy}px` + } + } + + const debouncedHandler = debounce(handlePlacement, 100) + + const observer = new MutationObserver(debouncedHandler) + + observer.observe(elm, { + attributes: true + }) +} + $(document) .ready(function () { // load data for the menu @@ -108,7 +157,7 @@ $(document) } attachTo.find(".dropdown-menu") .remove(); - var menu = ['
@@ -232,38 +273,38 @@ {% endif %}
- Agenda
- Materials
- Floor plan
- Registration
- Important dates
- + Request a session
@@ -276,7 +317,7 @@ {% endif %}
- Session requests @@ -288,7 +329,7 @@
- {{ g.acronym }} {{ g.type.slug }} meetings + {{ g.acronym }} {{ g.type_id }} meetings
{% endfor %} @@ -303,7 +344,7 @@ {% endif %}
- Upcoming meetings @@ -318,13 +359,13 @@
{% endif %}
- Past meetings
- Meeting proceedings @@ -350,57 +391,64 @@
{% endif %}
- IPR disclosures
- Liaison statements
+ {% if user|has_role:"Secretariat,IAB,Liaison Manager,Liaison Coordinator" %}
- + List of other SDO groups + +
+ {% endif %} +
+ IESG agenda
- NomComs
- Downref registry
-
- +
  • + Statistics - - {% endif %}" - {% endif %} - {% with label.2 as up_is_good %} - {% if prev < count %} - class="bi bi-arrow-up-right-circle{% if count %}-fill{% endif %} {{ up_is_good|yesno:'text-success,text-danger,text-body-secondary' }}" - {% elif prev > count %} - class="bi bi-arrow-down-right-circle{% if count %}-fill{% endif %} {{ up_is_good|yesno:'text-danger,text-success,text-body-secondary' }}" - {% else %} - class="bi bi-arrow-right-circle text-body-secondary" - {% endif %} - > - {% endwith %} - {% endif %} -{% endif %} \ No newline at end of file diff --git a/ietf/templates/doc/ad_list.html b/ietf/templates/doc/ad_list.html index 189754e8ac..cac709021e 100644 --- a/ietf/templates/doc/ad_list.html +++ b/ietf/templates/doc/ad_list.html @@ -3,42 +3,80 @@ {% load origin static %} {% load ietf_filters %} {% block pagehead %} - + + {% endblock %} -{% block title %}Area directors{% endblock %} +{% block morecss %} + table .border-bottom { border-bottom-color: var(--highcharts-neutral-color-80) !important; } + .highcharts-container .highcharts-axis-labels { + font-size: .7rem; + fill: var(--bs-body-color) + } + .highcharts-container .highcharts-graph { stroke-width: 2.5; } + .highcharts-container .highcharts-color-0 { + fill: var(--bs-body-color); + stroke: var(--bs-primary); + } + .highcharts-container .highcharts-data-label text { + font-size: 1rem; + font-weight: inherit; + fill: var(--bs-body-color) + } +{% endblock %} +{% block title %}IESG Dashboard{% endblock %} {% block content %} {% origin %} -

    Area Director Workload

    +

    IESG Dashboard

    {% if user|has_role:"Area Director,Secretariat" %}
    - {{ delta.days }}-day trend indicators + {{ delta }}-day trend graphs are only shown to logged-in Area Directors.
    {% endif %} - {% for group in workload %} -

    {{ group.group_type }} State Counts

    - +

    + Documents in IESG Processing + IESG view of Working Groups +

    + {% for dt in metadata %} +

    {{ dt.type.1 }} State Counts

    +
    - {% for g, desc, up_is_good in group.group_names %} - + {% endif %} + {% for state, state_name in dt.states %} + {% endfor %} - {% for ad, ad_data in group.counts %} + {% for ad in dt.ads %} - {% for label, count, prev, docs_delta in ad_data %} - + {% endif %} + {% for state, state_name in dt.states %} + {% endfor %} @@ -47,9 +85,19 @@

    {{ group.group_type }} Stat

    - {% for label, count, prev in group.sums %} - + {% endif %} + {% for state, state_name in dt.states %} + {% endfor %} @@ -69,4 +117,177 @@

    {{ group.group_type }} Stat }); }); + + {{ data|json_script:"data" }} + + + + + {% endblock %} \ No newline at end of file diff --git a/ietf/templates/doc/badge/doc-badge-draft.html b/ietf/templates/doc/badge/doc-badge-draft.html new file mode 100644 index 0000000000..f7f66b6c5e --- /dev/null +++ b/ietf/templates/doc/badge/doc-badge-draft.html @@ -0,0 +1,16 @@ +{% load origin %} +{% load static %} +{% load ietf_filters %} +{% load person_filters %} +{% origin %} +{# Non-RFC #} + +{% if doc.became_rfc %} + This is an older version of an Internet-Draft that was ultimately published as {{doc.became_rfc.name|prettystdname}}. +{% elif snapshot and doc.rev != latest_rev %} + This is an older version of an Internet-Draft whose latest revision state is "{{ doc.doc.get_state }}". +{% else %} + {% if snapshot and doc.rev == latest_rev %}{{ doc.doc.get_state }}{% else %}{{ doc.get_state }}{% endif %} Internet-Draft + {% if submission %}({{ submission|safe }}){% endif %} + {% if resurrected_by %}- resurrect requested by {{ resurrected_by }}{% endif %} +{% endif %} \ No newline at end of file diff --git a/ietf/templates/doc/badge/doc-badge-rfc.html b/ietf/templates/doc/badge/doc-badge-rfc.html new file mode 100644 index 0000000000..780f14a54f --- /dev/null +++ b/ietf/templates/doc/badge/doc-badge-rfc.html @@ -0,0 +1,13 @@ +{% load origin %} +{% load static %} +{% load ietf_filters %} +{% load person_filters %} +{% origin %} + +RFC + {% if not document_html %} + - {{ doc.std_level }} + {% else %} + {{ doc.std_level }} + {% endif %} + diff --git a/ietf/templates/doc/ballot/approvaltext.html b/ietf/templates/doc/ballot/approvaltext.html index cc47c17156..3cb632b8f8 100644 --- a/ietf/templates/doc/ballot/approvaltext.html +++ b/ietf/templates/doc/ballot/approvaltext.html @@ -29,7 +29,7 @@

    href="{% url 'ietf.doc.views_ballot.approve_ballot' name=doc.name %}">Approve ballot {% endif %} + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/ballot/approve_ballot.html b/ietf/templates/doc/ballot/approve_ballot.html index ff8ab08946..30dd05fa43 100644 --- a/ietf/templates/doc/ballot/approve_ballot.html +++ b/ietf/templates/doc/ballot/approve_ballot.html @@ -21,7 +21,7 @@

    {% endif %} + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/ballot/approve_downrefs.html b/ietf/templates/doc/ballot/approve_downrefs.html index 6dea10a335..ad528c67bf 100644 --- a/ietf/templates/doc/ballot/approve_downrefs.html +++ b/ietf/templates/doc/ballot/approve_downrefs.html @@ -12,16 +12,16 @@

    The ballot for - {{ doc }} + {{ doc }} was just approved.

    {% if not downrefs_to_rfc %}

    No downward references for - {{ doc }} + {{ doc }}

    Back + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}">Back {% else %}

    Add downward references to RFCs to the DOWNREF registry, if they were identified in the IETF Last Call and approved by the Sponsoring Area Director. @@ -41,7 +41,7 @@

    {% csrf_token %} {% bootstrap_form approve_downrefs_form %} + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Add no DOWNREF entries diff --git a/ietf/templates/doc/ballot/ballot_issued.html b/ietf/templates/doc/ballot/ballot_issued.html index baf428ee6b..dfa03896e9 100644 --- a/ietf/templates/doc/ballot/ballot_issued.html +++ b/ietf/templates/doc/ballot/ballot_issued.html @@ -11,7 +11,7 @@

    Ballot for - {{ doc }} + {{ doc }} has been sent out.

    {% if doc.telechat_date %} @@ -24,5 +24,5 @@

    {% endif %} Back + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}">Back {% endblock %} diff --git a/ietf/templates/doc/ballot/clear_ballot.html b/ietf/templates/doc/ballot/clear_ballot.html index 1d5bc46005..09e7dfef1b 100644 --- a/ietf/templates/doc/ballot/clear_ballot.html +++ b/ietf/templates/doc/ballot/clear_ballot.html @@ -14,14 +14,14 @@

    {% csrf_token %}

    Clear the ballot for - {{ doc }}? + {{ doc }}?
    This will clear all ballot positions and discuss entries.

    + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/ballot/defer_ballot.html b/ietf/templates/doc/ballot/defer_ballot.html index 43fdc82a5f..ae7099e9e1 100644 --- a/ietf/templates/doc/ballot/defer_ballot.html +++ b/ietf/templates/doc/ballot/defer_ballot.html @@ -14,14 +14,14 @@

    {% csrf_token %}

    Defer the ballot for - {{ doc }}? + {{ doc }}?
    The ballot will then be put on the IESG agenda of {{ telechat_date }}.

    + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/ballot/edit_position.html b/ietf/templates/doc/ballot/edit_position.html index bf119236f5..b57e9a3652 100644 --- a/ietf/templates/doc/ballot/edit_position.html +++ b/ietf/templates/doc/ballot/edit_position.html @@ -20,24 +20,48 @@

    Ballot deferred by {{ ballot_deferred.by }} on {{ ballot_deferred.time|date:"Y-m-d" }}.

    {% endif %} +
    +
    + {% if form.errors or cc_select_form.errors or additional_cc_form.errors %} +
    + There were errors in the submitted form -- see below. Please correct these and resubmit. +
    + {% if form.errors %} +
    Position entry
    + {% bootstrap_form_errors form %} + {% endif %} + {% if cc_select_form.errors %} +
    CC selection
    + {% bootstrap_form_errors cc_select_form %} + {% endif %} + {% if additional_cc_form.errors %} +
    Additional Cc Addresses
    + {% bootstrap_form_errors additional_cc_form %} + {% endif %} + {% endif %}
    {% csrf_token %} {% for field in form %} {% if field.name == "discuss" %}
    {% endif %} {% bootstrap_field field %} {% if field.name == "discuss" and old_pos and old_pos.discuss_time %} -
    Last edited {{ old_pos.discuss_time }}
    +
    Last saved {{ old_pos.discuss_time }}
    {% elif field.name == "comment" and old_pos and old_pos.comment_time %} -
    Last edited {{ old_pos.comment_time }}
    +
    Last saved {{ old_pos.comment_time }}
    {% endif %} {% if field.name == "discuss" %}
    {% endif %} {% endfor %} + {% bootstrap_form cc_select_form %} + {% bootstrap_form additional_cc_form %}
    + - + {% if doc.type_id == "draft" or doc.type_id == "conflrev" %} {% if doc.stream.slug != "irtf" %} {% if ballot_deferred %} @@ -54,11 +78,28 @@

    {% endif %} {% endif %} + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back

    - + + + {% endblock %} {% block js %} + + {% endblock %} \ No newline at end of file diff --git a/ietf/templates/doc/ballot/lastcalltext.html b/ietf/templates/doc/ballot/lastcalltext.html index c1d7bd70d8..fe2b884c2b 100644 --- a/ietf/templates/doc/ballot/lastcalltext.html +++ b/ietf/templates/doc/ballot/lastcalltext.html @@ -39,7 +39,7 @@

    href="{% url 'ietf.doc.views_ballot.make_last_call' name=doc.name %}">Issue last call {% endif %} + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/ballot/rfceditornote.html b/ietf/templates/doc/ballot/rfceditornote.html index b4cc9dadca..8a6d57379d 100644 --- a/ietf/templates/doc/ballot/rfceditornote.html +++ b/ietf/templates/doc/ballot/rfceditornote.html @@ -31,7 +31,7 @@

    Clear + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/ballot/send_ballot_comment.html b/ietf/templates/doc/ballot/send_ballot_comment.html deleted file mode 100644 index ffd2d41191..0000000000 --- a/ietf/templates/doc/ballot/send_ballot_comment.html +++ /dev/null @@ -1,44 +0,0 @@ -{% extends "base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} -{% load origin %} -{% load django_bootstrap5 %} -{% load ietf_filters %} -{% block title %}Send ballot position for {{ balloter }} on {{ doc }}{% endblock %} -{% block content %} - {% origin %} -

    - Send ballot position for {{ balloter }} -
    - {{ doc }} -

    -
    - {% csrf_token %} -
    - - -
    -
    - - -
    - {% bootstrap_form cc_select_form %} -
    - - -
    Separate email addresses with commas.
    -
    -
    - - -
    -
    -

    Body

    -
    {{ body|maybewordwrap }}
    -
    - - - Back - - -{% endblock %} diff --git a/ietf/templates/doc/ballot/undefer_ballot.html b/ietf/templates/doc/ballot/undefer_ballot.html index da74326a2f..4e86698160 100644 --- a/ietf/templates/doc/ballot/undefer_ballot.html +++ b/ietf/templates/doc/ballot/undefer_ballot.html @@ -19,7 +19,7 @@

    + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/ballot/writeupnotes.html b/ietf/templates/doc/ballot/writeupnotes.html index 9c52493d86..8e985c15c7 100644 --- a/ietf/templates/doc/ballot/writeupnotes.html +++ b/ietf/templates/doc/ballot/writeupnotes.html @@ -15,11 +15,16 @@

    {% bootstrap_form ballot_writeup_form %}
    Technical summary, Working Group summary, document quality, personnel, IANA note. This text will be appended to all announcements and messages to the IRTF or RFC Editor. - {% if ballot_issue_danger %} + {% if warn_lc %}

    This document has not completed IETF Last Call. Please do not issue the ballot early without good reason.

    {% endif %} + {% if warn_unexpected_state %} +

    + This document is in an IESG state of "{{warn_unexpected_state}}". It would be unexpected to issue a ballot while in this state. +

    + {% endif %}
    + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/ballot_popup.html b/ietf/templates/doc/ballot_popup.html index 2a04ffab69..d2589cd54c 100644 --- a/ietf/templates/doc/ballot_popup.html +++ b/ietf/templates/doc/ballot_popup.html @@ -27,7 +27,7 @@ {% if editable and user|has_role:"Area Director,Secretariat,IRSG Member,RSAB Member" %} {% if user|can_ballot:doc %} + href="{% url "ietf.doc.views_ballot.edit_position" name=doc.name ballot_id=ballot_id %}?ballot_edit_return_point={{ ballot_edit_return_point|urlencode }}"> Edit position {% endif %} diff --git a/ietf/templates/doc/bofreq/bof_requests.html b/ietf/templates/doc/bofreq/bof_requests.html index 2a806b18a5..19c2e5dd13 100644 --- a/ietf/templates/doc/bofreq/bof_requests.html +++ b/ietf/templates/doc/bofreq/bof_requests.html @@ -1,7 +1,7 @@ {% extends "base.html" %} {# Copyright The IETF Trust 2021 All Rights Reserved #} {% load origin %} -{% load person_filters %} +{% load person_filters ietf_filters %} {% load static %} {% block pagehead %} @@ -26,40 +26,42 @@

    BOF Requests

    {% else %} {% regroup reqs by get_state_slug as grouped_reqs %} {% for req_group in grouped_reqs %} -

    {{ req_group.grouper|capfirst }} BOF Requests

    -

    - - - - - - - - - - - {% for req in req_group.list %} + {% if req_group.grouper != "spam" or request.user|has_role:"Secretariat" %} +

    {{ req_group.grouper|capfirst }} BOF Requests

    +
    NameDateTitleResponsibleEditors
    + - - - - - + + + + + - {% endfor %} - -
    - {{ req.name }}-{{ req.rev }} - {{ req.latest_revision_event.time|date:"Y-m-d" }}{{ req.title }} - {% for person in req.responsible %} - {% person_link person %}{% if not forloop.last %},{% endif %} - {% endfor %} - - {% for person in req.editors %} - {% person_link person %}{% if not forloop.last %},{% endif %} - {% endfor %} - NameDateTitleResponsibleEditors
    + + + {% for req in req_group.list %} + + + {{ req.name }}-{{ req.rev }} + + {{ req.latest_revision_event.time|date:"Y-m-d" }} + {{ req.title }} + + {% for person in req.responsible %} + {% person_link person %}{% if not forloop.last %},{% endif %} + {% endfor %} + + + {% for person in req.editors %} + {% person_link person %}{% if not forloop.last %},{% endif %} + {% endfor %} + + + {% endfor %} + + + {% endif %} {% endfor %} {% endif %} {% endblock %} diff --git a/ietf/templates/doc/bofreq/bofreq_template.md b/ietf/templates/doc/bofreq/bofreq_template.md index 7039e4679b..49c5e511a5 100644 --- a/ietf/templates/doc/bofreq/bofreq_template.md +++ b/ietf/templates/doc/bofreq/bofreq_template.md @@ -1,16 +1,15 @@ -# Name: Exact MPLS Edges (EXAMPLE) (There's an acronym for anything if you really want one ;-) +# Name: EXAct MPLs Edges (EXAMPLE) (There's an acronym for anything if you really want one ;-) ## Description Replace this with a few paragraphs describing the BOF request. Fill in the details below. Keep items in the order they appear here. ## Required Details -- Status: (not) WG Forming -- Responsible AD: name +- Status: "not WG Forming" or "WG forming" +- Responsible AD: name (or at least area(s) if you know) - BOF proponents: name , name (1-3 people - who are requesting and coordinating discussion for proposal) -- BOF chairs: TBD - Number of people expected to attend: 100 -- Length of session (1 or 2 hours): 2 hours +- Length of session (1 or usually 2 hours): 2 hours - Conflicts (whole Areas and/or WGs) - Chair Conflicts: TBD - Technology Overlap: TBD @@ -28,7 +27,7 @@ To allow evaluation of your proposal, please include the following items: - Items, Internet-Drafts, speakers, timing - Or a URL -## Links to the mailing list, draft charter if any, relevant Internet-Drafts, etc. +## Links to the mailing list, draft charter if any (for WG-forming BoF), relevant Internet-Drafts, etc. - Mailing List: https://www.ietf.org/mailman/listinfo/example - Draft charter: {{ settings.IDTRACKER_BASE_URL }}{% url 'ietf.doc.views_doc.document_main' name='charter-ietf-EXAMPLE' %} - Relevant Internet-Drafts: diff --git a/ietf/templates/doc/bofreq/change_editors.html b/ietf/templates/doc/bofreq/change_editors.html index 98c4ddb3a0..0c30cdecb4 100644 --- a/ietf/templates/doc/bofreq/change_editors.html +++ b/ietf/templates/doc/bofreq/change_editors.html @@ -16,7 +16,7 @@

    {% bootstrap_form form %} + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/bofreq/change_responsible.html b/ietf/templates/doc/bofreq/change_responsible.html index aabb0048e4..8c51c6e1f4 100644 --- a/ietf/templates/doc/bofreq/change_responsible.html +++ b/ietf/templates/doc/bofreq/change_responsible.html @@ -16,7 +16,7 @@

    {% bootstrap_form form %} + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/bofreq/new_bofreq.html b/ietf/templates/doc/bofreq/new_bofreq.html index 9506d9c8e6..cda6f73b90 100644 --- a/ietf/templates/doc/bofreq/new_bofreq.html +++ b/ietf/templates/doc/bofreq/new_bofreq.html @@ -1,20 +1,29 @@ {% extends "base.html" %} -{# Copyright The IETF Trust 2021, All Rights Reserved #} +{# Copyright The IETF Trust 2021-2026, All Rights Reserved #} {% load origin django_bootstrap5 static textfilters %} {% block title %}Start a new BOF Request{% endblock %} {% block content %} {% origin %}

    Start a new BOF Request

    - The IAB will also attempt to provide BoF Shepherds as described in their document on the subject only on request from the IESG. If you feel that your BoF would benefit from an IAB BoF Shepherd, please discuss this with your Area Director. + BoF proponents are strongly encouraged to review the following sources before submitting requests:

    +

    - Choose a short descriptive title for your request. Take time to choose a good initial title - it will be used to make the filename for your request's content. The title can be changed later, but the filename will not change. + The IAB will also attempt to provide BoF Shepherds as described in their document on the subject only on request from the IESG. + If you feel that your BoF would benefit from an IAB BoF Shepherd, please discuss this with your Area Director. +

    +

    + Choose a short descriptive title for your request. Take time to choose a good initial title - it will be used to make the filename for your request's content. + The title can be changed later, but the filename will not change.

    For example, a request with a title of "A new important bit" will be saved as bofreq-{{ user.person.last_name|xslugify|slice:"64" }}-a-new-important-bit-00.md.

    -

All the items in the template MUST be filled in.

    +

All the items in the template MUST be filled in.

    diff --git a/ietf/templates/doc/change_ad.html b/ietf/templates/doc/change_ad.html index 24e8089510..9cc7437574 100644 --- a/ietf/templates/doc/change_ad.html +++ b/ietf/templates/doc/change_ad.html @@ -15,7 +15,7 @@

    {% bootstrap_form form %} + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/change_title.html b/ietf/templates/doc/change_title.html index 25026be351..14d7956cfe 100644 --- a/ietf/templates/doc/change_title.html +++ b/ietf/templates/doc/change_title.html @@ -15,7 +15,7 @@

    {% bootstrap_form form %} + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/charter/action_announcement_text.html b/ietf/templates/doc/charter/action_announcement_text.html index 88a1b61412..e087b175b4 100644 --- a/ietf/templates/doc/charter/action_announcement_text.html +++ b/ietf/templates/doc/charter/action_announcement_text.html @@ -21,7 +21,7 @@

    {% if user|has_role:"Secretariat" %} + href="{% url 'ietf.doc.views_charter.approve' name=charter.name %}"> Charter approval page {% endif %} diff --git a/ietf/templates/doc/charter/approve.html b/ietf/templates/doc/charter/approve.html index f109da6872..2a8654482e 100644 --- a/ietf/templates/doc/charter/approve.html +++ b/ietf/templates/doc/charter/approve.html @@ -2,16 +2,16 @@ {# Copyright The IETF Trust 2015, All Rights Reserved #} {% load origin %} {% load django_bootstrap5 %} -{% block title %}Approve {{ charter.canonical_name }}{% endblock %} +{% block title %}Approve {{ charter.name }}{% endblock %} {% block content %} {% origin %} -

    Approve {{ charter.canonical_name }}-{{ charter.rev }}

    +

    Approve {{ charter.name }}-{{ charter.rev }}

    {% csrf_token %}
    {{ announcement }}
    + href="{% url "ietf.doc.views_charter.action_announcement_text" name=charter.name %}?next=approve"> Edit/regenerate announcement Change responsible AD
    - {{ charter.canonical_name }}-{{ charter.rev }} + {{ charter.name }}-{{ charter.rev }} {% csrf_token %} {% bootstrap_form form %}
    + href="{% url "ietf.doc.views_doc.document_main" name=charter.name %}"> Back
    diff --git a/ietf/templates/doc/charter/issue_ballot_mail.txt b/ietf/templates/doc/charter/issue_ballot_mail.txt index 12fc44bbbc..914935bb12 100644 --- a/ietf/templates/doc/charter/issue_ballot_mail.txt +++ b/ietf/templates/doc/charter/issue_ballot_mail.txt @@ -1,6 +1,6 @@ -{% load ietf_filters %}{% autoescape off %}To: {{ to }} {% if cc %} -Cc: {{ cc }} -{% endif %}From: IESG Secretary +{% load ietf_filters %}{% autoescape off %}To: {{ to }}{% if cc %} +Cc: {{ cc }}{% endif %} +From: IESG Secretary Reply-To: IESG Secretary Subject: Evaluation: {{ doc.name }} diff --git a/ietf/templates/doc/conflict_review/approval_text.txt b/ietf/templates/doc/conflict_review/approval_text.txt index 8e211e21ce..a52ac11a71 100644 --- a/ietf/templates/doc/conflict_review/approval_text.txt +++ b/ietf/templates/doc/conflict_review/approval_text.txt @@ -1,9 +1,9 @@ {% load ietf_filters %}{% load mail_filters %}{% autoescape off %}From: The IESG To: {{ to }} Cc: {{ cc }} -Subject: Results of IETF-conflict review for {{conflictdoc.canonical_name}}-{{conflictdoc.rev}} +Subject: Results of IETF-conflict review for {{conflictdoc.name}}-{{conflictdoc.rev}} -{% filter wordwrap:78 %}The IESG has completed a review of {{conflictdoc.canonical_name}}-{{conflictdoc.rev}} consistent with RFC5742. +{% filter wordwrap:78 %}The IESG has completed a review of {{conflictdoc.name}}-{{conflictdoc.rev}} consistent with RFC5742. {% if review.get_state_slug == 'appr-reqnopub-pend' %} The IESG recommends that '{{ conflictdoc.title }}' {{ conflictdoc.file_tag|safe }} NOT be published as {{ conflictdoc|std_level_prompt_with_article }}. diff --git a/ietf/templates/doc/conflict_review/approve.html b/ietf/templates/doc/conflict_review/approve.html index 5283587f07..ccbac9c4cb 100644 --- a/ietf/templates/doc/conflict_review/approve.html +++ b/ietf/templates/doc/conflict_review/approve.html @@ -2,10 +2,10 @@ {# Copyright The IETF Trust 2015, All Rights Reserved #} {% load origin %} {% load django_bootstrap5 %} -{% block title %}Approve {{ review.canonical_name }}{% endblock %} +{% block title %}Approve {{ review.name }}{% endblock %} {% block content %} {% origin %} -

    Approve {{ review.canonical_name }}

    +

    Approve {{ review.name }}

    {% csrf_token %} {% bootstrap_form form %} diff --git a/ietf/templates/doc/conflict_review/start.html b/ietf/templates/doc/conflict_review/start.html index 9b96ec6179..d8abc2b811 100644 --- a/ietf/templates/doc/conflict_review/start.html +++ b/ietf/templates/doc/conflict_review/start.html @@ -3,13 +3,13 @@ {% load origin %} {% load django_bootstrap5 %} {% load ietf_filters %} -{% block title %}Begin IETF conflict review for {{ doc_to_review.canonical_name }}-{{ doc_to_review.rev }}{% endblock %} +{% block title %}Begin IETF conflict review for {{ doc_to_review.name }}-{{ doc_to_review.rev }}{% endblock %} {% block content %} {% origin %}

    Begin IETF conflict review
    - {{ doc_to_review.canonical_name }}-{{ doc_to_review.rev }} + {{ doc_to_review.name }}-{{ doc_to_review.rev }}

    {% if user|has_role:"Secretariat" %}

    diff --git a/ietf/templates/doc/conflict_review/submit.html b/ietf/templates/doc/conflict_review/submit.html index 586de93560..8259c6b12f 100644 --- a/ietf/templates/doc/conflict_review/submit.html +++ b/ietf/templates/doc/conflict_review/submit.html @@ -2,16 +2,16 @@ {# Copyright The IETF Trust 2015, All Rights Reserved #} {% load origin %} {% load django_bootstrap5 %} -{% block title %}Edit conflict review for {{ conflictdoc.canonical_name }}-{{ conflictdoc.rev }}{% endblock %} +{% block title %}Edit conflict review for {{ conflictdoc.name }}-{{ conflictdoc.rev }}{% endblock %} {% block content %} {% origin %}

    Edit conflict review
    - {{ conflictdoc.canonical_name }}-{{ conflictdoc.rev }} + {{ conflictdoc.name }}-{{ conflictdoc.rev }}

    - The text will be submitted as {{ review.canonical_name }}-{{ next_rev }} + The text will be submitted as {{ review.name }}-{{ next_rev }}

    {% csrf_token %} @@ -27,7 +27,7 @@

    Reset to template text + href="{% url "ietf.doc.views_doc.document_main" name=review.name %}"> Back diff --git a/ietf/templates/doc/disclaimer.html b/ietf/templates/doc/disclaimer.html new file mode 100644 index 0000000000..db4c42ed68 --- /dev/null +++ b/ietf/templates/doc/disclaimer.html @@ -0,0 +1,34 @@ +{# Copyright The IETF Trust 2016-2023, All Rights Reserved #} +{% load origin %} +{% load ietf_filters %} +{% origin %} +{% if doc.type_id == "rfc" %} + {% if doc.stream.slug != "ietf" and doc.stream.desc != "Legacy" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %} + + {% elif doc.stream.slug != "ietf" and doc.stream.desc == "Legacy" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %} + + {% endif %} +{% elif doc|is_in_stream %} + {% if doc.stream.slug != "ietf" and doc.std_level.slug|default:"unk" not in "bcp,ds,ps,std"|split:"," %} + + {% endif %} +{% else %} + +{% endif %} diff --git a/ietf/templates/doc/document_ballot_content.html b/ietf/templates/doc/document_ballot_content.html index 803ed84a36..e0feb78bc7 100644 --- a/ietf/templates/doc/document_ballot_content.html +++ b/ietf/templates/doc/document_ballot_content.html @@ -60,7 +60,7 @@ {% if user|can_ballot:doc %} + href="{% url "ietf.doc.views_ballot.edit_position" name=doc.name ballot_id=ballot.pk %}?ballot_edit_return_point={{ request.path|urlencode }}"> Edit position {% endif %} diff --git a/ietf/templates/doc/document_bibtex.bib b/ietf/templates/doc/document_bibtex.bib index 5dda4649eb..5e52ec3c58 100644 --- a/ietf/templates/doc/document_bibtex.bib +++ b/ietf/templates/doc/document_bibtex.bib @@ -3,7 +3,7 @@ {% load ietf_filters %} {% load textfilters %} -{% if doc.get_state_slug == "rfc" %} +{% if doc.type_id == "rfc" %} {% if doc.stream|slugify == "legacy" %} % Datatracker information for RFCs on the Legacy Stream is unfortunately often % incorrect. 
Please correct the bibtex below based on the information in the @@ -16,7 +16,7 @@ @misc{ publisher = {RFC Editor}, doi = {% templatetag openbrace %}{{ doi }}{% templatetag closebrace %}, url = {% templatetag openbrace %}{{ doc.rfc_number|rfceditor_info_url }}{% templatetag closebrace %},{% else %} -{% if published %}%% You should probably cite rfc{{ latest_revision.doc.rfc_number }} instead of this I-D.{% else %}{% if replaced_by %}%% You should probably cite {{replaced_by|join:" or "}} instead of this I-D.{% else %} +{% if published_as %}%% You should probably cite rfc{{ published_as.rfc_number }} instead of this I-D.{% else %}{% if replaced_by %}%% You should probably cite {{replaced_by|join:" or "}} instead of this I-D.{% else %} {% if doc.rev != latest_revision.rev %}%% You should probably cite {{latest_revision.doc.name}}-{{latest_revision.rev}} instead of this revision.{%endif%}{% endif %}{% endif %} @techreport{% templatetag openbrace %}{{doc.name|slice:"6:"}}-{{doc.rev}}, number = {% templatetag openbrace %}{{doc.name}}-{{doc.rev}}{% templatetag closebrace %}, @@ -25,11 +25,11 @@ @techreport{ publisher = {% templatetag openbrace %}Internet Engineering Task Force{% templatetag closebrace %}, note = {% templatetag openbrace %}Work in Progress{% templatetag closebrace %}, url = {% templatetag openbrace %}{{ settings.IDTRACKER_BASE_URL }}{% url 'ietf.doc.views_doc.document_main' name=doc.name rev=doc.rev %}{% templatetag closebrace %},{% endif %} - author = {% templatetag openbrace %}{% for author in doc.documentauthor_set.all %}{{ author.person.name|texescape}}{% if not forloop.last %} and {% endif %}{% endfor %}{% templatetag closebrace %}, + author = {% templatetag openbrace %}{% for author in doc.documentauthor_set.all %}{{ author.person.name|texescape}}{% if not forloop.last %} and {% endif %}{% endfor %}{% templatetag closebrace %}, title = {% templatetag openbrace %}{% templatetag openbrace %}{{doc.title|texescape}}{% templatetag closebrace %}{% templatetag closebrace %}, pagetotal = {{ doc.pages }}, year = {{ doc.pub_date.year }}, - month = {{ doc.pub_date|date:"b" }},{% if not doc.rfc_number or doc.pub_date.day == 1 and doc.pub_date.month == 4 %} + month = {{ doc.pub_date|date:"b" }},{% if not doc.type_id == "rfc" or doc.pub_date.day == 1 and doc.pub_date.month == 4 %} day = {{ doc.pub_date.day }},{% endif %} abstract = {% templatetag openbrace %}{{ doc.abstract|clean_whitespace|texescape }}{% templatetag closebrace %}, {% templatetag closebrace %} diff --git a/ietf/templates/doc/document_charter.html b/ietf/templates/doc/document_charter.html index 08584849d8..7564e1d213 100644 --- a/ietf/templates/doc/document_charter.html +++ b/ietf/templates/doc/document_charter.html @@ -227,10 +227,10 @@ {% if doc.rev != "" %}
    - {{ doc.canonical_name }}-{{ doc.rev }} + {{ doc.name }}-{{ doc.rev }}
    -
    {{ content|maybewordwrap|urlize_ietf_docs|linkify }}
    + {{ content }}
    {% endif %} diff --git a/ietf/templates/doc/document_conflict_review.html b/ietf/templates/doc/document_conflict_review.html index f70f5925f6..8a2361832b 100644 --- a/ietf/templates/doc/document_conflict_review.html +++ b/ietf/templates/doc/document_conflict_review.html @@ -27,8 +27,8 @@ - - {% if conflictdoc.get_state_slug == 'rfc' %}{{ conflictdoc.canonical_name|prettystdname }}{% else %}{{ conflictdoc.canonical_name }}-{{ conflictdoc.rev }}{% endif %} + + {% if conflictdoc.type_id == 'rfc' %}{{ conflictdoc.name|prettystdname }}{% else %}{{ conflictdoc.name }}-{{ conflictdoc.rev }}{% endif %} {{ conflictdoc.stream }} stream {% if snapshot %}Snapshot{% endif %} diff --git a/ietf/templates/doc/document_draft.html b/ietf/templates/doc/document_draft.html index 09cab69208..eab1d779fb 100644 --- a/ietf/templates/doc/document_draft.html +++ b/ietf/templates/doc/document_draft.html @@ -1,5 +1,5 @@ {% extends "base.html" %} -{# Copyright The IETF Trust 2016-2023, All Rights Reserved #} +{# Copyright The IETF Trust 2016-2024, All Rights Reserved #} {% load origin %} {% load static %} {% load ietf_filters %} @@ -13,11 +13,11 @@ title="Document changes" href="/feed/document-changes/{{ name }}/"> + content="{{ doc.title }} {% if doc.type_id == 'rfc' and not snapshot %}(RFC {{ rfc_number }}{% if published %}, {{ doc.pub_date|date:'F Y' }}{% endif %}{% if obsoleted_by %}; obsoleted by {% for rel in obsoleted_by %}{{ rel.source.name|prettystdname}}{% if not forloop.last%}, {% endif %}{% endfor %}{% endif %}){% endif %}"> {% endblock %} {% block morecss %}.inline { display: inline; }{% endblock %} {% block title %} - {% if doc.get_state_slug == "rfc" and not snapshot %} + {% if doc.type_id == "rfc" and not snapshot %} RFC {{ rfc_number }} - {{ doc.title }} {% else %} {{ name }}-{{ doc.rev }} - {{ doc.title }} @@ -27,11 +27,12 @@ {% origin %} {{ top|safe }} {% include "doc/revisions_list.html" with document_html=document_html %} + {% include "doc/disclaimer.html" with document_html=document_html %}
    {% if doc.rev != latest_rev %}
    The information below is for an old version of the document.
    {% else %} - {% if doc.get_state_slug == "rfc" and snapshot %} + {% if doc.became_rfc %}
    The information below is for an old version of the document that is already published as an RFC.
    @@ -62,7 +63,12 @@ {% if doc.stream and can_edit_stream_info and doc.stream.slug != "legacy" and not snapshot %} + {% if doc|is_doc_ietf_adoptable or doc|can_issue_ietf_wg_lc or doc|can_submit_to_iesg %} + href="{% url 'ietf.doc.views_draft.offer_wg_action_helpers' name=doc.name %}" + {% else %} + href="{% url 'ietf.doc.views_draft.change_stream_state' name=doc.name state_type=stream_state_type_slug %}" + {% endif %} + > Edit {% endif %} @@ -267,7 +273,7 @@ {% endif %} - {% if not doc.stream_id == 'iab' %} + {% if doc.stream_id != 'iab' and doc.stream_id != 'editorial' %} @@ -303,7 +309,7 @@ Action Holder{{ doc.documentactionholder_set.all|pluralize }} - {% if can_edit %} + {% if can_edit_action_holders %} Edit @@ -318,7 +324,7 @@ {% person_link action_holder.person title=action_holder.role_for_doc %} {{ action_holder|action_holder_badge }} {% endfor %} - {% if can_edit %} + {% if can_edit_action_holders %} @@ -447,125 +453,127 @@ {% endif %} - {% if can_edit_iana_state or iana_review_state or iana_experts_state or iana_experts_comment %} - - {% if iana_review_state or can_edit_iana_state %} - - - IANA - - - - IANA review state - - - - {% if can_edit_iana_state and not snapshot %} - - Edit - - {% endif %} - - - {% if not iana_review_state %} - - (None) - - {% else %} - {{ iana_review_state }} - {% endif %} - - - {% endif %} - {% if iana_action_state or can_edit_iana_state %} - - - {% if not can_edit_iana_state and not iana_review_state %}IANA{% endif %} - - - - IANA action state - - - - {% if can_edit_iana_state and not snapshot %} - - Edit + {% if doc.stream_id != 'editorial' %} + {% if can_edit_iana_state or iana_review_state or iana_experts_state or iana_experts_comment %} + + {% if iana_review_state or can_edit_iana_state %} + + + IANA + + + + IANA review state - {% endif %} - - - {% if not iana_action_state %} - - (None) - - {% else %} - {{ iana_action_state }} - {% endif %} - - - {% endif %} - {% if iana_experts_state or can_edit_iana_state %} - - - {% if not can_edit_iana_state and not iana_review_state and not iana_action_state %}IANA{% endif %} - - - - IANA expert review state - - - - {% if can_edit_iana_state and not snapshot %} - - Edit + + + {% if can_edit_iana_state and not snapshot %} + + Edit + + {% endif %} + + + {% if not iana_review_state %} + + (None) + + {% else %} + {{ iana_review_state }} + {% endif %} + + + {% endif %} + {% if iana_action_state or can_edit_iana_state %} + + + {% if not can_edit_iana_state and not iana_review_state %}IANA{% endif %} + + + + IANA action state - {% endif %} - - - {% if not iana_experts_state %} - - (None) - - {% else %} - {{ iana_experts_state }} - {% endif %} - - - {% endif %} - {% if iana_experts_comment or can_edit_iana_state %} - - - {% if not can_edit_iana_state and not iana_review_state and not iana_action_state and not iana_experts_state %} - IANA - {% endif %} - - - IANA expert review comments - - - {% if can_edit_iana_state and not snapshot %} - - Edit + + + {% if can_edit_iana_state and not snapshot %} + + Edit + + {% endif %} + + + {% if not iana_action_state %} + + (None) + + {% else %} + {{ iana_action_state }} + {% endif %} + + + {% endif %} + {% if iana_experts_state or can_edit_iana_state %} + + + {% if not can_edit_iana_state and not iana_review_state and not iana_action_state %}IANA{% endif %} + + + + IANA expert review state - {% endif %} - - - {% if not iana_experts_comment %} - - (None) - - {% else %} - {{ iana_experts_comment }} - {% endif %} - - - {% endif %} - + + + {% if can_edit_iana_state and not snapshot 
%} + + Edit + + {% endif %} + + + {% if not iana_experts_state %} + + (None) + + {% else %} + {{ iana_experts_state }} + {% endif %} + + + {% endif %} + {% if iana_experts_comment or can_edit_iana_state %} + + + {% if not can_edit_iana_state and not iana_review_state and not iana_action_state and not iana_experts_state %} + IANA + {% endif %} + + + IANA expert review comments + + + {% if can_edit_iana_state and not snapshot %} + + Edit + + {% endif %} + + + {% if not iana_experts_comment %} + + (None) + + {% else %} + {{ iana_experts_comment }} + {% endif %} + + + {% endif %} + + {% endif %} {% endif %} {% if rfc_editor_state %} @@ -637,14 +645,14 @@ {% endif %} References @@ -658,62 +666,42 @@ Nits - + {% if user|has_role:"Area Director" %} + {# IDNITS3 is an experimental service, so only show it to Area Directors #} + + + + Nits-v3 (Experimental) + + {% endif %} + + + + Search email archive + {% if user.is_authenticated %} Untrack Track {% endif %} - {% if user.review_teams %} + {% if user.person.review_teams %} @@ -721,7 +709,7 @@ Remove review wishes - @@ -751,10 +739,10 @@ {% endfor %} {% endif %} - {% if doc.get_state_slug == "active" or doc.get_state_slug == "rfc" %} + {% if doc.get_state_slug == "active" or doc.type_id == "rfc" or doc.became_rfc %}

    Date - Rev. + {% if doc.type_id not in "rfc,bcp,std,fyi" %}Rev.{% endif %} By Action @@ -45,7 +71,7 @@

    Document history

    {{ e.time|date:"Y-m-d" }}
    - {{ e.rev }} + {% if doc.type_id not in "rfc,bcp,std,fyi" %}{{ e.rev }}{% endif %} {{ e.by|escape }} {{ e.desc|format_history_text }} diff --git a/ietf/templates/doc/document_history_form.html b/ietf/templates/doc/document_history_form.html index 30e0d28000..646da0038b 100644 --- a/ietf/templates/doc/document_history_form.html +++ b/ietf/templates/doc/document_history_form.html @@ -11,7 +11,6 @@
    {% endif %} -{% with prev_rev=doc.rev|add:"-1"|stringformat:"02d" %} + value="{{ mail_archive_query_data.query_value }}">
    @@ -144,13 +150,5 @@

    {% endblock %} {% block js %} - {% endblock %} \ No newline at end of file diff --git a/ietf/templates/doc/review/edit_request_comment.html b/ietf/templates/doc/review/edit_request_comment.html index 420e4f038d..50afa19acb 100644 --- a/ietf/templates/doc/review/edit_request_comment.html +++ b/ietf/templates/doc/review/edit_request_comment.html @@ -14,7 +14,7 @@

    {% bootstrap_form form %} + href="{% url "ietf.doc.views_review.review_request" name=review_req.doc.name request_id=review_req.pk %}"> Back diff --git a/ietf/templates/doc/review/edit_request_deadline.html b/ietf/templates/doc/review/edit_request_deadline.html index 0f8f32550d..7706f96396 100644 --- a/ietf/templates/doc/review/edit_request_deadline.html +++ b/ietf/templates/doc/review/edit_request_deadline.html @@ -17,7 +17,7 @@

    {% bootstrap_form form %} + href="{% url "ietf.doc.views_review.review_request" name=review_req.doc.name request_id=review_req.pk %}"> Back diff --git a/ietf/templates/doc/review/mark_reviewer_assignment_no_response.html b/ietf/templates/doc/review/mark_reviewer_assignment_no_response.html index be123f5a7c..4bc4254785 100644 --- a/ietf/templates/doc/review/mark_reviewer_assignment_no_response.html +++ b/ietf/templates/doc/review/mark_reviewer_assignment_no_response.html @@ -19,7 +19,7 @@

    name="action" value="noresponse">Mark assignment as no-response + href="{% url "ietf.doc.views_review.review_request" name=assignment.review_request.doc.name request_id=assignment.review_request.pk %}"> Back diff --git a/ietf/templates/doc/review/reject_reviewer_assignment.html b/ietf/templates/doc/review/reject_reviewer_assignment.html index 71bdf8ee43..7fbde360cd 100644 --- a/ietf/templates/doc/review/reject_reviewer_assignment.html +++ b/ietf/templates/doc/review/reject_reviewer_assignment.html @@ -18,7 +18,7 @@

    {% csrf_token %} {% bootstrap_form form %} + href="{% url "ietf.doc.views_review.review_request" name=doc.name request_id=review_req.pk %}"> Back diff --git a/ietf/templates/doc/review/request_info.html b/ietf/templates/doc/review/request_info.html index ee46916b43..51aea10a02 100644 --- a/ietf/templates/doc/review/request_info.html +++ b/ietf/templates/doc/review/request_info.html @@ -74,13 +74,13 @@ {% person_link review_req.requested_by %} {% endif %} - {% if review_req.doc.authors %} + {% if review_req.doc.author_persons_or_names %} Authors - {% for author in review_req.doc.authors %} - {% person_link author %}{% if not forloop.last %},{% endif %} + {% for person, tp_name in review_req.doc.author_persons_or_names %} + {% if person %}{% person_link person %}{% else %}{{ tp_name }}{% endif %}{% if not forloop.last %},{% endif %} {% endfor %} @@ -96,7 +96,7 @@ {% endif %} - {% if doc.time %} + {% if review_req.doc.time %} @@ -104,7 +104,10 @@ I-D last updated - {{ doc.time|date:"Y-m-d" }} + {{ review_req.doc.time|date:"Y-m-d" }} + {% if review_req.doc.pub_date %} + (Latest revision {{ review_req.doc.pub_date|date:"Y-m-d" }}) + {% endif %} {% endif %} diff --git a/ietf/templates/doc/review/request_review.html b/ietf/templates/doc/review/request_review.html index d7980b1a56..b765871d65 100644 --- a/ietf/templates/doc/review/request_review.html +++ b/ietf/templates/doc/review/request_review.html @@ -44,7 +44,7 @@

    {% bootstrap_field form.comment layout="horizontal" %} + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/review/submit_unsolicited_review.html b/ietf/templates/doc/review/submit_unsolicited_review.html index 615694f3e0..5db7e9cd62 100644 --- a/ietf/templates/doc/review/submit_unsolicited_review.html +++ b/ietf/templates/doc/review/submit_unsolicited_review.html @@ -18,7 +18,7 @@

    {% bootstrap_form form layout="horizontal" %} + href="{% url "ietf.doc.views_doc.document_main" name=doc.name %}"> Back diff --git a/ietf/templates/doc/review/withdraw_reviewer_assignment.html b/ietf/templates/doc/review/withdraw_reviewer_assignment.html index db598532f2..aed48ba78b 100644 --- a/ietf/templates/doc/review/withdraw_reviewer_assignment.html +++ b/ietf/templates/doc/review/withdraw_reviewer_assignment.html @@ -16,7 +16,7 @@

    {% csrf_token %} + href="{% url "ietf.doc.views_review.review_request" name=assignment.review_request.doc.name request_id=assignment.review_request.pk %}"> Back diff --git a/ietf/templates/doc/revisions_list.html b/ietf/templates/doc/revisions_list.html index cde628ddae..761d4cd04b 100644 --- a/ietf/templates/doc/revisions_list.html +++ b/ietf/templates/doc/revisions_list.html @@ -6,31 +6,46 @@

{% endif %} {# Notes #} - {% if use_notes %} + {% if meeting.uses_notes %}
Notepad for note-takers @@ -303,16 +304,16 @@
{% else %} {# chat logs #} - {% if meeting.number|add:"0" >= 60 %} + {% if meeting.has_chat_logs and session.chatlog %}
+ href="/doc/{{ session.chatlog.document.name }}"> Chat logs
{% endif %} {# Recordings #} - {% if meeting.number|add:"0" >= 80 %} + {% if meeting.has_recordings %} {% with session.recordings as recordings %} {% if recordings %} {# There's no guaranteed order, so this is a bit messy: #} @@ -351,7 +352,7 @@ {% if session.video_stream_url %}
+ href="{{ session.session_recording_url }}"> Session recording
  • diff --git a/ietf/templates/meeting/session_cancel_notification.txt b/ietf/templates/meeting/session_cancel_notification.txt new file mode 100644 index 0000000000..3de67fc8f4 --- /dev/null +++ b/ietf/templates/meeting/session_cancel_notification.txt @@ -0,0 +1,4 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% autoescape off %}{% load ams_filters %} + +A request to cancel a meeting session has just been submitted by {{ requester }}.{% endautoescape %} diff --git a/ietf/templates/meeting/session_details.html b/ietf/templates/meeting/session_details.html index 571715b4b4..a4d9ba1090 100644 --- a/ietf/templates/meeting/session_details.html +++ b/ietf/templates/meeting/session_details.html @@ -1,5 +1,5 @@ {% extends "base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} +{# Copyright The IETF Trust 2015-2026, All Rights Reserved #} {% load origin ietf_filters static %} {% block title %}{{ meeting }} : {{ group.acronym }}{% endblock %} {% block morecss %} @@ -31,93 +31,58 @@

    Scheduled Sessions

    {% include 'meeting/session_details_panel.html' with sessions=scheduled_sessions %}

    Unscheduled Sessions

    {% include 'meeting/session_details_panel.html' with sessions=unscheduled_sessions %} - {% if pending_suggestions %} -

    + {% for s in pending_suggestions %} + {% if forloop.first %}

    {% if can_manage_materials %} Proposed slides awaiting your approval {% else %} Your proposed slides awaiting chair approval {% endif %}

    -
    - {% for s in pending_suggestions %} - {% if can_manage_materials %} -

    - - {{ s.submitter }} - {{ s.title }} ({{ s.time }}) - -

    - {% else %} -

    - {{ s.title }} ({{ s.time }}) -

    - {% endif %} - {% endfor %} +
    {% endif %} + {% if can_manage_materials %} +

    + + {{ s.submitter }} - {{ s.title }} ({{ s.time }}) + +

    + {% else %} +

    + {{ s.title }} ({{ s.time }}) +

    + {% endif %} + {% if forloop.last %}
    {% endif %} + {% endfor %} + {% if user|has_role:"Secretariat" %} +
    +
    + Secretariat Only +
    +
    +
    + {% csrf_token %} + +
    +
    {% endif %} + {% comment %} + The existence of an element with id canManageMaterialsFlag is checked in + session_details.js to determine whether it should init the sortable tables. + Not the most elegant approach, but it works. + {% endcomment %} + {% if can_manage_materials %}
    {% endif %} {% endblock %} {% block js %} - {% if can_manage_materials %} - {% endif %} + {% endblock %} \ No newline at end of file diff --git a/ietf/templates/meeting/session_details_form.html b/ietf/templates/meeting/session_details_form.html index 6b59e7dacd..9cd1b6e85c 100644 --- a/ietf/templates/meeting/session_details_form.html +++ b/ietf/templates/meeting/session_details_form.html @@ -1,42 +1,48 @@ -{# Copyright The IETF Trust 2007-2020, All Rights Reserved #} +{# Copyright The IETF Trust 2007-2025, All Rights Reserved #} +{% load django_bootstrap5 %} +
    {% if hidden %} {{ form.name.as_hidden }}{{ form.purpose.as_hidden }}{{ form.type.as_hidden }}{{ form.requested_duration.as_hidden }} {{ form.has_onsite_tool.as_hidden }} {% else %} - - {% comment %} The form-group class is used by session_details_form.js to identify the correct element to hide the name / purpose / type fields when not needed. This is a bootstrap class - the secr app does not use it, so this (and the hidden class, also needed by session_details_form.js) are defined in edit.html and new.html as a kludge to make this work. {% endcomment %} - - - - - - - - - - - - - {% if not hide_onsite_tool_prompt %} - - - - - {% endif %} - -
    {{ form.name.label_tag }}{{ form.name }}{{ form.purpose.errors }}
    {{ form.purpose.label_tag }} - {{ form.purpose }}
    {{ form.type }}
    - {{ form.purpose.errors }}{{ form.type.errors }} -
    {{ form.requested_duration.label_tag }}{{ form.requested_duration }}{{ form.requested_duration.errors }}
    {{ form.has_onsite_tool.label_tag }}{{ form.has_onsite_tool }}{{ form.has_onsite_tool.errors }}
    - {% if hide_onsite_tool_prompt %}{{ form.has_onsite_tool.as_hidden }}{% endif %} + +
    + {% bootstrap_field form.name layout="horizontal" %} +
    + +
    +
    + +
    {{ form.purpose }}
    +
    {{ form.type }}
    + {{ form.purpose.errors }}{{ form.type.errors }} +
    +
    + + {% bootstrap_field form.requested_duration layout="horizontal" %} + {% if not hide_onsite_tool_prompt %} + {% bootstrap_field form.has_onsite_tool layout="horizontal" %} + {% endif %} + + {% if hide_onsite_tool_prompt %} + {{ form.has_onsite_tool.as_hidden }} + {% endif %} {% endif %} + {# hidden fields included whether or not the whole form is hidden #} - {{ form.attendees.as_hidden }}{{ form.comments.as_hidden }}{{ form.id.as_hidden }}{{ form.on_agenda.as_hidden }}{{ form.DELETE.as_hidden }}{{ form.remote_instructions.as_hidden }}{{ form.short.as_hidden }}{{ form.agenda_note.as_hidden }} -
    \ No newline at end of file + {{ form.attendees.as_hidden }} + {{ form.comments.as_hidden }} + {{ form.id.as_hidden }} + {{ form.on_agenda.as_hidden }} + {{ form.DELETE.as_hidden }} + {{ form.remote_instructions.as_hidden }} + {{ form.short.as_hidden }} + {{ form.agenda_note.as_hidden }} +
    diff --git a/ietf/templates/meeting/session_details_panel.html b/ietf/templates/meeting/session_details_panel.html index 3ff09fc33b..7c52ac0b4a 100644 --- a/ietf/templates/meeting/session_details_panel.html +++ b/ietf/templates/meeting/session_details_panel.html @@ -9,7 +9,7 @@ {% if meeting.type.slug == 'interim' %} {% include "meeting/interim_session_buttons.html" with show_agenda=False show_empty=False %} {% else %} - {% include "meeting/session_buttons_include.html" with show_agenda=False item=session.official_timeslotassignment use_notes=session.meeting.use_notes %} + {% include "meeting/session_buttons_include.html" with show_agenda=False item=session.official_timeslotassignment %} {% endif %} {% endif %} @@ -62,7 +62,7 @@

    {% endif %}

    Agenda, Minutes, and Bluesheets

    - {% if session.filtered_artifacts %} + {% if session.filtered_artifacts or session.bluesheet_title %} {% for pres in session.filtered_artifacts %} @@ -73,6 +73,8 @@

    Agenda, Minutes, and Bluesheets

    {% if user|has_role:"Secretariat" or can_manage_materials %} {% if pres.document.type.slug == 'minutes' %} {% url 'ietf.meeting.views.upload_session_minutes' session_id=session.pk num=session.meeting.number as upload_url %} + {% elif pres.document.type.slug == 'narrativeminutes' %} + {% url 'ietf.meeting.views.upload_session_narrativeminutes' session_id=session.pk num=session.meeting.number as upload_url %} {% elif pres.document.type.slug == 'agenda' %} {% url 'ietf.meeting.views.upload_session_agenda' session_id=session.pk num=session.meeting.number as upload_url %} {% else %} @@ -89,6 +91,13 @@

    Agenda, Minutes, and Bluesheets

    {% endfor %} + {% if session.bluesheet_title %} + + {% endif %} {% endif %}
    + + {{ session.bluesheet_title }} + +
    @@ -100,12 +109,18 @@

    Agenda, Minutes, and Bluesheets

    {% endif %} {% if not session.type_counter.minutes %} - Import minutes from notes.ietf.org + Import minutes from notes.ietf.org Upload minutes {% endif %} + {% if not session.type_counter.narrativeminutes and session.group.acronym == "iesg" %} + + Upload narrative minutes + + {% endif %} {% endif %} {% if user|has_role:"Secretariat" and not session.type_counter.bluesheets or meeting.type.slug == 'interim' and can_manage_materials and not session.type_counter.bluesheets %} Chatlog and polls {% url 'ietf.doc.views_doc.document_main' name=pres.document.name as url %} - {{ pres.document.title }} - ({{ pres.document.name }}) + {{ pres.document.title }} + ( as json ) {% endfor %} @@ -172,7 +187,7 @@

    Slides

    {% elif request.user.is_authenticated and not session.is_material_submission_cutoff %} + href="{% url 'ietf.meeting.views.upload_session_slides' session_id=session.pk num=session.meeting.number %}"> Propose slides {% endif %} @@ -215,7 +230,7 @@

    Meeting tools

    - {% if use_notes %} + {% if meeting.uses_notes %} {% for day in time_slices %} {% endfor %} {% endif %} @@ -121,6 +125,7 @@

    diff --git a/ietf/templates/meeting/upcoming.html b/ietf/templates/meeting/upcoming.html index 802b1b03ca..13a27ed910 100644 --- a/ietf/templates/meeting/upcoming.html +++ b/ietf/templates/meeting/upcoming.html @@ -89,7 +89,7 @@

    Upcoming Meetings

    Cancelled {% else %} - + {% endif %} {% endwith %} {% else %} diff --git a/ietf/templates/meeting/upcoming.ics b/ietf/templates/meeting/upcoming.ics deleted file mode 100644 index fb5b37d772..0000000000 --- a/ietf/templates/meeting/upcoming.ics +++ /dev/null @@ -1,32 +0,0 @@ -{% load humanize tz %}{% autoescape off %}{% load ietf_filters textfilters %}BEGIN:VCALENDAR -VERSION:2.0 -METHOD:PUBLISH -PRODID:-//IETF//datatracker.ietf.org ical upcoming//EN -{{vtimezones}}{% for item in assignments %}BEGIN:VEVENT -UID:ietf-{{item.session.meeting.number}}-{{item.timeslot.pk}} -SUMMARY:{% if item.session.name %}{{item.session.group.acronym|lower}} - {{item.session.name|ics_esc}}{% else %}{{item.session.group.acronym|lower}} - {{item.session.group.name}}{%endif%} -{% if item.schedule.meeting.city %}LOCATION:{{item.schedule.meeting.city}},{{item.schedule.meeting.country}} -{% endif %}STATUS:{{item.session.ical_status}} -CLASS:PUBLIC -DTSTART{% ics_date_time item.timeslot.local_start_time item.schedule.meeting.time_zone %} -DTEND{% ics_date_time item.timeslot.local_end_time item.schedule.meeting.time_zone %} -DTSTAMP{% ics_date_time item.timeslot.modified|utc 'utc' %}{% if item.session.agenda %} -URL:{{item.session.agenda.get_href}}{% endif %} -DESCRIPTION:{% if item.timeslot.name %}{{item.timeslot.name|ics_esc}}\n{% endif %}{% if item.session.agenda_note %} - Note: {{item.session.agenda_note|ics_esc}}\n{% endif %}{% for material in item.session.materials.all %} - \n{{material.type}}{% if material.type.name != "Agenda" %} - ({{material.title|ics_esc}}){% endif %}: - {{material.get_href}}\n{% endfor %}{% if item.session.remote_instructions %} - Remote instructions: {{ item.session.remote_instructions }}\n{% endif %} -END:VEVENT -{% endfor %}{% for meeting in ietfs %}BEGIN:VEVENT -UID:ietf-{{ meeting.number }} -SUMMARY:IETF {{ meeting.number }}{% if meeting.city %} -LOCATION:{{ meeting.city }},{{ meeting.country }}{% endif %} -CLASS:PUBLIC -DTSTART;VALUE=DATE{% if meeting.time_zone %};TZID={{ meeting.time_zone|ics_esc }}{% endif %}:{{ meeting.date|date:"Ymd" }} -DTEND;VALUE=DATE{% if meeting.time_zone %};TZID={{ meeting.time_zone|ics_esc }}{% endif %}:{{ meeting.end_date|date:"Ymd" }} -DTSTAMP{% ics_date_time meeting.cached_updated|utc 'utc' %} -URL:{{ request.scheme }}://{{ request.get_host }}{% url 'agenda' num=meeting.number %} -END:VEVENT -{% endfor %}END:VCALENDAR{% endautoescape %} diff --git a/ietf/templates/meeting/upload_session_agenda.html b/ietf/templates/meeting/upload_session_agenda.html index 1856a75bdb..57cba6b53c 100644 --- a/ietf/templates/meeting/upload_session_agenda.html +++ b/ietf/templates/meeting/upload_session_agenda.html @@ -1,5 +1,5 @@ {% extends "base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} +{# Copyright The IETF Trust 2015-2023, All Rights Reserved #} {% load origin static django_bootstrap5 tz %} {% block title %} {% if agenda_sp %} @@ -29,6 +29,9 @@

    Session {{ session_number }} : {{ session.official_timeslotassignment.timesl
    {% csrf_token %} {% bootstrap_form form %} - + +{% endblock %} +{% block js %} + {% endblock %} \ No newline at end of file diff --git a/ietf/templates/meeting/upload_session_minutes.html b/ietf/templates/meeting/upload_session_minutes.html index 30eadda277..324440681f 100644 --- a/ietf/templates/meeting/upload_session_minutes.html +++ b/ietf/templates/meeting/upload_session_minutes.html @@ -26,6 +26,11 @@

    {% if session_number %}

    Session {{ session_number }} : {{ session.official_timeslotassignment.timeslot.time|timezone:session.meeting.time_zone|date:"D M-d-Y Hi" }}

    {% endif %} + {% if future %} +

    + Caution: Session has not ended yet +

    + {% endif %} {% csrf_token %} {% bootstrap_form form %} diff --git a/ietf/templates/meeting/upload_session_narrativeminutes.html b/ietf/templates/meeting/upload_session_narrativeminutes.html new file mode 100644 index 0000000000..d990985510 --- /dev/null +++ b/ietf/templates/meeting/upload_session_narrativeminutes.html @@ -0,0 +1,34 @@ +{% extends "base.html" %} +{# Copyright The IETF Trust 2024, All Rights Reserved #} +{% load origin static django_bootstrap5 tz %} +{% block title %} + {% if narrativeminutes_sp %} + Revise + {% else %} + Upload + {% endif %} + Narrative Minutes for {{ session.meeting }} : {{ session.group.acronym }} +{% endblock %} +{% block content %} + {% origin %} +

    + {% if narrativeminutes_sp %} + Revise + {% else %} + Upload + {% endif %} + Narrative Minutes for {{ session.meeting }} +
    + {{ session.group.acronym }} + {% if session.name %}: {{ session.name }}{% endif %} + +

    + {% if session_number %} +

    Session {{ session_number }} : {{ session.official_timeslotassignment.timeslot.time|timezone:session.meeting.time_zone|date:"D M-d-Y Hi" }}

    + {% endif %} + + {% csrf_token %} + {% bootstrap_form form %} + + +{% endblock %} \ No newline at end of file diff --git a/ietf/templates/meeting/upload_session_slides.html b/ietf/templates/meeting/upload_session_slides.html index 8e3e064df3..059ffae16f 100644 --- a/ietf/templates/meeting/upload_session_slides.html +++ b/ietf/templates/meeting/upload_session_slides.html @@ -17,15 +17,21 @@

    {% else %} Upload new {% endif %} - slides for {{ session.meeting }} -
    + slides for {{ session.meeting }}
    {{ session.group.acronym }} {% if session.name %}: {{ session.name }}{% endif %}

    {% if session_number %} -

    Session {{ session_number }} : {{ session.official_timeslotassignment.timeslot.time|timezone:session.meeting.time_zone|date:"D M-d-Y Hi" }}

    +

    + Session {{ session_number }} : {{ session.official_timeslotassignment.timeslot.time|timezone:session.meeting.time_zone|date:"D M-d-Y Hi" }} +

    + {% endif %} + {% if not manage %} +

    + This form will allow you to propose a slide deck to the session chairs. After you upload your proposal, mail will be sent to the session chairs asking for their approval. +

    {% endif %} {% if slides_sp %}

    {{ slides_sp.document.name }}

    {% endif %} diff --git a/ietf/templates/minimal.html b/ietf/templates/minimal.html index 87f661f501..15c432505e 100644 --- a/ietf/templates/minimal.html +++ b/ietf/templates/minimal.html @@ -9,8 +9,8 @@ {{ title }} - - + + {# load this in the head, to prevent flickering #} diff --git a/ietf/templates/nomcom/announcements.html b/ietf/templates/nomcom/announcements.html index aa67fbb68e..771f2b4fb0 100644 --- a/ietf/templates/nomcom/announcements.html +++ b/ietf/templates/nomcom/announcements.html @@ -15,7 +15,7 @@

    NomCom

    {% for regime in regimes %}

    - Messages from {{ regime.group.start_year }}/{{ regime.group.end_year }} + Messages from {{ regime.group.start_year }} NomCom

    {# use person email address here rather than the generic nomcom-chair@ietf.org #}

    @@ -56,9 +56,14 @@

    References

  • - IAB, IESG, IETF Trust, and IETF LLC Selection, Confirmation, and Recall Process: Operation of the IETF Nominating and Recall Committees (RFC 8713) (Also BCP10) + IAB, IESG, IETF Trust, and IETF LLC Selection, Confirmation, and Recall Process: Operation of the IETF Nominating and Recall Committees (RFC 8713)
  • +
  • + + Nominating Committee Eligibility (RFC 9389) + +
  • Publicly Verifiable Nominations Committee (NomCom) Random Selection (RFC 3797) diff --git a/ietf/templates/nomcom/feedback.html b/ietf/templates/nomcom/feedback.html index effacdf67a..8c9e8c824f 100644 --- a/ietf/templates/nomcom/feedback.html +++ b/ietf/templates/nomcom/feedback.html @@ -81,7 +81,7 @@

    - Provide feedback about {% person_link form.nominee.person %} + Provide feedback about {% person_link form.nominee.person with_email=False %} for the {{ form.position.name }} position.

    {% elif form.topic %} @@ -97,8 +97,8 @@

    {% endif %}

    This feedback will only be available to - NomCom {{ year }}. - You may have the feedback mailed back to you by selecting the option below. + the current NomCom. + You can have the feedback mailed back to you by selecting the option below.

    {% csrf_token %} diff --git a/ietf/templates/nomcom/history.html b/ietf/templates/nomcom/history.html index 09cdeae98c..8262876b11 100644 --- a/ietf/templates/nomcom/history.html +++ b/ietf/templates/nomcom/history.html @@ -11,7 +11,7 @@

    NomCom Membership History

    Note: The data for concluded NomComs is occasionally incorrect.

    {% for regime in regimes %} -

    {{ regime.label }}

    +

    {{ regime.year }} NomCom

    {% for slug, label, roles in regime.nomcom.personnel %}
    @@ -25,7 +25,7 @@

    {{ regime.label }}

    {% endfor %}
    {% endfor %} -

    2012/2013

    +

    2012 NomCom

    Chair @@ -64,7 +64,7 @@

    2012/2013

    Rudi Vansnick (ISOC Liaison)
    -

    2011/2012

    +

    2011 NomCom

    Chair @@ -104,7 +104,7 @@

    2011/2012

    Jason Livingood (ISOC Liaison)
    -

    2010/2011

    +

    2010 NomCom

    Chair @@ -143,7 +143,7 @@

    2010/2011

    Eric Burger (ISOC Liaison)
    -

    2009/2010

    +

    2009 NomCom

    Chair @@ -184,7 +184,7 @@

    2009/2010

    - 2008/2009 + 2008 NomCom

    @@ -224,7 +224,7 @@

    - 2007/2008 + 2007 NomCom

    @@ -264,7 +264,7 @@

    - 2006/2007 + 2006 NomCom

    @@ -304,7 +304,7 @@

    - 2005/2006 + 2005 NomCom

    @@ -344,7 +344,7 @@

    - 2004/2005 + 2004 NomCom

    @@ -385,7 +385,7 @@

    - 2003/2004 + 2003 NomCom

    @@ -424,7 +424,7 @@

    - 2002/2003 + 2002 NomCom

    @@ -463,7 +463,7 @@

    - 2001/2002 + 2001 NomCom

    @@ -502,7 +502,7 @@

    - 2000/2001 + 2000 NomCom

    @@ -541,7 +541,7 @@

    - 1999/2000 + 1999 NomCom

    @@ -580,7 +580,7 @@

    - 1998/1999 + 1998 NomCom

    @@ -620,7 +620,7 @@

    - 1997/1998 + 1997 NomCom

    @@ -659,7 +659,7 @@

    - 1996/1997 + 1996 NomCom

    @@ -700,7 +700,7 @@

    - 1995/1996 + 1995 NomCom

    @@ -739,7 +739,7 @@

    - 1994/1995 + 1994 NomCom

    @@ -771,7 +771,7 @@

    - 1993/1994 + 1993 NomCom

    @@ -802,7 +802,7 @@

    - 1992/1993 + 1992 NomCom

    diff --git a/ietf/templates/nomcom/index.html b/ietf/templates/nomcom/index.html index 91f63f74eb..e765dd7c2e 100644 --- a/ietf/templates/nomcom/index.html +++ b/ietf/templates/nomcom/index.html @@ -12,7 +12,7 @@

    NomComs

  • @@ -295,7 +310,7 @@

    Notes and recordings

    - {% if use_notes %} + {% if meeting.uses_notes %} {% endif %} {# Recordings #} - {% if meeting.number|add:"0" >= 80 %} - {% with session.recordings as recordings %} - {% if recordings %} - {# There's no guaranteed order, so this is a bit messy: #} - {# First, the audio recordings, if any #} - {% for r in recordings %} - {% if r.get_href and 'audio' in r.get_href %} - - - - {% endif %} - {% endfor %} - {# Then the youtube recordings #} - {% for r in recordings %} - {% if r.get_href and 'youtu' in r.get_href %} - - - - {% endif %} - {% endfor %} - {# Finally, any other recordings #} - {% for r in recordings %} - {% if r.get_href and not 'audio' in r.get_href and not 'youtu' in r.get_href %} - - - - {% endif %} - {% endfor %} - {% endif %} - {% endwith %} - {% if session.video_stream_url %} - - - + {% with session.recordings as recordings %} + {% if recordings %} + {# There's no guaranteed order, so this is a bit messy: #} + {# First, the audio recordings, if any #} + {% for r in recordings %} + {% if r.get_href and 'audio' in r.get_href %} + + + + {% endif %} + {% endfor %} + {# Then the youtube recordings #} + {% for r in recordings %} + {% if r.get_href and 'youtu' in r.get_href %} + + + + {% endif %} + {% endfor %} + {# Finally, any other recordings #} + {% for r in recordings %} + {% if r.get_href and not 'audio' in r.get_href and not 'youtu' in r.get_href %} + + + + {% endif %} + {% endfor %} {% endif %} + {% endwith %} + {% if session.session_recording_url %} + + + {% endif %}
    @@ -305,54 +320,61 @@

    Notes and recordings

    - {{ r.title }} -
    - {{ r.title }} -
    - {{ r.title }} -
    - - Session recording - -
    + {{ r.title }} +
    + {{ r.title }} +
    + {{ r.title }} +
    + + + Meetecho session recording + +
    {% endif %} + + {% if can_manage_materials %} +
    + Link additional recordings to session + + {% endif %} + {% endwith %}{% endwith %} -{% endfor %} \ No newline at end of file +{% endfor %} diff --git a/ietf/secr/templates/sreq/not_meeting_notification.txt b/ietf/templates/meeting/session_not_meeting_notification.txt similarity index 83% rename from ietf/secr/templates/sreq/not_meeting_notification.txt rename to ietf/templates/meeting/session_not_meeting_notification.txt index 1120f8480c..0e5c940708 100644 --- a/ietf/secr/templates/sreq/not_meeting_notification.txt +++ b/ietf/templates/meeting/session_not_meeting_notification.txt @@ -1,3 +1,4 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} {% load ams_filters %} {{ login|smart_login }} {{ group.acronym }} working group, indicated that the {{ group.acronym }} working group does not plan to hold a session at IETF {{ meeting.number }}. diff --git a/ietf/templates/meeting/session_request_confirm.html b/ietf/templates/meeting/session_request_confirm.html new file mode 100644 index 0000000000..09043d3d0c --- /dev/null +++ b/ietf/templates/meeting/session_request_confirm.html @@ -0,0 +1,38 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static ietf_filters django_bootstrap5 %} +{% block title %}Confirm Session Request{% endblock %} + +{% block content %} +

    Confirm Session Request - IETF {{ meeting.number }}

    + + + +
    + +
    + + {% include "meeting/session_request_view_table.html" %} + +
    + {% csrf_token %} + {{ form }} + {{ form.session_forms.management_form }} + {% for sf in form.session_forms %} + {% include 'meeting/session_details_form.html' with form=sf hidden=True only %} + {% endfor %} + + + + +
    + +
    + +{% endblock %} + +{% block js %} + +{% endblock %} \ No newline at end of file diff --git a/ietf/templates/meeting/session_request_form.html b/ietf/templates/meeting/session_request_form.html new file mode 100644 index 0000000000..ecf5cb7268 --- /dev/null +++ b/ietf/templates/meeting/session_request_form.html @@ -0,0 +1,206 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static ietf_filters django_bootstrap5 %} +{% block title %}{% if is_create %}New {% else %}Edit {% endif %}Session Request{% endblock %} +{% block morecss %}{{ block.super }} + .hidden {display: none !important;} + div.form-group {display: inline;} +{% endblock %} +{% block content %} +

    {% if is_create %}New {% else %}Edit {% endif %}Session Request

    + + {% if is_create %} + + {% endif %} + +
    + +
    + {% csrf_token %} + {{ form.session_forms.management_form }} + {% if form.non_field_errors %} +
    {{ form.non_field_errors }}
    + {% endif %} + +
    + +
    + +
    +
    + +
    + +
    + +
    +
    + + {% bootstrap_field form.num_session layout="horizontal" %} + + {% if group.features.acts_like_wg %} + +
    +
    Session 1
    +
    + {% include 'meeting/session_details_form.html' with form=form.session_forms.0 hide_onsite_tool_prompt=True only %} +
    +
    + +
    +
    Session 2
    +
    + {% include 'meeting/session_details_form.html' with form=form.session_forms.1 hide_onsite_tool_prompt=True only %} +
    +
    + + {% if not is_virtual %} + {% bootstrap_field form.session_time_relation layout="horizontal" %} + {% endif %} + +
    +
    Additional Session Request
    +
    +
    + {{ form.third_session }} + +
An additional slot may be available after agenda scheduling has closed and with the approval of an Area Director.
    +
    + +
    +
    + +
    +
    Third session request
    +
    + {% include 'meeting/session_details_form.html' with form=form.session_forms.2 hide_onsite_tool_prompt=True only %} +
    +
    + + {% else %}{# else not group.features.acts_like_wg #} + {% for session_form in form.session_forms %} +
    +
    Session {{ forloop.counter }}
    +
    + {% include 'meeting/session_details_form.html' with form=session_form only %} +
    +
    + {% endfor %} + {% endif %} + + {% bootstrap_field form.attendees layout="horizontal" %} + + {% bootstrap_field form.bethere layout="horizontal" %} + +
    +
    Conflicts to avoid
    +
    +
    +
    Other WGs that included {{ group.acronym }} in their conflict lists
    +
    {{ session_conflicts.inbound|default:"None" }}
    +
    +
    +
    WG Sessions
    You may select multiple WGs within each category
    +
    + {% for cname, cfield, cselector in form.wg_constraint_fields %} +
    +
    +
    +
    +
    + {{ cselector }} +
    +
    + +
    +
    +
    +
    + {{ cfield.errors }}{{ cfield }} +
    +
    +
    +
    + {% empty %}{# shown if there are no constraint fields #} +
    +
    No constraints are enabled for this meeting.
    + {% endfor %} +
    +
    + + {% if form.inactive_wg_constraint_count %} +
    +
    Disabled for this meeting
    +
    + {% for cname, value, field in form.inactive_wg_constraints %} +
    +
    {{ cname|title }}
    +
    +
    +
    + +
    +
    + + +
    +
    +
    +
    + {% endfor %} +
    +
    + {% endif %} + +
    +
    BOF Sessions
    +
If the sessions cannot be found in the fields above, please enter free-form requests in the Special Requests field below.
    +
    +
    +
    + + {% if not is_virtual %} + + {% bootstrap_field form.resources layout="horizontal" %} + + {% bootstrap_field form.timeranges layout="horizontal" %} + + {% bootstrap_field form.adjacent_with_wg layout="horizontal" %} + +
    +
    Joint session with: (To request one session for multiple WGs together)
    +
    To request a joint session with another group, please contact the secretariat.
    +
    + + {% endif %} + + {% bootstrap_field form.comments layout="horizontal" %} + + {% if form.notifications_optional %} +
    + +
    +
    + + +
    +
    +
    + {% endif %} + + + Cancel +
    + +{% endblock %} +{% block js %} + + {{ form.media }} +{% endblock %} \ No newline at end of file diff --git a/ietf/templates/meeting/session_request_info.txt b/ietf/templates/meeting/session_request_info.txt new file mode 100644 index 0000000000..2e96efb31f --- /dev/null +++ b/ietf/templates/meeting/session_request_info.txt @@ -0,0 +1,26 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% load ams_filters %} +--------------------------------------------------------- +Working Group Name: {{ group.name }} +Area Name: {{ group.parent }} +Session Requester: {{ login }} +{% if session.joint_with_groups %}{{ session.joint_for_session_display }} joint with: {{ session.joint_with_groups }}{% endif %} + +Number of Sessions: {{ session.num_session }} +Length of Session(s): {% for session_length in session_lengths %}{{ session_length.total_seconds|display_duration }}{% if not forloop.last %}, {% endif %}{% endfor %} +Number of Attendees: {{ session.attendees }} +Conflicts to Avoid: +{% for line in session.outbound_conflicts %} {{line}} +{% endfor %}{% if session.session_time_relation_display %} {{ session.session_time_relation_display }}{% endif %} +{% if session.adjacent_with_wg %} Adjacent with WG: {{ session.adjacent_with_wg }}{% endif %} +{% if session.timeranges_display %} Can't meet: {{ session.timeranges_display|join:", " }}{% endif %} + +Participants who must be present: +{% for person in session.bethere %} {{ person.ascii_name }} +{% endfor %} +Resources Requested: +{% for resource in session.resources %} {{ resource.desc }} +{% endfor %} +Special Requests: + {{ session.comments }} +--------------------------------------------------------- diff --git a/ietf/templates/meeting/session_request_list.html b/ietf/templates/meeting/session_request_list.html new file mode 100644 index 0000000000..789b7006e5 --- /dev/null +++ b/ietf/templates/meeting/session_request_list.html @@ -0,0 +1,65 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static %} +{% load ietf_filters %} +{% load django_bootstrap5 %} +{% block title %}Session Requests{% endblock %} +{% block content %} +

    Session Requests IETF {{ meeting.number }}

    + +
    + Instructions + + View list of timeslot requests + {% if user|has_role:"Secretariat" %} + {% if is_locked %} + Unlock Tool + {% else %} + Lock Tool + {% endif %} + {% endif %} +
    + +
    +
    + Request New Session +
    +
    +

The list below includes the working groups you currently chair that do not already have a session scheduled. You can click on an acronym to complete a request for a new session at the upcoming IETF meeting. Click "Group will not meet" to send a notification that the group does not plan to meet.

    +
      + {% for group in unscheduled_groups %} +
    • + {{ group.acronym }} + {% if group.not_meeting %} + (Currently, this group does not plan to hold a session at IETF {{ meeting.number }}) + {% endif %} +
    • + {% empty %} +
    • NONE
    • + {% endfor %} +
    +
    +
    + + +
    +
    + Edit / Cancel Previously Requested Sessions +
    +
    +

    The list below includes those working groups for which you or your co-chair has requested sessions at the upcoming IETF meeting. You can click on an acronym to initiate changes to a session, or cancel a session.

    + +
    +
    + +{% endblock %} + +{% block footer-extras %} + {% include "includes/sessions_footer.html" %} +{% endblock %} \ No newline at end of file diff --git a/ietf/templates/meeting/session_request_locked.html b/ietf/templates/meeting/session_request_locked.html new file mode 100644 index 0000000000..15c023ce33 --- /dev/null +++ b/ietf/templates/meeting/session_request_locked.html @@ -0,0 +1,21 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static ietf_filters django_bootstrap5 %} +{% block title %}Session Request{% endblock %} + +{% block content %} +

    Session Request - IETF {{ meeting.number }}

    + + View list of timeslot requests + +
    + +
    +

    {{ message }}

    + +
    + +
    +
    + +{% endblock %} diff --git a/ietf/templates/meeting/session_request_notification.txt b/ietf/templates/meeting/session_request_notification.txt new file mode 100644 index 0000000000..49dbbfc42c --- /dev/null +++ b/ietf/templates/meeting/session_request_notification.txt @@ -0,0 +1,6 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% autoescape off %}{% load ams_filters %} + +{% filter wordwrap:78 %}{{ header }} meeting session request has just been submitted by {{ requester }}.{% endfilter %} + +{% include "meeting/session_request_info.txt" %}{% endautoescape %} diff --git a/ietf/templates/meeting/session_request_status.html b/ietf/templates/meeting/session_request_status.html new file mode 100644 index 0000000000..65e98d6d23 --- /dev/null +++ b/ietf/templates/meeting/session_request_status.html @@ -0,0 +1,28 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static %} +{% load ietf_filters %} +{% load django_bootstrap5 %} +{% block title %}Session Request Status{% endblock %} +{% block content %} +

    Session Request Status

    + +
    +
    + Session Request Status +
    +
    +

    Enter the message that you would like displayed to the WG Chair when this tool is locked.

    +
    {% csrf_token %} + {% bootstrap_form form %} + {% if is_locked %} + + {% else %} + + {% endif %} + +
    +
    +
    + +{% endblock %} diff --git a/ietf/templates/meeting/session_request_view.html b/ietf/templates/meeting/session_request_view.html new file mode 100644 index 0000000000..3db16f56cb --- /dev/null +++ b/ietf/templates/meeting/session_request_view.html @@ -0,0 +1,59 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static ietf_filters django_bootstrap5 %} +{% block title %}Session Request{% endblock %} + +{% block content %} +

    Session Request - IETF {{ meeting.number }}

    + + + +
    + +
    + + {% include "meeting/session_request_view_table.html" %} + +
    + +

    Activities Log

    +
    + + + + + + + + + + + {% for entry in activities %} + + + + + + + {% endfor %} + +
    DateTimeActionName
    {{ entry.act_date }}{{ entry.act_time }}{{ entry.activity }}{{ entry.act_by }}
    +
    + + + + {% if show_approve_button %} + Approve Third Session + {% endif %} + + Back + +
    + +{% endblock %} + +{% block js %} + +{% endblock %} \ No newline at end of file diff --git a/ietf/templates/meeting/session_request_view_formset.html b/ietf/templates/meeting/session_request_view_formset.html new file mode 100644 index 0000000000..72811b8c2c --- /dev/null +++ b/ietf/templates/meeting/session_request_view_formset.html @@ -0,0 +1,49 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% load ams_filters %}{# keep this in sync with sessions_request_view_session_set.html #} +{% for sess_form in formset %} + {% if sess_form.cleaned_data and not sess_form.cleaned_data.DELETE %} +
    +
    + Session {{ forloop.counter }} +
    +
    +
    +
    Length
    +
    {{ sess_form.cleaned_data.requested_duration.total_seconds|display_duration }}
    +
    + {% if sess_form.cleaned_data.name %} +
    +
    Name
    +
    {{ sess_form.cleaned_data.name }}
    +
    + {% endif %} + {% if sess_form.cleaned_data.purpose.slug != 'regular' %} +
    +
    Purpose
    +
    + {{ sess_form.cleaned_data.purpose }} + {% if sess_form.cleaned_data.purpose.timeslot_types|length > 1 %}({{ sess_form.cleaned_data.type }} + ){% endif %} +
    +
    +
    +
    Onsite tool?
    +
    {{ sess_form.cleaned_data.has_onsite_tool|yesno }}
    +
    + {% endif %} +
    +
    + + {% if group.features.acts_like_wg and forloop.counter == 2 and not is_virtual %} +
    +
    + Time between sessions +
    +
    + {% if session.session_time_relation_display %}{{ session.session_time_relation_display }}{% else %}No + preference{% endif %} +
    +
    + {% endif %} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/ietf/templates/meeting/session_request_view_session_set.html b/ietf/templates/meeting/session_request_view_session_set.html new file mode 100644 index 0000000000..0b8412b04f --- /dev/null +++ b/ietf/templates/meeting/session_request_view_session_set.html @@ -0,0 +1,47 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% load ams_filters %}{# keep this in sync with sessions_request_view_formset.html #} +{% for sess in session_set %} +
    +
    + Session {{ forloop.counter }} +
    +
    +
    +
    Length
    +
    {{ sess.requested_duration.total_seconds|display_duration }}
    +
    + {% if sess.name %} +
    +
    Name
    +
    {{ sess.name }}
    +
    + {% endif %} + {% if sess.purpose.slug != 'regular' %} +
    +
    Purpose
    +
    + {{ sess.purpose }} + {% if sess.purpose.timeslot_types|length > 1 %}({{ sess.type }}){% endif %} +
    +
    +
    +
    Onsite tool?
    +
    {{ sess.has_onsite_tool|yesno }}
    +
    + {% endif %} +
    +
    + +{% if group.features.acts_like_wg and forloop.counter == 2 and not is_virtual %} +
    +
    + Time between sessions +
    +
    + {% if session.session_time_relation_display %}{{ session.session_time_relation_display }}{% else %}No + preference{% endif %} +
    +
    +{% endif %} + +{% endfor %} \ No newline at end of file diff --git a/ietf/templates/meeting/session_request_view_table.html b/ietf/templates/meeting/session_request_view_table.html new file mode 100644 index 0000000000..a5cb85c252 --- /dev/null +++ b/ietf/templates/meeting/session_request_view_table.html @@ -0,0 +1,146 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% load ams_filters %} + +
    +
    + Working Group Name +
    +
    + {{ group.name }} ({{ group.acronym }}) +
    +
    + +
    +
    + Area Name +
    +
    + {{ group.parent }} +
    +
    + +
    +
    + Number of Sessions Requested +
    +
    + {% if session.third_session %}3{% else %}{{ session.num_session }}{% endif %} +
    +
    + +{% if form %} + {% include 'meeting/session_request_view_formset.html' with formset=form.session_forms group=group session=session only %} +{% else %} + {% include 'meeting/session_request_view_session_set.html' with session_set=sessions group=group session=session only %} +{% endif %} + + +
    +
    + Number of Attendees +
    +
    + {{ session.attendees }} +
    +
    + +
    +
    + Conflicts to Avoid +
    +
    + {% if session_conflicts.outbound %} + {% for conflict in session_conflicts.outbound %} +
    +
    + {{ conflict.name|title }} +
    +
    + {{ conflict.groups }} +
    +
    + {% endfor %} + {% else %}None{% endif %} +
    +
    + +
    +
    + Other WGs that included {{ group }} in their conflict list +
    +
    + {% if session_conflicts.inbound %}{{ session_conflicts.inbound }}{% else %}None so far{% endif %} +
    +
    + +{% if not is_virtual %} +
    +
    + Resources requested +
    +
    + {% if session.resources %}
      {% for resource in session.resources %}
    • {{ resource.desc }}
    • {% endfor %}
    {% else %}None so far{% endif %} +
    +
    +{% endif %} + +
    +
    + Participants who must be present +
    +
    + {% if session.bethere %}
      {% for person in session.bethere %}
    • {{ person }}
    • {% endfor %}
    {% else %}None{% endif %} +
    +
    + +
    +
+ Cannot meet on +
    +
    + {% if session.timeranges_display %}{{ session.timeranges_display|join:', ' }}{% else %}No constraints{% endif %} +
    +
    + +{% if not is_virtual %} +
    +
    + Adjacent with WG +
    +
    + {{ session.adjacent_with_wg|default:'No preference' }} +
    +
    +
    +
    + Joint session +
    +
    + {% if session.joint_with_groups %} + {{ session.joint_for_session_display }} with: {{ session.joint_with_groups }} + {% else %} + Not a joint session + {% endif %} +
    +
    +{% endif %} + +
    +
    + Special Requests +
    +
    + {{ session.comments }} +
    +
    + +{% if form and form.notifications_optional %} +
    +
    + {{ form.send_notifications.label}} +
    +
    + {% if form.cleaned_data.send_notifications %}Yes{% else %}No{% endif %} +
    +
    +{% endif %} diff --git a/ietf/templates/meeting/slides_approved.txt b/ietf/templates/meeting/slides_approved.txt index db288ad853..61ffafcd18 100644 --- a/ietf/templates/meeting/slides_approved.txt +++ b/ietf/templates/meeting/slides_approved.txt @@ -1,4 +1,4 @@ -{% load ietf_filters %}{% autoescape off %}Your proposed slides have been approved for {{ submission.session.meeting }} : {{ submission.session.group.acronym }}{% if submission.session.name %} : {{submission.session.name}}{% endif %} +{% load ietf_filters %}{% autoescape off %}Your proposed slides have been approved for {{ submission.session.meeting }} : {{ submission.session.group.acronym }}{% if submission.session.name %} : {{submission.session.name}}{% endif %} by {{approver}} Title: {{submission.title}} diff --git a/ietf/templates/meeting/timeslot_edit.html b/ietf/templates/meeting/timeslot_edit.html index 46b4eb946f..3259dba9da 100644 --- a/ietf/templates/meeting/timeslot_edit.html +++ b/ietf/templates/meeting/timeslot_edit.html @@ -11,20 +11,22 @@ {% endcomment %} .timeslot-edit { overflow: auto; height: max(30rem, calc(100vh - 25rem));} .tstable { width: 100%; border-collapse: separate; } {# "separate" to ensure sticky cells keep their borders #} -.tstable thead { position: sticky; top: 0; z-index: 3; background-color: white;} -.tstable th:first-child, .tstable td:first-child { - background-color: white; {# needs to match the lighter of the striped-table colors! #} -position: sticky; -left: 0; - z-index: 2; {# render above other cells / borders but below thead (z-index 3, above) #} -} -.tstable tbody > tr:nth-of-type(odd) > th:first-child { - background-color: rgb(249, 249, 249); {# needs to match the darker of the striped-table colors! #} -} -.tstable th { white-space: nowrap;} -.tstable td { white-space: nowrap;} -.capacity { font-size:80%; font-weight: normal;} -a.new-timeslot-link { color: lightgray; font-size: large;} + .tstable tr th:first-child { min-width: 25rem; max-width: 25rem; overflow: hidden; } + .tstable thead { position: sticky; top: 0; z-index: 3; background-color: white;} + .tstable thead th span.day { position: sticky; left: 25.5rem; } + .tstable th:first-child, .tstable td:first-child { + background-color: white; {# needs to match the lighter of the striped-table colors! #} + position: sticky; + left: 0; + z-index: 2; {# render above other cells / borders but below thead (z-index 3, above) #} + } + .tstable tbody > tr:nth-of-type(odd) > th:first-child { + background-color: rgb(249, 249, 249); {# needs to match the darker of the striped-table colors! #} + } + .tstable th { white-space: nowrap;} + .tstable td { white-space: nowrap;} + .capacity { font-size:80%; font-weight: normal;} + a.new-timeslot-link { color: lightgray; font-size: large;} {% endblock %} {% block content %} {% origin %} @@ -84,12 +86,14 @@

    - {{ day|date:'D' }} ({{ day }}) - - + + {{ day|date:'D' }} ({{ day }}) + + +
    {{ room.name }} + {% if room.functional_name and room.name != room.functional_name %} - {{ room.functional_name }}{% endif %} {% if room.capacity %}({{ room.capacity }}){% endif %} {% include "meeting/interim_session_buttons.html" with show_agenda=True use_notes=meeting.uses_notes %}{% include "meeting/interim_session_buttons.html" with show_agenda=True %}
    - + diff --git a/ietf/templates/nomcom/nomcom_public_base.html b/ietf/templates/nomcom/nomcom_public_base.html index f2448c9ee3..8c5fa16984 100644 --- a/ietf/templates/nomcom/nomcom_public_base.html +++ b/ietf/templates/nomcom/nomcom_public_base.html @@ -1,5 +1,5 @@ {% extends "base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} +{# Copyright The IETF Trust 2015-2024, All Rights Reserved #} {% load origin static %} {% load nomcom_tags %} {% load ietf_filters %} @@ -12,6 +12,7 @@ {% endblock %} {% block content %} {% origin %} + {% block nomcom_announce %}{% endblock %} {% with selected=request.path|split:'/'|slice:'3:-1'|join:'-' %}

    NomCom {{ year }} diff --git a/ietf/templates/nomcom/year_index.html b/ietf/templates/nomcom/year_index.html index 43a03d9459..74a8cda9a0 100644 --- a/ietf/templates/nomcom/year_index.html +++ b/ietf/templates/nomcom/year_index.html @@ -1,7 +1,13 @@ {% extends "nomcom/nomcom_public_base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} +{# Copyright The IETF Trust 2015-2024, All Rights Reserved #} {% load origin %} {% block subtitle %}- Home{% endblock %} +{% block nomcom_announce %} +

    + Announcements from this nomcom are available + here +

    +{% endblock %} {% block nomcom_content %} {% origin %} diff --git a/ietf/templates/person/merge.html b/ietf/templates/person/merge.html index 36499ecdbc..5c3e6b0938 100644 --- a/ietf/templates/person/merge.html +++ b/ietf/templates/person/merge.html @@ -1,5 +1,5 @@ +{# Copyright The IETF Trust 2018-2025, All Rights Reserved #} {% extends "base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} {% load static %} {% load django_bootstrap5 %} {% block title %}Merge Persons{% endblock %} @@ -8,45 +8,17 @@

    Merge Person Records

    This tool will merge two Person records into one. If both records have logins and you want to retain the one on the left, use the Swap button to swap source and target records.

    - - {% if method == 'post' %} - {% csrf_token %} - {% endif %} +
    {% bootstrap_field form.source %} - {% if source %} - {% with person=source %} - {% include "person/person_info.html" %} - {% endwith %} - {% endif %}
    {% bootstrap_field form.target %} - {% if target %} - {% with person=target %} - {% include "person/person_info.html" %} - {% endwith %} - {% endif %}
    - {% if change_details %}{% endif %} - {% if warn_messages %} - {% for message in warn_messages %}{% endfor %} - {% endif %} - {% if method == 'post' %} - - Swap - - {% endif %} - {% endblock %} \ No newline at end of file diff --git a/ietf/templates/person/merge_request_email.txt b/ietf/templates/person/merge_request_email.txt new file mode 100644 index 0000000000..0a695f036c --- /dev/null +++ b/ietf/templates/person/merge_request_email.txt @@ -0,0 +1,23 @@ +Hello, + +We have identified multiple IETF Datatracker accounts that may represent a single person: + +https://datatracker.ietf.org/person/{{ source_account }} + +and + +https://datatracker.ietf.org/person/{{ target_account }} + +If this is so then it is important that we merge the accounts. + +This email is being sent to the primary emails associated with each Datatracker account. + +Please respond to this message individually from the email account(s) you control so we can take the appropriate action. + +If these should be merged, please identify which account you would like to keep the login credentials from. + +If you are associated with but no longer have access to one of the email accounts, then please let us know and we will follow up to determine how to proceed. + + +{{ sender_name }} +IETF Support \ No newline at end of file diff --git a/ietf/templates/person/merge_submit.html b/ietf/templates/person/merge_submit.html new file mode 100644 index 0000000000..30e1999f81 --- /dev/null +++ b/ietf/templates/person/merge_submit.html @@ -0,0 +1,57 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static %} +{% load django_bootstrap5 %} +{% block title %}Merge Persons{% endblock %} +{% block content %} +

    Merge Person Records

    +

    + This tool will merge two Person records into one. If both records have logins and you want to retain the one on the left, use the Swap button to swap source and target records. +

    + + {% csrf_token %} +
    +
    + {% bootstrap_field form.source %} + {% if source %} + {% with person=source %} + {% include "person/person_info.html" %} + {% endwith %} + {% endif %} +
    +
    + {% bootstrap_field form.target %} + {% if target %} + {% with person=target %} + {% include "person/person_info.html" %} + {% endwith %} + {% endif %} +
    +
    + {% if change_details %}{% endif %} + {% if warn_messages %} + {% for message in warn_messages %}{% endfor %} + {% endif %} + + + Swap + + + + + + Send Email + + + Back + + +{% endblock %} \ No newline at end of file diff --git a/ietf/templates/person/person_link.html b/ietf/templates/person/person_link.html index 74ab43b945..f3f7e1a5b7 100644 --- a/ietf/templates/person/person_link.html +++ b/ietf/templates/person/person_link.html @@ -1,8 +1,8 @@ {% if email and email == "system@datatracker.ietf.org" or name and name == "(System)" %}(System){% else %}{% if email or name %}{{ name }}{% if email and with_email %} + aria-label="Compose email to {{ email }}" + title="Compose email to {{ email }}"> {% endif %}{% else %}(None){% endif %}{% endif %} \ No newline at end of file diff --git a/ietf/templates/person/photo.html b/ietf/templates/person/photo.html index 4cea8ccb1f..62c1ed991c 100644 --- a/ietf/templates/person/photo.html +++ b/ietf/templates/person/photo.html @@ -2,7 +2,7 @@ {% load origin static person_filters %} {% for role in person.role_set.all|active_roles %} - + - + {% for s in manual %} - {% if user.is_authenticated %} - - {% else %} - - {% endif %} +
    YearYear Convened Chair
    {{ role.name.name }}{{ role.name.name }} + {% if role.name.name == 'Reviewer' %} + (See reviews) + {% endif %} + {{ role.group.name }} ({{ role.group.acronym }}) @@ -106,13 +110,13 @@

    RFC {{ doc.rfc_number }} {{ doc.pub_date|date:"b Y"|title }}{{ doc.pub_date|date:"b Y"|title }} {{ doc.title|urlize_ietf_docs }} - {% with doc.referenced_by_rfcs.count as refbycount %} + {% with doc.referenced_by_rfcs_as_rfc_or_draft.count as refbycount %} {% if refbycount %} {{ refbycount }} RFC{{ refbycount|pluralize }} @@ -133,7 +137,7 @@

    @@ -149,7 +153,7 @@

    {% if not doc.replaced_by %}
  • - {{ doc.canonical_name }} + {{ doc.name }}
  • {% endif %} diff --git a/ietf/templates/person/send_merge_request.html b/ietf/templates/person/send_merge_request.html new file mode 100644 index 0000000000..f0c6272dca --- /dev/null +++ b/ietf/templates/person/send_merge_request.html @@ -0,0 +1,20 @@ +{# Copyright The IETF Trust 2025, All Rights Reserved #} +{% extends "base.html" %} +{% load static %} +{% load django_bootstrap5 %} +{% block title %}Send Merge Notice{% endblock %} +{% block content %} +

    Send Merge Notice

    + {% if form.non_field_errors %}
    {{ form.non_field_errors }}
    {% endif %} +
    + {% csrf_token %} + {% bootstrap_field form.to layout='horizontal' %} + {% bootstrap_field form.frm layout='horizontal' %} + {% bootstrap_field form.reply_to layout='horizontal' %} + {% bootstrap_field form.subject layout='horizontal' %} + {% bootstrap_field form.body layout='horizontal' %} + + Cancel +
    +{% endblock %} diff --git a/ietf/templates/registration/change_password.html b/ietf/templates/registration/change_password.html index 21c102bd0a..58bc2d2587 100644 --- a/ietf/templates/registration/change_password.html +++ b/ietf/templates/registration/change_password.html @@ -34,11 +34,14 @@

    Change password

    - Online attack: This password form uses the + Password strength requirements: + You must choose a password at least 12 characters long that scores at least a 3 according to the zxcvbn - password strength estimator to give an indication of password strength. - The crack time estimate given above assume online attack without rate - limiting, at a rate of 10 attempts per second. + password strength estimator. A warning will appear if your password does not meet this standard. +
    + Online attack: + The crack time estimate given above assumes an online attack at a rate of 10 attempts per second. + It is only a very rough guideline.
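The requirement described above (at least 12 characters and a zxcvbn score of at least 3) reduces to a small check. A minimal sketch, assuming the `zxcvbn` Python package is available; this is illustrative only, not the datatracker's actual form validation:

```python
# Illustrative sketch only (not the datatracker's real validator).
# Assumes the "zxcvbn" PyPI package; checks the rule stated above.
from zxcvbn import zxcvbn

MIN_LENGTH = 12
MIN_SCORE = 3  # zxcvbn scores range from 0 (weakest) to 4 (strongest)

def password_problems(password: str) -> list[str]:
    """Return the reasons a password is unacceptable (empty list means it passes)."""
    problems = []
    if len(password) < MIN_LENGTH:
        problems.append(f"must be at least {MIN_LENGTH} characters long")
    result = zxcvbn(password)
    if result["score"] < MIN_SCORE:
        problems.append(f"zxcvbn score is {result['score']}, at least {MIN_SCORE} is required")
    # The "online attack" guideline quoted above (10 guesses/second, no rate limiting)
    # is available from the same result, e.g. for display next to the form:
    # result["crack_times_display"]["online_no_throttling_10_per_second"]
    return problems
```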
    Offline cracking: The datatracker currently uses the {{ hasher.algorithm }} diff --git a/ietf/templates/registration/edit_profile.html b/ietf/templates/registration/edit_profile.html index 1837016b15..1e4ab169e1 100644 --- a/ietf/templates/registration/edit_profile.html +++ b/ietf/templates/registration/edit_profile.html @@ -32,12 +32,19 @@

    Your account

    Change password
    - {% if person.photo %} + {% if person.photo or person.role_set.exists %}
    -
    {% include "person/photo.html" with person=person %}
    +
    + {% if person.photo %} + {% include "person/photo.html" with person=person %} + {% endif %} + {% if person.role_set.exists %} +

    Email support@ietf.org + to update your photo.

    + {% endif %}
    {% endif %}
    diff --git a/ietf/templates/release/about.html b/ietf/templates/release/about.html index 23c2c9205c..85a74bb6d9 100644 --- a/ietf/templates/release/about.html +++ b/ietf/templates/release/about.html @@ -23,6 +23,7 @@

    About the IETF Datatracker

    moments. For the nitty-gritty week-to-week code changes, please check the release notes or the commit log.

    +

    Version 12.0.0: RFCs and Subseries as Document types

    Version 11.0.0: Django 4

    Version 10.0.0: Migration to PostgreSQL as the backend database engine

    Version 9.0.0: Timezone Aware Data

    diff --git a/ietf/templates/review/completed_review.txt b/ietf/templates/review/completed_review.txt index bdbe321ca1..7d10d8bf13 100644 --- a/ietf/templates/review/completed_review.txt +++ b/ietf/templates/review/completed_review.txt @@ -1,7 +1,9 @@ {% load ietf_filters %}{% autoescape off %}{% filter maybewordwrap:80 %}{% if assignment.state_id == "part-completed" %} Review is partially done. Another assignment may be needed to complete it. -{% endif %}Reviewer: {{ assignment.reviewer.person }} +{% endif %}Document: {{ assignment.review_request.doc.name }} +Title: {{ assignment.review_request.doc.title }} +Reviewer: {{ assignment.reviewer.person }} Review result: {{ assignment.result.name }} {{ content }} diff --git a/ietf/templates/stats/document_stats.html b/ietf/templates/stats/document_stats.html deleted file mode 100644 index 4e66bed37e..0000000000 --- a/ietf/templates/stats/document_stats.html +++ /dev/null @@ -1,86 +0,0 @@ -{% extends "base.html" %} -{% load origin %} -{% load ietf_filters static %} -{% block title %}{{ stats_title }}{% endblock %} -{% block pagehead %} - - -{% endblock %} -{% block content %} - {% origin %} -

    Internet-Draft and RFC statistics

    -
    - -
    - {% for slug, label, url in possible_document_stats_types %} - {{ label }} - {% endfor %} -
    -
    -
    - -
    - {% for slug, label, url in possible_author_stats_types %} - {{ label }} - {% endfor %} -
    -
    -
    - -
    - {% for slug, label, url in possible_yearly_stats_types %} - {{ label }} - {% endfor %} -
    -
    -

    Options

    -
    - -
    - {% for slug, label, url in possible_document_types %} - {{ label }} - {% endfor %} -
    -
    -
    - -
    - {% for slug, label, url in possible_time_choices %} - {{ label }} - {% endfor %} -
    -
    -
    - Please Note: The author information in the datatracker about RFCs - with numbers lower than about 1300 and Internet-Drafts from before 2001 is - unreliable and in many cases absent. For this reason, statistics on these - pages does not show correct author stats for corpus selections that involve such - documents. -
    - {% include content_template %} -{% endblock %} -{% block js %} - - - -{% endblock %} \ No newline at end of file diff --git a/ietf/templates/stats/document_stats_author_affiliation.html b/ietf/templates/stats/document_stats_author_affiliation.html deleted file mode 100644 index 9c798cb924..0000000000 --- a/ietf/templates/stats/document_stats_author_affiliation.html +++ /dev/null @@ -1,113 +0,0 @@ -{% load origin %} -{% origin %} -
    - -

    Data

    - - - - - - - - - {% if table_data %} - - {% for affiliation, percentage, count, names in table_data %} - - - - - - {% endfor %} - - {% endif %} -
    AffiliationPercentage of authorsAuthors
    {{ affiliation|default:"(unknown)" }}{{ percentage|floatformat:2 }}%{% include "stats/includes/number_with_details_cell.html" %}
    -

    - The statistics are based entirely on the author affiliation - provided with each Internet-Draft. Since this may vary across documents, an - author may be counted with more than one affiliation, making the - total sum more than 100%. -

    -

    Affiliation Aliases

    -

    - In generating the above statistics, some heuristics have been - applied to determine the affiliations of each author. -

    -{% if request.GET.showaliases %} -

    - Hide generated aliases -

    - {% if request.user.is_staff %} -

    - Note: since you're an admin, you can - add an extra known alias - or see the - existing known aliases - and - generally ignored endings. -

    - {% endif %} - {% if alias_data %} - - - - - - - - {% if alias_data %} - - {% for name, alias in alias_data %} - - - - - {% endfor %} - - {% endif %} -
    AffiliationAlias
    {{ name|default:"(unknown)" }}{{ alias }}
    - {% endif %} -{% else %} -

    - Show generated aliases -

    -{% endif %} \ No newline at end of file diff --git a/ietf/templates/stats/document_stats_author_citations.html b/ietf/templates/stats/document_stats_author_citations.html deleted file mode 100644 index ae89335fae..0000000000 --- a/ietf/templates/stats/document_stats_author_citations.html +++ /dev/null @@ -1,72 +0,0 @@ -{% load origin %}{% origin %} -
    - - - -

    Data

    - - - - - - - - - - {% if table_data %} - - {% for citations, percentage, count, names in table_data %} - - - - - - {% endfor %} - - {% endif %} -
    CitationsPercentage of authorsAuthors
    {{ citations }}{{ percentage|floatformat:2 }}%{% include "stats/includes/number_with_details_cell.html" with content_limit=10 %}
    - -

    Note that the citation counts do not exclude self-references.

    diff --git a/ietf/templates/stats/document_stats_author_continent.html b/ietf/templates/stats/document_stats_author_continent.html deleted file mode 100644 index 5554ac341e..0000000000 --- a/ietf/templates/stats/document_stats_author_continent.html +++ /dev/null @@ -1,69 +0,0 @@ -{% load origin %} -{% origin %} -
    - -

    Data

    - - - - - - - - - {% if table_data %} - - {% for continent, percentage, count, names in table_data %} - - - - - - {% endfor %} - - {% endif %} -
    ContinentPercentage of authorsAuthors
    {{ continent|default:"(unknown)" }}{{ percentage|floatformat:2 }}%{% include "stats/includes/number_with_details_cell.html" %}
    -

    - The statistics are based entirely on the author addresses provided - with each Internet-Draft. Since this varies across documents, a traveling - author may be counted in more than country, making the total sum - more than 100%. -

    \ No newline at end of file diff --git a/ietf/templates/stats/document_stats_author_country.html b/ietf/templates/stats/document_stats_author_country.html deleted file mode 100644 index 72299cc397..0000000000 --- a/ietf/templates/stats/document_stats_author_country.html +++ /dev/null @@ -1,136 +0,0 @@ -{% load origin %} -{% origin %} -
    - -

    Data

    - - - - - - - - - {% if table_data %} - - {% for country, percentage, count, names in table_data %} - - - - - - {% endfor %} - - {% endif %} -
    CountryPercentage of authorsAuthors
    {{ country|default:"(unknown)" }}{{ percentage|floatformat:2 }}%{% include "stats/includes/number_with_details_cell.html" %}
    -

    - The statistics are based entirely on the author addresses provided - with each Internet-Draft. Since this varies across documents, a traveling - author may be counted in more than country, making the total sum - more than 100%. -

    -

    - In case no country information is found for an author in the time - period, the author is counted as (unknown). -

    -

    - EU (European Union) is not a country, but has been added for reference, as the sum of - all current EU member countries: - {% for c in eu_countries %} - {{ c.name }}{% if not forloop.last %},{% endif %} - {% endfor %} - . -

    -

    Country Aliases

    -

    - In generating the above statistics, some heuristics have been - applied to figure out which country each author is from. -

    -{% if request.GET.showaliases %} -

    - Hide generated aliases -

    - {% if request.user.is_staff %} -

    - Note: since you're an admin, some extra links are visible. You - can either correct a document author entry directly in case the - information is obviously missing or add an alias if an unknown - country name - is being used. -

    - {% endif %} - {% if alias_data %} - - - - - - {% if alias_data %} - - {% for name, alias, country in alias_data %} - - - - - {% endfor %} - - {% endif %} -
    CountryAlias
    - {% if country and request.user.is_staff %} - {{ name|default:"(unknown)" }} - {% else %} - {{ name|default:"(unknown)" }} - {% endif %} - - {{ alias }} - {% if request.user.is_staff and name != "EU" %} - - Matching authors - - {% endif %} -
    - {% endif %} -{% else %} -

    - Show generated aliases -

    -{% endif %} \ No newline at end of file diff --git a/ietf/templates/stats/document_stats_author_documents.html b/ietf/templates/stats/document_stats_author_documents.html deleted file mode 100644 index 28e33e6737..0000000000 --- a/ietf/templates/stats/document_stats_author_documents.html +++ /dev/null @@ -1,69 +0,0 @@ -{% load origin %} -{% origin %} -
    - -

    Data

    - - - - - - - - - {% if table_data %} - - {% for document_count, percentage, count, names in table_data %} - - - - - - {% endfor %} - - {% endif %} -
    DocumentsPercentage of authorsAuthors
    {{ document_count }}{{ percentage|floatformat:2 }}%{% include "stats/includes/number_with_details_cell.html" with content_limit=10 %}
    diff --git a/ietf/templates/stats/document_stats_author_hindex.html b/ietf/templates/stats/document_stats_author_hindex.html deleted file mode 100644 index ab3215d355..0000000000 --- a/ietf/templates/stats/document_stats_author_hindex.html +++ /dev/null @@ -1,83 +0,0 @@ -{% load origin %} -{% origin %} -
    - -

    Data

    - - - - - - - - - {% if table_data %} - - {% for h_index, percentage, count, names in table_data %} - - - - - - {% endfor %} - - {% endif %} -
    h-indexPercentage of authorsAuthors
    {{ h_index }}{{ percentage|floatformat:2 }}%{% include "stats/includes/number_with_details_cell.html" with content_limit=25 %}
    -

    - Hirsch index or h-index is a - - measure of the - productivity and impact of the publications of an author - . - An - author with an h-index of 5 has had 5 publications each cited at - least 5 times - to increase the index to 6, the 5 publications plus - 1 more would have to have been cited at least 6 times, each. Thus a - high h-index requires many highly-cited publications. -

    -

    - Note that the h-index calculations do not exclude self-references. -
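For reference, the definition quoted above maps to a short calculation. A minimal sketch, illustrative only (the removed template and its view code are not reproduced here):

```python
def h_index(citation_counts: list[int]) -> int:
    """Largest h such that at least h publications each have at least h citations."""
    counts = sorted(citation_counts, reverse=True)
    h = 0
    for i, c in enumerate(counts, start=1):
        if c >= i:
            h = i
        else:
            break
    return h

# The example in the text: five publications cited at least five times each -> h-index 5;
# one extra lightly cited publication does not raise it to 6.
assert h_index([10, 8, 6, 5, 5]) == 5
assert h_index([10, 8, 6, 5, 5, 1]) == 5
assert h_index([10, 8, 7, 6, 6, 6]) == 6
```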

    diff --git a/ietf/templates/stats/document_stats_authors.html b/ietf/templates/stats/document_stats_authors.html deleted file mode 100644 index 5c1bbbdf4c..0000000000 --- a/ietf/templates/stats/document_stats_authors.html +++ /dev/null @@ -1,68 +0,0 @@ -{% load origin %} -{% origin %} -
    - -

    Data

    - - - - - - - - - {% if table_data %} - - {% for author_count, percentage, count, names in table_data %} - - - - - - {% endfor %} - - {% endif %} -
    AuthorsPercentage of {{ doc_label }}s{{ doc_label|capfirst }}s
    {{ author_count }}{{ percentage|floatformat:2 }}%{% include "stats/includes/number_with_details_cell.html" %}
    diff --git a/ietf/templates/stats/document_stats_format.html b/ietf/templates/stats/document_stats_format.html deleted file mode 100644 index 32c25fe378..0000000000 --- a/ietf/templates/stats/document_stats_format.html +++ /dev/null @@ -1,63 +0,0 @@ -{% load origin %} -{% origin %} -
    - -

    Data

    - - - - - - - - - {% if table_data %} - - {% for pages, percentage, count, names in table_data %} - - - - - - {% endfor %} - - {% endif %} -
    FormatPercentage of {{ doc_label }}s{{ doc_label|capfirst }}s
    {{ pages }}{{ percentage|floatformat:2 }}%{% include "stats/includes/number_with_details_cell.html" %}
    \ No newline at end of file diff --git a/ietf/templates/stats/document_stats_formlang.html b/ietf/templates/stats/document_stats_formlang.html deleted file mode 100644 index 217d79e3ef..0000000000 --- a/ietf/templates/stats/document_stats_formlang.html +++ /dev/null @@ -1,63 +0,0 @@ -{% load origin %} -{% origin %} -
    - -

    Data

    - - - - - - - - - {% if table_data %} - - {% for formal_language, percentage, count, names in table_data %} - - - - - - {% endfor %} - - {% endif %} -
    Formal languagePercentage of {{ doc_label }}s{{ doc_label|capfirst }}s
    {{ formal_language }}{{ percentage|floatformat:2 }}%{% include "stats/includes/number_with_details_cell.html" %}
    \ No newline at end of file diff --git a/ietf/templates/stats/document_stats_pages.html b/ietf/templates/stats/document_stats_pages.html deleted file mode 100644 index 73231b0e90..0000000000 --- a/ietf/templates/stats/document_stats_pages.html +++ /dev/null @@ -1,62 +0,0 @@ -{% load origin %} -{% origin %} -
    - -

    Data

    - - - - - - - - - {% if table_data %} - - {% for pages, percentage, count, names in table_data %} - - - - - - {% endfor %} - - {% endif %} -
    PagesPercentage of {{ doc_label }}s{{ doc_label|capfirst }}s
    {{ pages }}{{ percentage|floatformat:2 }}%{% include "stats/includes/number_with_details_cell.html" %}
    diff --git a/ietf/templates/stats/document_stats_words.html b/ietf/templates/stats/document_stats_words.html deleted file mode 100644 index 4e8c15e937..0000000000 --- a/ietf/templates/stats/document_stats_words.html +++ /dev/null @@ -1,62 +0,0 @@ -{% load origin %} -{% origin %} -
    - -

    Data

    - - - - - - - - - {% if table_data %} - - {% for pages, percentage, count, names in table_data %} - - - - - - {% endfor %} - - {% endif %} -
    WordsPercentage of {{ doc_label }}s{{ doc_label|capfirst }}s
    {{ pages }}{{ percentage|floatformat:2 }}%{% include "stats/includes/number_with_details_cell.html" %}
    diff --git a/ietf/templates/stats/document_stats_yearly.html b/ietf/templates/stats/document_stats_yearly.html deleted file mode 100644 index b819255ced..0000000000 --- a/ietf/templates/stats/document_stats_yearly.html +++ /dev/null @@ -1,52 +0,0 @@ -{% load origin %} -{% origin %} -
    - \ No newline at end of file diff --git a/ietf/templates/stats/includes/number_with_details_cell.html b/ietf/templates/stats/includes/number_with_details_cell.html deleted file mode 100644 index a5e88113ca..0000000000 --- a/ietf/templates/stats/includes/number_with_details_cell.html +++ /dev/null @@ -1,15 +0,0 @@ -{% load person_filters %} -{% if content_limit and count <= content_limit %} - {% for n in names %} - {% with n|person_by_name as person %} - {% if person %} - {% person_link person %} - {% else %} - {{ n }} - {% endif %} -
    - {% endwith %} - {% endfor %} -{% else %} - {{ count }} -{% endif %} \ No newline at end of file diff --git a/ietf/templates/stats/index.html b/ietf/templates/stats/index.html index 1c5026013c..38c8069507 100644 --- a/ietf/templates/stats/index.html +++ b/ietf/templates/stats/index.html @@ -11,14 +11,17 @@

    +

    + Statistics on authorship are not currently available. +

    {% endblock %} \ No newline at end of file diff --git a/ietf/templates/stats/meeting_stats.html b/ietf/templates/stats/meeting_stats.html index 606caffde0..fc41949a2e 100644 --- a/ietf/templates/stats/meeting_stats.html +++ b/ietf/templates/stats/meeting_stats.html @@ -1,35 +1,58 @@ {% extends "base.html" %} {% load origin %} +{% origin %} {% load ietf_filters static django_bootstrap5 %} -{% block title %}{{ stats_title }}{% endblock %} -{% block pagehead %} - - +{% block js %} + {{ total_chart_data|json_script:"total-chart-data" }} + {{ in_person_chart_data|json_script:"in-person-chart-data" }} + {% endblock %} {% block content %} {% origin %} -

    Meeting Statistics

    - {% if meeting %} -

    - « Back to overview -

    - {% endif %} +

    + {% block title %} + Statistics for IETF-{{ meeting_number }} ({{ meeting_date }}, {{ meeting_city }}, {{ meeting_country }}) Registrations + {% endblock %} +

    - -
    + +
    {% for slug, label, url in possible_stats_types %} {{ label }} + {% if slug == stats_type %} + active + {% endif %}" + href="{{ url }}">{{ label }} + {% endfor %} +
    + +
    + {% for num, url in possible_meeting_numbers %} + {{ num }} {% endfor %}
    -
    {% include content_template %}
    -{% endblock %} -{% block js %} - - - +

+ This page provides a visual representation of the total registrations for IETF-{{ meeting_number }} by {{ stats_type }}. + Only categories having more than {{ minimum_required }} registrations are displayed separately; + otherwise they are grouped under "Other". +
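The grouping rule described above can be sketched as follows; the function and data names are illustrative assumptions, not the actual view code behind this template:

```python
from collections import Counter

def group_small_categories(counts: Counter, minimum_required: int) -> Counter:
    """Keep categories with more than minimum_required registrations; fold the rest into "Other"."""
    grouped = Counter()
    for category, count in counts.items():
        if count > minimum_required:
            grouped[category] = count
        else:
            grouped["Other"] += count
    return grouped

# e.g. with minimum_required=20, small categories are merged into "Other"
totals = Counter({"US": 250, "DE": 60, "JP": 45, "IS": 7, "PE": 3})
print(group_small_categories(totals, minimum_required=20))
# Counter({'US': 250, 'DE': 60, 'JP': 45, 'Other': 10})
```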

    +
    +
    +

Total Registrations by {{ stats_type|title }} ({{ total_total }} in total)

    +
    + +
    +
    +
    +

In Person Registrations by {{ stats_type|title }} ({{ in_person_total }} in total)

    +
    + +
    +
    +
    {% endblock %} \ No newline at end of file diff --git a/ietf/templates/stats/meeting_stats_continent.html b/ietf/templates/stats/meeting_stats_continent.html deleted file mode 100644 index 42ca03a409..0000000000 --- a/ietf/templates/stats/meeting_stats_continent.html +++ /dev/null @@ -1,61 +0,0 @@ -{% load origin %} -{% origin %} -
    - -

    Data

    - - - - - - - - - - {% for continent, percentage, count, names in table_data %} - - - - - - {% endfor %} - -
    ContinentPercentage of attendeesAttendees
    {{ continent|default:"(unknown)" }}{{ percentage|floatformat:2 }}%{% include "stats/includes/number_with_details_cell.html" %}
    diff --git a/ietf/templates/stats/meeting_stats_country.html b/ietf/templates/stats/meeting_stats_country.html deleted file mode 100644 index cebbad3c9f..0000000000 --- a/ietf/templates/stats/meeting_stats_country.html +++ /dev/null @@ -1,97 +0,0 @@ -{% load origin %} -{% origin %} -
    - -
    - -

    Data

    - - - - - - - - - - {% for country, percentage, count, names in table_data %} - - - - - - {% endfor %} - -
    CountryPercentage of attendeesAttendees
    {{ country|default:"(unknown)" }}{{ percentage|floatformat:2 }}%{% include "stats/includes/number_with_details_cell.html" %}
    -

    - EU (European Union) is not a country, but has been added for reference, as the sum of - all current EU member countries: - {% for c in eu_countries %} - {{ c.name }}{% if not forloop.last %},{% endif %} - {% endfor %} - . -

    \ No newline at end of file diff --git a/ietf/templates/stats/meeting_stats_overview.html b/ietf/templates/stats/meeting_stats_overview.html deleted file mode 100644 index 1136e458b8..0000000000 --- a/ietf/templates/stats/meeting_stats_overview.html +++ /dev/null @@ -1,160 +0,0 @@ -{% load origin %} -{% origin %} -
    - -{% if table_data %} -

    Data

    - - - - - - - - - - - - - {% for meeting, url, count, country in table_data %} - - {% if meeting.get_number > 71 %} - - - - - - - {% else %} - - - - - - - {% endif %} - - {% endfor %} - -
    MeetingDateCityCountryContinentAttendees
    - {{ meeting.number }} - {{ meeting.date }} - {{ meeting.city }} - {{ country.name }}{{ country.continent }}{% include "stats/includes/number_with_details_cell.html" %}{{ meeting.number }}{{ meeting.date }}{{ meeting.city }}{{ country.name }}{{ country.continent }}{% include "stats/includes/number_with_details_cell.html" %}
    -{% endif %} diff --git a/ietf/templates/stats/meetings_timeline.html b/ietf/templates/stats/meetings_timeline.html new file mode 100644 index 0000000000..40f46880cc --- /dev/null +++ b/ietf/templates/stats/meetings_timeline.html @@ -0,0 +1,74 @@ +{% extends "base.html" %} +{% load origin %} +{% origin %} +{% load ietf_filters static django_bootstrap5 %} +{% block js %} + {{ total_chart_data|json_script:"total-chart-data" }} + {{ in_person_chart_data|json_script:"in-person-chart-data" }} + {{ stats_type|json_script:"stats-type-data" }} + +{% endblock %} +{% block content %} + {% origin %} +

    + {% block title %} + Statistics for IETF Meeting Registrations + {% endblock %} +

    + +
    + +
    + {% for slug, label, url in possible_stats_types %} + {{ label }} + {% endfor %} +
    + +
    + {% for num, url in possible_meeting_numbers %} + {{ num }} + {% endfor %} +
    +
    +

    + {% if stats_type == 'total' %} + This page provides a timeline of meeting registrations. + {% else %} + This page provides a timeline of meeting registrations by {{ stats_type }} with a limit of {{ top_n }} categories. + {% endif %} + Panning can be done via the mouse or with a finger. Zooming is done via the mouse wheel or via a pinch gesture. Press ESC + or click to reset panning/zooming. +
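For the by-category timeline, the `top_n` limit mentioned above could be applied along these lines; the helper below is a hedged sketch with invented names, not the datatracker's actual chart-building code.

```python
# Illustrative sketch only: keep the top_n largest category series for the timeline.
# `series` maps a category label to its per-meeting registration counts.
def top_n_series(series: dict[str, list[int]], top_n: int) -> dict[str, list[int]]:
    ranked = sorted(series.items(), key=lambda item: sum(item[1]), reverse=True)
    return dict(ranked[:top_n])


data = {"US": [100, 120], "DE": [40, 35], "XX": [1, 2]}
assert top_n_series(data, top_n=2) == {"US": [100, 120], "DE": [40, 35]}
```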

    +
    +
    + {% if stats_type == 'total' %} +

    Total Registrations

    + {% else %} +

    Total Registrations by {{ stats_type|title }}

    + {% endif %} +
    + +
    +
    + {% if stats_type != 'total' %} +
    + {% if stats_type == 'total' %} +

    Total In Person Registrations

    + {% else %} +

    In Person Registrations by {{ stats_type|title }}

    + {% endif %} +
    + +
    +
    + {% endif %} +
    +{% endblock %} \ No newline at end of file diff --git a/ietf/templates/status/latest.html b/ietf/templates/status/latest.html new file mode 100644 index 0000000000..3ad54c9dc3 --- /dev/null +++ b/ietf/templates/status/latest.html @@ -0,0 +1,18 @@ +{% load origin %} +{% load ietf_filters static %} +{% origin %} + + + +{% if title %} +

    {{ title }}

    +

    {{ body }} read more
    {{ date }}

    +{% else %} +

    No site status message.

    +{% endif %} diff --git a/ietf/templates/status/status.html b/ietf/templates/status/status.html new file mode 100644 index 0000000000..18f64c49a8 --- /dev/null +++ b/ietf/templates/status/status.html @@ -0,0 +1,15 @@ +{% extends "base.html" %} +{% load origin %} +{% load ietf_filters static %} +{% block content %} + {% origin %} +

    + {% block title %} {{ status.title }} {% endblock %} + {% if status.active == False %} + inactive + {% endif %} +

    +
    + {{ status_page_html }} +
    +{% endblock %} \ No newline at end of file diff --git a/ietf/templates/submit/add_submit_email.html b/ietf/templates/submit/add_submit_email.html deleted file mode 100644 index 9ca07ec7fe..0000000000 --- a/ietf/templates/submit/add_submit_email.html +++ /dev/null @@ -1,32 +0,0 @@ -{% extends "base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} -{% load origin %} -{% load django_bootstrap5 %} -{% block title %} - {% if submission == None %} - Add new submission request email - {% else %} - Add submission request email to {{ submission.name }} - {% endif %} -{% endblock %} -{% block content %} - {% origin %} -

    Add email

    -

    - {% if submission == None %} - A new submission request will be created for the given name and revision. The - name must take the form draft-xxx-nn where xxx is lowercase letters, digits or dashes - and nn is the revision number, 00 for the initial revision. For example, - draft-my-spec-00. - {% else %} - The email will be added to the submission history for {{ submission.name }}. - {% endif %} -

    -
    - {% csrf_token %} - {% bootstrap_form form %} - - Back -
    -{% endblock %} \ No newline at end of file diff --git a/ietf/templates/submit/announce_to_lists.txt b/ietf/templates/submit/announce_to_lists.txt index d7a3739af6..cf1c8b325b 100644 --- a/ietf/templates/submit/announce_to_lists.txt +++ b/ietf/templates/submit/announce_to_lists.txt @@ -1,7 +1,7 @@ {% autoescape off %}{% filter wordwrap:78 %}Internet-Draft {{ submission.name }}-{{ submission.rev }}.txt is now available.{% if submission.group %} It is a work item of the {{ submission.group.name }} ({{ submission.group.acronym|upper }}){% if submission.group.type.name %} {{ submission.group.type.name }}{% endif %} of the {% if submission.group.type_id == "rg" %}IRTF{% else %}IETF{% endif %}.{% endif %}{% endfilter %} Title: {{ submission.title }} - Author{{ submission.authors|pluralize:",s" }}: {% if submission.authors|length == 1 %} {% endif %}{% for author in submission.authors %}{{ author.name }}{% if not forloop.last %} + Author{{ submission.authors|pluralize:",s" }}: {% if submission.authors|length == 1 %} {% endif %}{% for author in submission.authors %}{% firstof author.name author.affiliation "Unknown" %}{% if not forloop.last %} {% endif %}{% endfor %} Name: {{ submission.name }}-{{ submission.rev }}.txt Pages: {{ submission.pages }} diff --git a/ietf/templates/submit/api_submit_info.html b/ietf/templates/submit/api_submit_info.html index cd0d52410b..75fc1abfc2 100644 --- a/ietf/templates/submit/api_submit_info.html +++ b/ietf/templates/submit/api_submit_info.html @@ -1,56 +1,13 @@ {% extends "base.html" %} -{# Copyright The IETF Trust 2015-2022, All Rights Reserved #} +{# Copyright The IETF Trust 2015-2024, All Rights Reserved #} {% load origin ietf_filters %} -{% block title %}I-D submission API instructions{% endblock %} +{% block title %}Obsolete I-D submission API notice{% endblock %} {% block content %} {% origin %} -

    Internet-Draft submission API instructions

    +

    Obsolete Internet-Draft submission API notice

    - Note: API endpoint described here is known to have a slow response time or to fail - due to timeout for some Internet-Draft submissions, particularly those with large file sizes. - It is recommended to use the new API endpoint - instead for increased reliability. + The API endpoint previously available here is obsolete and is no longer supported. + Please use the new API endpoint + instead.

    -

    - A simplified Internet-Draft submission interface, intended for automation, - is available at {% absurl 'ietf.submit.views.api_submit' %}. -

    -

    - The interface accepts only XML uploads that can be processed on the server, and - requires the user to have a datatracker account. A successful submit still requires - the same email confirmation round-trip as submissions done through the regular - submission tool. -

    -

    - This interface does not provide all the options which the regular submission tool does. - Some limitations: -

    -
      -
    • Only XML-only uploads are supported, not text or combined.
    • -
    • Document replacement information cannot be supplied.
    • -
    • - The server expects multipart/form-data, supported by curl but not by wget. -
    • -
    -

    - It takes two parameters: -

    -
      -
    • - user which is the user login -
    • -
    • - xml, which is the submitted file -
    • -
    -

    - It returns an appropriate http result code, and a brief explanatory text message. -

    -

    - Here is an example: -

    -
    -$ curl -S -F "user=user.name@example.com" -F "xml=@~/draft-user-example.xml" {% absurl 'ietf.submit.views.api_submit' %}
    -Upload of draft-user-example OK, confirmation requests sent to:
    -User Name <user.name@example.com>
    -{% endblock %} \ No newline at end of file +{% endblock %} diff --git a/ietf/templates/submit/approval_request.txt b/ietf/templates/submit/approval_request.txt index 9186e556fc..7b9609a129 100644 --- a/ietf/templates/submit/approval_request.txt +++ b/ietf/templates/submit/approval_request.txt @@ -24,7 +24,7 @@ To approve the Internet-Draft, go to this URL (note: you need to login to be abl Authors: -{% for author in submission.authors %} {{ author.name }}{% if author.email %} <{{ author.email }}>{% endif%} +{% for author in submission.authors %} {% if author.name or author.affiliation %}{% firstof author.name author.affiliation %} {% endif %}{% if author.email %}<{{ author.email }}>{% endif %} {% endfor %} {% endautoescape %} diff --git a/ietf/templates/submit/email.html b/ietf/templates/submit/email.html deleted file mode 100644 index dde4fa50c1..0000000000 --- a/ietf/templates/submit/email.html +++ /dev/null @@ -1,25 +0,0 @@ -{% extends "base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} -{% load origin %} -{% load static %} -{% load django_bootstrap5 %} -{% block title %}Email related to{{ submission.name }}{% endblock %} -{% block pagehead %} - -{% endblock %} -{% block content %} - {% origin %} -

    - Email related to -
    - {{ submission.name }} -

    -
    - {% csrf_token %} - {% bootstrap_form form %} - -
    -{% endblock %} -{% block js %} - -{% endblock %} \ No newline at end of file diff --git a/ietf/templates/submit/manual_post.html b/ietf/templates/submit/manual_post.html index 1f74af4a61..0da83e750f 100644 --- a/ietf/templates/submit/manual_post.html +++ b/ietf/templates/submit/manual_post.html @@ -1,5 +1,5 @@ {% extends "submit/submit_base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} +{# Copyright The IETF Trust 2015-2026, All Rights Reserved #} {% load origin static %} {% block pagehead %} @@ -27,17 +27,9 @@

    Submissions needing manual posting

    - - {{ s.name }}-{{ s.rev }} - - - {{ s.name }}-{{ s.rev }} - + {{ s.name }}-{{ s.rev }} + {{ s.submission_date }} {% if s.passes_checks %} @@ -58,76 +50,6 @@

    Submissions needing manual posting

    {% endif %} -

    Submissions awaiting Internet-Draft upload

    - {% if not waiting_for_draft %} -

    - There are no submissions awaiting Internet-Draft upload. -

    - {% else %} - - - - - - - - - - - {% for s in waiting_for_draft %} - - {% if user.is_authenticated %} - - {% else %} - - {% endif %} - - - - - {% endfor %} - -
    NameRevSubmitted
    - - {{ s.name }} - - - {{ s.name }} - {{ s.rev }}{{ s.submission_date }} - {% if user|has_role:"Secretariat" %} -
    - {% csrf_token %} - - - -
    - {% endif %} - {% if user|has_role:"Secretariat" %} - - Add email - - {% endif %} -
    - {% endif %} - {% if user|has_role:"Secretariat" %} -
    - New submission from email - - {% endif %} {% endblock %} {% block js %} diff --git a/ietf/templates/submit/manual_post_request.txt b/ietf/templates/submit/manual_post_request.txt index 7b515fbeb0..7fe19e9a23 100644 --- a/ietf/templates/submit/manual_post_request.txt +++ b/ietf/templates/submit/manual_post_request.txt @@ -33,7 +33,7 @@ I-D Submission Tool URL: Authors: -{% for author in submission.authors %} {{ author.name }}{% if author.email %} <{{ author.email }}>{% endif%} +{% for author in submission.authors %} {% if author.name or author.affiliation %}{% firstof author.name author.affiliation %} {% endif %}{% if author.email %}<{{ author.email }}>{% endif %} {% endfor %} Comment to the secretariat: diff --git a/ietf/templates/submit/search_submission.html b/ietf/templates/submit/search_submission.html index 3f827abbea..807d6d7d86 100644 --- a/ietf/templates/submit/search_submission.html +++ b/ietf/templates/submit/search_submission.html @@ -12,14 +12,8 @@
    {% csrf_token %}
    - - + {% bootstrap_form form %}
    - {% if error %} -

    - {{ error }} -

    - {% endif %}
    {% endblock %} \ No newline at end of file diff --git a/ietf/templates/submit/submission_email.html b/ietf/templates/submit/submission_email.html deleted file mode 100644 index 801ace3b9b..0000000000 --- a/ietf/templates/submit/submission_email.html +++ /dev/null @@ -1,61 +0,0 @@ -{% extends "submit/submit_base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} -{% load origin static textfilters ietf_filters %} -{% block title %}Submission email{% endblock %} -{% load ietf_filters %} -{% block submit_content %} - {% origin %} -

    Email for {{ submission.name }}

    -
    -
    - Uploaded -
    -
    - {{ message.time }} -
    -
    - Date -
    -
    - {{ message.message.time }} -
    -
    - From -
    -
    - {{ message.message.frm|linkify }} -
    -
    - Subject -
    -
    - {{ message.message.subject }} -
    -
    - Message -
    -
    -
    {{ message.message.body|urlize_ietf_docs|linkify|linebreaksbr }}
    -
    -
    - Attachment -
    -
    - {% for a in attachments %} - - {{ a.filename }} - -
    - {% endfor %} -
    -
    - {% if user|has_role:"Secretariat" %} - - Reply - - {% endif %} -{% endblock %} \ No newline at end of file diff --git a/ietf/templates/submit/submission_status.html b/ietf/templates/submit/submission_status.html index 6f18d70946..cdc5dd4007 100644 --- a/ietf/templates/submit/submission_status.html +++ b/ietf/templates/submit/submission_status.html @@ -285,7 +285,7 @@

    Meta-data from the submission

    Author {{ forloop.counter }} - {{ author.name }} + {% if author.name %}{{ author.name }}{% endif %} {% if author.email %}<{{ author.email|linkify }}>{% endif %}
    {% if author.affiliation %} @@ -520,14 +520,6 @@

    {% endif %} - {% if user|has_role:"Secretariat" %} - - Send Email - - {% endif %} {% if show_send_full_url %}

    @@ -584,39 +576,10 @@

    {% endif %} - {% if e.desc|startswith:"Received message" or e.desc|startswith:"Sent message" %} - {% with m=e.submissionemailevent.message %} - {% if user.is_authenticated %} - - {% if e.desc|startswith:"Received message" and user|has_role:"Secretariat" %} - - Reply - - {% endif %} - Email: - - {{ e.desc }} - - - {% else %} - - Email: - - {{ e.desc }} - - - {% endif %} - {% endwith %} - {% else %} - - {{ e.desc|urlize_ietf_docs|linkify }} - - {% endif %} + + + {{ e.desc|urlize_ietf_docs|linkify }} + {% endfor %} diff --git a/ietf/templates/submit/submit_base.html b/ietf/templates/submit/submit_base.html index 0613cf5d6a..a2c7be1a3b 100644 --- a/ietf/templates/submit/submit_base.html +++ b/ietf/templates/submit/submit_base.html @@ -4,7 +4,7 @@ {% block pagehead %}{{ block.super }}{% endblock %} {% block content %} {% origin %} -

    Internet-Draft submission

    +

    Submit an Internet-Draft

    {% load ietf_filters %} {% for author in submission.authors %} - + {% if author.name %} + + {% endif %} {% endfor %} {% bootstrap_form_errors submitter_form %} {% bootstrap_field submitter_form.name %} diff --git a/ietf/templates/submit/upload_submission.html b/ietf/templates/submit/upload_submission.html index 7313d8f000..b8b1aca29c 100644 --- a/ietf/templates/submit/upload_submission.html +++ b/ietf/templates/submit/upload_submission.html @@ -73,6 +73,11 @@ $(document).ready(function() { if ($("#checkbox").is(':checked')) $("#other-formats").collapse('show') + + $("form").one('submit', function() { + $("button").attr('disabled', 'disabled'); + return true; + }) }); {% endblock %} diff --git a/ietf/templates/sync/bcp-index.txt b/ietf/templates/sync/bcp-index.txt new file mode 100644 index 0000000000..dd19920eba --- /dev/null +++ b/ietf/templates/sync/bcp-index.txt @@ -0,0 +1,52 @@ + + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + BCP INDEX + ------------- + +(CREATED ON: {{created_on}}.) + +This file contains citations for all BCPs in numeric order. The BCPs +form a sub-series of the RFC document series, specifically those RFCs +with the status BEST CURRENT PRACTICE. + +BCP citations appear in this format: + + [BCP#] Best Current Practice #, + . + At the time of writing, this BCP comprises the following: + + Author 1, Author 2, "Title of the RFC", BCP #, RFC №, + DOI DOI string, Issue date, + . + +For example: + + [BCP3] Best Current Practice 3, + . + At the time of writing, this BCP comprises the following: + + F. Kastenholz, "Variance for The PPP Compression Control Protocol + and The PPP Encryption Control Protocol", BCP 3, RFC 1915, + DOI 10.17487/RFC1915, February 1996, + . + +Key to fields: + +# is the BCP number. + +№ is the RFC number. + +BCPs and other RFCs may be obtained from https://www.rfc-editor.org. + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + BCP INDEX + --------- + + + +{% for bcp in bcps %}{{bcp|safe}} + +{% endfor %} diff --git a/ietf/templates/sync/fyi-index.txt b/ietf/templates/sync/fyi-index.txt new file mode 100644 index 0000000000..cf9d57d570 --- /dev/null +++ b/ietf/templates/sync/fyi-index.txt @@ -0,0 +1,52 @@ + + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + FYI INDEX + ------------- + +(CREATED ON: {{created_on}}.) + +This file contains citations for all FYIs in numeric order. The FYIs +(For Your Information) documents form a sub-series of the RFC series, +specifically those documents that may be of particular interest +to Internet users. The corresponding RFCs have status INFORMATIONAL. + +FYI citations appear in this format: + + [FYI#] For Your Information #, + . + At the time of writing, this FYI comprises the following: + + Author 1, Author 2, "Title of the RFC", FYI #, RFC №, + DOI DOI string, Issue date, + . + +For example: + + [FYI8] For Your Information 8, + . + At the time of writing, this FYI comprises the following: + + B. Fraser, "Site Security Handbook", FYI 8, RFC 2196, + DOI 10.17487/RFC2196, September 1997, + . + +Key to fields: + +# is the FYI number. + +№ is the RFC number. + +FYIs and other RFCs may be obtained from https://www.rfc-editor.org. 
+ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + FYI INDEX + --------- + + + +{% for fyi in fyis %}{{fyi|safe}} + +{% endfor %} diff --git a/ietf/templates/sync/rfc-index.txt b/ietf/templates/sync/rfc-index.txt new file mode 100644 index 0000000000..0f01ddfa90 --- /dev/null +++ b/ietf/templates/sync/rfc-index.txt @@ -0,0 +1,69 @@ + + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + RFC INDEX + ------------- + +(CREATED ON: {{created_on}}.) + +This file contains citations for all RFCs in numeric order. + +RFC citations appear in this format: + + #### Title of RFC. Author 1, Author 2, Author 3. Issue date. + (Format: ASCII) (Obsoletes xxx) (Obsoleted by xxx) (Updates xxx) + (Updated by xxx) (Also FYI ####) (Status: ssssss) (DOI: ddd) + +or + + #### Not Issued. + +For example: + + 1129 Internet Time Synchronization: The Network Time Protocol. D.L. + Mills. October 1989. (Format: TXT, PS, PDF, HTML) (Also RFC1119) + (Status: INFORMATIONAL) (DOI: 10.17487/RFC1129) + +Key to citations: + +#### is the RFC number. + +Following the RFC number are the title, the author(s), and the +publication date of the RFC. Each of these is terminated by a period. + +Following the number are the title (terminated with a period), the +author, or list of authors (terminated with a period), and the date +(terminated with a period). + +The format follows in parentheses. One or more of the following formats +are listed: text (TXT), PostScript (PS), Portable Document Format +(PDF), HTML, XML. + +Obsoletes xxxx refers to other RFCs that this one replaces; +Obsoleted by xxxx refers to RFCs that have replaced this one. +Updates xxxx refers to other RFCs that this one merely updates (but +does not replace); Updated by xxxx refers to RFCs that have updated +(but not replaced) this one. Generally, only immediately succeeding +and/or preceding RFCs are indicated, not the entire history of each +related earlier or later RFC in a related series. + +The (Also FYI ##) or (Also STD ##) or (Also BCP ##) phrase gives the +equivalent FYI, STD, or BCP number if the RFC is also in those +document sub-series. The Status field gives the document's +current status (see RFC 2026). The (DOI ddd) field gives the +Digital Object Identifier. + +RFCs may be obtained in a number of ways, using HTTP, FTP, or email. +See the RFC Editor Web page http://www.rfc-editor.org + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + RFC INDEX + --------- + + + +{% for rfc in rfcs %}{{rfc|safe}} + +{% endfor %} diff --git a/ietf/templates/sync/std-index.txt b/ietf/templates/sync/std-index.txt new file mode 100644 index 0000000000..a4a5fba946 --- /dev/null +++ b/ietf/templates/sync/std-index.txt @@ -0,0 +1,51 @@ + + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + STD INDEX + ------------- + +(CREATED ON: {{created_on}}.) + +This file contains citations for all STDs in numeric order. Each +STD represents a single Internet Standard technical specification, +composed of one or more RFCs with Internet Standard status. + +STD citations appear in this format: + + [STD#] Internet Standard #, + . + At the time of writing, this STD comprises the following: + + Author 1, Author 2, "Title of the RFC", STD #, RFC №, + DOI DOI string, Issue date, + . + +For example: + + [STD6] Internet Standard 6, + . + At the time of writing, this STD comprises the following: + + J. 
Postel, "User Datagram Protocol", STD 6, RFC 768, + DOI 10.17487/RFC0768, August 1980, + . + +Key to fields: + +# is the STD number. + +№ is the RFC number. + +STDs and other RFCs may be obtained from https://www.rfc-editor.org. + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + STD INDEX + --------- + + + +{% for std in stds %}{{std|safe}} + +{% endfor %} diff --git a/ietf/urls.py b/ietf/urls.py index 58179fbd0a..e822b2042e 100644 --- a/ietf/urls.py +++ b/ietf/urls.py @@ -5,6 +5,7 @@ from django.contrib import admin from django.contrib.sitemaps import views as sitemap_views from django.contrib.staticfiles.urls import staticfiles_urlpatterns +from django.http import HttpResponse from django.urls import include, path from django.views import static as static_view from django.views.generic import TemplateView @@ -19,8 +20,6 @@ from ietf.utils.urls import url -admin.autodiscover() - # sometimes, this code gets called more than once, which is an # that seems impossible to work around. try: @@ -35,6 +34,7 @@ urlpatterns = [ url(r'^$', views_search.frontpage), + url(r'^health/', lambda _: HttpResponse()), url(r'^accounts/', include('ietf.ietfauth.urls')), url(r'^admin/', admin.site.urls), url(r'^admin/docs/', include('django.contrib.admindocs.urls')), @@ -61,11 +61,12 @@ url(r'^sitemap-(?P
    .+).xml$', sitemap_views.sitemap, {'sitemaps': sitemaps}), url(r'^sitemap.xml$', sitemap_views.index, { 'sitemaps': sitemaps}), url(r'^stats/', include('ietf.stats.urls')), + url(r'^status/', include('ietf.status.urls')), url(r'^stream/', include(stream_urls)), url(r'^submit/', include('ietf.submit.urls')), url(r'^sync/', include('ietf.sync.urls')), url(r'^templates/', include('ietf.dbtemplate.urls')), - url(r'^(?P(wg|rg|ag|rag|team|dir|review|area|program|iabasg|adhoc|ise|adm|rfcedtyp|edwg|edappr))/', include(grouptype_urls)), + url(r'^(?P(wg|rg|ag|rag|team|dir|review|area|program|iabasg|iabworkshop|adhoc|ise|adm|rfcedtyp|edwg|edappr))/', include(grouptype_urls)), # Redirects url(r'^(?Ppublic)/', include('ietf.redirects.urls')), diff --git a/ietf/utils/__init__.py b/ietf/utils/__init__.py index 7f1df97602..fbe55eb043 100644 --- a/ietf/utils/__init__.py +++ b/ietf/utils/__init__.py @@ -1 +1,29 @@ -# Copyright The IETF Trust 2007, All Rights Reserved +# Copyright The IETF Trust 2007-2024, All Rights Reserved +import subprocess + + +class _ToolVersionManager: + _known = [ + "pyang", + "xml2rfc", + "xym", + "yanglint", + ] + _versions: dict[str, str] = dict() + + def __getitem__(self, item): + if item not in self._known: + return "Unknown" + elif item not in self._versions: + try: + self._versions[item] = subprocess.run( + [item, "--version"], + capture_output=True, + check=True, + ).stdout.decode().strip() + except subprocess.CalledProcessError: + return "Unknown" + return self._versions[item] + + +tool_version = _ToolVersionManager() diff --git a/ietf/utils/admin.py b/ietf/utils/admin.py index fa1ebb7081..cb8841cdc6 100644 --- a/ietf/utils/admin.py +++ b/ietf/utils/admin.py @@ -1,65 +1,30 @@ -# Copyright The IETF Trust 2011-2020, All Rights Reserved -# -*- coding: utf-8 -*- +# Copyright The IETF Trust 2011-2026, All Rights Reserved from django.contrib import admin -from django.utils.encoding import force_str +from .models import DumpInfo, DirtyBits -from ietf.utils.models import VersionInfo -def name(obj): - if hasattr(obj, 'abbrev'): - return obj.abbrev() - elif hasattr(obj, 'name'): - if callable(obj.name): - name = obj.name() - else: - name = force_str(obj.name) - if name: - return name - return str(obj) - -def admin_link(field, label=None, ordering="", display=name, suffix=""): - if not label: - label = field.capitalize().replace("_", " ").strip() - if ordering == "": - ordering = field - def _link(self): - obj = self - for attr in field.split("__"): - obj = getattr(obj, attr) - if callable(obj): - obj = obj() - if hasattr(obj, "all"): - objects = obj.all() - elif callable(obj): - objects = obj() - if not hasattr(objects, "__iter__"): - objects = [ objects ] - elif hasattr(obj, "__iter__"): - objects = obj - else: - objects = [ obj ] - chunks = [] - for obj in objects: - app = obj._meta.app_label - model = obj.__class__.__name__.lower() - id = obj.pk - chunks += [ '%(display)s' % - {'app':app, "model": model, "id":id, "display": display(obj), "suffix":suffix, } ] - return ", ".join(chunks) - _link.allow_tags = True - _link.short_description = label - _link.admin_order_field = ordering - return _link +class SaferStackedInline(admin.StackedInline): + """StackedInline without delete by default""" -from .models import DumpInfo + can_delete = False # no delete button + show_change_link = True # show a link to the resource (where it can be deleted) + + +class SaferTabularInline(admin.TabularInline): + """TabularInline without delete by default""" + + can_delete = False # no delete button 
+ show_change_link = True # show a link to the resource (where it can be deleted) + + +@admin.register(DumpInfo) class DumpInfoAdmin(admin.ModelAdmin): - list_display = ['date', 'host', 'tz'] - list_filter = ['date'] -admin.site.register(DumpInfo, DumpInfoAdmin) + list_display = ["date", "host", "tz"] + list_filter = ["date"] -class VersionInfoAdmin(admin.ModelAdmin): - list_display = ['command', 'switch', 'version', 'time', ] -admin.site.register(VersionInfo, VersionInfoAdmin) +@admin.register(DirtyBits) +class DirtyBitsAdmin(admin.ModelAdmin): + list_display = ["slug", "dirty_time", "processed_time"] diff --git a/ietf/utils/aiosmtpd.py b/ietf/utils/aiosmtpd.py new file mode 100644 index 0000000000..3e4cd65dd9 --- /dev/null +++ b/ietf/utils/aiosmtpd.py @@ -0,0 +1,73 @@ +# Copyright The IETF Trust 2014-2025, All Rights Reserved +"""aiosmtpd-related utilities + +These are for testing / dev use. If you're using this for production code, think very +hard about the choices you're making... +""" +from aiosmtpd import handlers +from aiosmtpd.controller import Controller +from aiosmtpd.smtp import SMTP +from email.utils import parseaddr +from typing import Optional, TextIO + + +class SMTPTestHandler: + + def __init__(self, inbox: list): + self.inbox = inbox + + async def handle_DATA(self, server, session, envelope): + """Handle the DATA command and 'deliver' the message""" + + self.inbox.append(envelope.content) + # Per RFC2033: https://datatracker.ietf.org/doc/html/rfc2033.html#section-4.2 + # ...after the final ".", the server returns one reply + # for each previously successful RCPT command in the mail transaction, + # in the order that the RCPT commands were issued. Even if there were + # multiple successful RCPT commands giving the same forward-path, there + # must be one reply for each successful RCPT command. + return "\n".join("250 OK" for _ in envelope.rcpt_tos) + + async def handle_RCPT(self, server, session, envelope, address, rcpt_options): + """Handle an RCPT command and add the address to the envelope if it is acceptable""" + _, address = parseaddr(address) + if address == "": + return "501 Syntax: RCPT TO:
    " + if "poison" in address: + return "550 Error: Not touching that" + # At this point the address is acceptable + envelope.rcpt_tos.append(address) + return "250 OK" + + +class SMTPTestServerDriver: + + def __init__(self, address: str, port: int, inbox: Optional[list] = None): + # Allow longer lines than the 1001 that RFC 5321 requires. As of 2025-04-16 the + # datatracker emits some non-compliant messages. + # See https://aiosmtpd.aio-libs.org/en/latest/smtp.html + SMTP.line_length_limit = 4000 # tests start failing between 3000 and 4000 + self.controller = Controller( + hostname=address, + port=port, + handler=SMTPTestHandler(inbox=[] if inbox is None else inbox), + ) + + def start(self): + self.controller.start() + + def stop(self): + self.controller.stop() + + +class DevDebuggingHandler(handlers.Debugging): + """Debugging handler for use in dev ONLY""" + def __init__(self, stream: Optional[TextIO] = None): + # Allow longer lines than the 1001 that RFC 5321 requires. As of 2025-04-16 the + # datatracker emits some non-compliant messages. + # See https://aiosmtpd.aio-libs.org/en/latest/smtp.html + # Doing this in a handler class is a huge hack. Tests all pass with this set + # to 4000, but make the limit longer for dev just in case. + SMTP.line_length_limit = 10000 + super().__init__(stream) + diff --git a/ietf/utils/aliases.py b/ietf/utils/aliases.py deleted file mode 100644 index 9f9aebc0b9..0000000000 --- a/ietf/utils/aliases.py +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env python -# Copyright The IETF Trust 2013-2020, All Rights Reserved -# -*- coding: utf-8 -*- -# -*- Python -*- -# -# $Id: aliasutil.py $ -# -# Author: Markus Stenberg -# - - -""" - -Mailing list alias dumping utilities - -""" - - -from django.conf import settings -from ietf.utils.log import log - -import debug # pyflakes:ignore - -def rewrite_email_address(email): - """ Prettify the email address (and if it's empty, skip it by - returning None). """ - if not email: - return - email = email.strip() - if not email: - return - if email[0]=='<' and email[-1] == '>': - email = email[1:-1] - # If it doesn't look like email, skip - if '@' not in email and '?' 
not in email: - return - return email - -def rewrite_address_list(l): - """ This utility function makes sure there is exactly one instance - of an address within the result list, and preserves order - (although it may not be relevant to start with) """ - h = {} - for address in l: - #address = address.strip() - if address in h: continue - h[address] = True - yield address - -def dump_sublist(afile, vfile, alias, adomains, vdomain, emails): - if not emails: - return emails - # Nones in the list should be skipped - emails = [_f for _f in emails if _f] - - # Make sure emails are sane and eliminate the Nones again for - # non-sane ones - emails = [rewrite_email_address(e) for e in emails] - emails = [_f for _f in emails if _f] - - # And we'll eliminate the duplicates too but preserve order - emails = list(rewrite_address_list(emails)) - if not emails: - return emails - try: - filtername = 'xfilter-%s' % (alias, ) # in aliases, --> | expandname - expandname = 'expand-%s' % (alias, ) # in virtual, --> email list - - for domain in adomains: - aliasaddr = '%s@%s' % (alias, domain) # in virtual, --> filtername - vfile.write('%-64s %s\n' % (aliasaddr, filtername)) - afile.write('%-64s "|%s filter %s %s"\n' % (filtername+':', settings.POSTCONFIRM_PATH, expandname, vdomain)) - vfile.write('%-64s %s\n' % ("%s@%s"%(expandname, vdomain), ', '.join(emails))) - - except UnicodeEncodeError: - # If there's unicode in email address, something is badly - # wrong and we just silently punt - # XXX - is there better approach? - log('Error encoding email address for an %s alias: %s' % (alias, repr(emails))) - return [] - return emails - diff --git a/ietf/utils/coverage.py b/ietf/utils/coverage.py new file mode 100644 index 0000000000..bd205ce586 --- /dev/null +++ b/ietf/utils/coverage.py @@ -0,0 +1,90 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +from coverage import Coverage, CoverageData, FileReporter +from coverage.control import override_config as override_coverage_config +from coverage.results import Numbers +from coverage.report_core import get_analysis_to_report +from coverage.results import Analysis +from django.conf import settings + + +class CoverageManager: + checker: Coverage | None = None + started = False + + def start(self): + if settings.SERVER_MODE != "production" and not self.started: + self.checker = Coverage( + source=[settings.BASE_DIR], + cover_pylib=False, + omit=settings.TEST_CODE_COVERAGE_EXCLUDE_FILES, + ) + for exclude_regex in getattr( + settings, + "TEST_CODE_COVERAGE_EXCLUDE_LINES", + [], + ): + self.checker.exclude(exclude_regex) + self.checker.start() + self.started = True + + def stop(self): + if self.checker is not None: + self.checker.stop() + + def save(self): + if self.checker is not None: + self.checker.save() + + def report(self, include: list[str] | None = None): + if self.checker is None: + return None + reporter = CustomDictReporter() + with override_coverage_config( + self.checker, + report_include=include, + ): + return reporter.report(self.checker) + + +class CustomDictReporter: # pragma: no cover + total = Numbers() + + def report(self, coverage): + coverage_data = coverage.get_data() + coverage_data.set_query_contexts(None) + measured_files = {} + for file_reporter, analysis in get_analysis_to_report(coverage, None): + measured_files[file_reporter.relative_filename()] = self.report_one_file( + coverage_data, + analysis, + file_reporter, + ) + tot_numer, tot_denom = self.total.ratio_covered + return { + "coverage": 1 if tot_denom == 0 else tot_numer / 
tot_denom, + "covered": measured_files, + "format": 5, + } + + def report_one_file( + self, + coverage_data: CoverageData, + analysis: Analysis, + file_reporter: FileReporter, + ): + """Extract the relevant report data for a single file.""" + nums = analysis.numbers + self.total += nums + n_statements = nums.n_statements + numer, denom = nums.ratio_covered + fraction_covered = 1 if denom == 0 else numer / denom + missing_line_nums = sorted(analysis.missing) + # Extract missing lines from source files + source_lines = file_reporter.source().splitlines() + missing_lines = [source_lines[num - 1] for num in missing_line_nums] + return ( + n_statements, + fraction_covered, + missing_line_nums, + missing_lines, + ) diff --git a/ietf/utils/db.py b/ietf/utils/db.py index d451f6cfd8..49c89da13a 100644 --- a/ietf/utils/db.py +++ b/ietf/utils/db.py @@ -1,28 +1,67 @@ -# Copyright The IETF Trust 2021, All Rights Reserved -# -*- coding: utf-8 -*- - -# Taken from/inspired by -# https://stackoverflow.com/questions/55147169/django-admin-jsonfield-default-empty-dict-wont-save-in-admin -# -# JSONField should recognize {}, (), and [] as valid, non-empty JSON -# values. However, the base Field class excludes them +# Copyright The IETF Trust 2021-2025, All Rights Reserved + import jsonfield +from django.db import models + +from ietf.utils.fields import ( + IETFJSONField as FormIETFJSONField, + EmptyAwareJSONField as FormEmptyAwareJSONField, +) + + +class EmptyAwareJSONField(models.JSONField): + """JSONField that allows empty JSON values when model specifies empty=False + + Taken from/inspired by + https://stackoverflow.com/questions/55147169/django-admin-jsonfield-default-empty-dict-wont-save-in-admin + + JSONField should recognize {}, (), and [] as valid, non-empty JSON values. -from ietf.utils.fields import IETFJSONField as FormIETFJSONField + If customizing the formfield, the field must accept the `empty_values` argument. 
+ """ + + def __init__( + self, + *args, + empty_values=FormEmptyAwareJSONField.empty_values, + accepted_empty_values=None, + **kwargs, + ): + if accepted_empty_values is None: + accepted_empty_values = [] + self.empty_values = [x for x in empty_values if x not in accepted_empty_values] + super().__init__(*args, **kwargs) + + def formfield(self, **kwargs): + defaults = { + "form_class": FormEmptyAwareJSONField, + "empty_values": self.empty_values, + } + defaults.update(kwargs) + return super().formfield(**defaults) -class IETFJSONField(jsonfield.JSONField): +class IETFJSONField(jsonfield.JSONField): # pragma: no cover + # Deprecated - use EmptyAwareJSONField instead (different base class requires a + # new field name) + # Remove this class when migrations are squashed and it is no longer referenced form_class = FormIETFJSONField - def __init__(self, *args, empty_values=FormIETFJSONField.empty_values, accepted_empty_values=None, **kwargs): + def __init__( + self, + *args, + empty_values=FormIETFJSONField.empty_values, + accepted_empty_values=None, + **kwargs, + ): if accepted_empty_values is None: accepted_empty_values = [] - self.empty_values = [x - for x in empty_values - if x not in accepted_empty_values] + self.empty_values = [x for x in empty_values if x not in accepted_empty_values] super().__init__(*args, **kwargs) def formfield(self, **kwargs): - if 'form_class' not in kwargs or issubclass(kwargs['form_class'], FormIETFJSONField): - kwargs.setdefault('empty_values', self.empty_values) + if "form_class" not in kwargs or issubclass( + kwargs["form_class"], FormIETFJSONField + ): + kwargs.setdefault("empty_values", self.empty_values) return super().formfield(**{**kwargs}) diff --git a/ietf/utils/decorators.py b/ietf/utils/decorators.py index 254854a6cd..b50e0e7f96 100644 --- a/ietf/utils/decorators.py +++ b/ietf/utils/decorators.py @@ -4,7 +4,6 @@ import datetime -from decorator import decorator, decorate from functools import wraps from django.conf import settings @@ -16,36 +15,28 @@ import debug # pyflakes:ignore -from ietf.utils.test_runner import set_coverage_checking from ietf.person.models import Person, PersonalApiKey, PersonApiKeyEvent from ietf.utils import log -@decorator -def skip_coverage(f, *args, **kwargs): - if settings.TEST_CODE_COVERAGE_CHECKER: - set_coverage_checking(False) - result = f(*args, **kwargs) - set_coverage_checking(True) - return result - else: - return f(*args, **kwargs) - -@decorator -def person_required(f, request, *args, **kwargs): - if not request.user.is_authenticated: - raise ValueError("The @person_required decorator should be called after @login_required.") - try: - request.user.person - except Person.DoesNotExist: - return render(request, 'registration/missing_person.html') - return f(request, *args, **kwargs) + +def person_required(f): + @wraps(f) + def _wrapper(request, *args, **kwargs): + if not request.user.is_authenticated: + raise ValueError("The @person_required decorator should be called after @login_required.") + try: + request.user.person + except Person.DoesNotExist: + return render(request, 'registration/missing_person.html') + return f(request, *args, **kwargs) + return _wrapper def require_api_key(f): @wraps(f) def _wrapper(request, *args, **kwargs): def err(code, text): - return HttpResponse(text, status=code, content_type='text/plain') + return HttpResponse(text, status=code, content_type=f"text/plain; charset={settings.DEFAULT_CHARSET}") # Check method and get hash if request.method == 'POST': hash = request.POST.get('apikey') 
@@ -90,27 +81,56 @@ def err(code, text): return _wrapper -def _memoize(func, self, *args, **kwargs): - '''Memoize wrapper for instance methods. Use @lru_cache for functions.''' - if kwargs: # frozenset is used to ensure hashability - key = args, frozenset(list(kwargs.items())) - else: - key = args - # instance method, set up cache if needed - if not hasattr(self, '_cache'): - self._cache = {} - if not func in self._cache: - self._cache[func] = {} - # - cache = self._cache[func] - if key not in cache: - cache[key] = func(self, *args, **kwargs) - return cache[key] def memoize(func): + @wraps(func) + def _memoize(self, *args, **kwargs): + '''Memoize wrapper for instance methods. Use @lru_cache for functions.''' + if kwargs: # frozenset is used to ensure hashability + key = args, frozenset(list(kwargs.items())) + else: + key = args + # instance method, set up cache if needed + if not hasattr(self, '_cache'): + self._cache = {} + if not func in self._cache: + self._cache[func] = {} + # + cache = self._cache[func] + if key not in cache: + cache[key] = func(self, *args, **kwargs) + return cache[key] + if not hasattr(func, '__class__'): raise NotImplementedError("Use @lru_cache instead of memoize() for functions.") # For methods, we want the cache on the object, not on the class, in order # to not having to think about cache bloat and content becoming stale, so # we cannot set up the cache here. - return decorate(func, _memoize) + return _memoize + + +def ignore_view_kwargs(*args): + """Ignore the specified kwargs if they are present + + Usage: + @ignore_view_kwargs("ignore_arg1", "ignore_arg2") + def my_view(request, good_arg): + ... + + This will allow my_view() to be used in url() paths that have zero, one, or both of + ignore_arg1 and ignore_arg2 captured. These will be ignored, while good_arg will still + be captured as usual. + """ + kwargs_to_ignore = args + + def decorate(view): + @wraps(view) + def wrapped(*args, **kwargs): + for kwarg in kwargs_to_ignore: + kwargs.pop(kwarg, None) + return view(*args, **kwargs) + + return wrapped + + return decorate + diff --git a/ietf/utils/draft.py b/ietf/utils/draft.py index a1e79760ea..53d3d40811 100755 --- a/ietf/utils/draft.py +++ b/ietf/utils/draft.py @@ -65,7 +65,6 @@ opt_debug = False opt_timestamp = False opt_trace = False -opt_authorinfo = False opt_attributes = False # Don't forget to add the option variable to the globals list in _main below @@ -131,6 +130,24 @@ def acronym_match(s, l): #_debug(" s:%s; l:%s => %s; %s" % (s, l, acronym, s==acronym)) return s == acronym +def get_status_from_draft_text(text): + + # Take prefix to shortcut work over very large drafts + # 5000 is conservatively much more than a full page of characters and we + # only want the first 10 lines. + text = text.strip()[:5000] # Take prefix to shortcut work over very large drafts + text = re.sub(".\x08", "", text) # Get rid of inkribbon backspace-emphasis + text = text.replace("\r\n", "\n") # Convert DOS to unix + text = text.replace("\r", "\n") # Convert MAC to unix + lines = text.split("\n")[:10] + status = None + for line in lines: + status_match = re.search(r"^\s*Intended [Ss]tatus:\s*(.*?) 
", line) + if status_match: + status = status_match.group(1) + break + return status + class Draft: """Base class for drafts @@ -1314,8 +1331,6 @@ def getmeta(fn): # ---------------------------------------------------------------------- def _output(docname, fields, outfile=sys.stdout): - global company_domain - if opt_attributes: def outputkey(key, fields): field = fields[key] @@ -1355,9 +1370,8 @@ def _printmeta(fn, outfile=sys.stdout): # Main # ---------------------------------------------------------------------- -company_domain = {} # type: Dict[str, str] def _main(outfile=sys.stdout): - global opt_debug, opt_timestamp, opt_trace, opt_authorinfo, files, company_domain, opt_attributes + global opt_debug, opt_timestamp, opt_trace, files, opt_attributes # set default values, if any # ---------------------------------------------------------------------- # Option processing @@ -1405,8 +1419,6 @@ def _main(outfile=sys.stdout): elif opt in ["-T", "--trace"]: # Emit trace information while working opt_trace = True - company_domain = {} - if not files: files = [ "-" ] diff --git a/ietf/utils/fields.py b/ietf/utils/fields.py index 95d8a2aa7e..6e8765612f 100644 --- a/ietf/utils/fields.py +++ b/ietf/utils/fields.py @@ -1,12 +1,11 @@ -# Copyright The IETF Trust 2012-2020, All Rights Reserved +# Copyright The IETF Trust 2012-2025, All Rights Reserved # -*- coding: utf-8 -*- import datetime import json import re - -import jsonfield +from email.utils import parseaddr import debug # pyflakes:ignore @@ -14,10 +13,11 @@ from django import forms from django.db import models # pyflakes:ignore -from django.core.validators import validate_email +from django.core.validators import ProhibitNullCharactersValidator, validate_email from django.core.exceptions import ValidationError from django.utils.dateparse import parse_duration + class MultiEmailField(forms.Field): def to_python(self, value): "Normalize data to a list of strings." @@ -40,6 +40,25 @@ def validate(self, value): for email in value: validate_email(email) + +def validate_name_addr_email(value): + "Validate name-addr style email address" + name, addr = parseaddr(value) + if not addr: + raise ValidationError("Invalid email format.") + try: + validate_email(addr) # validate the actual address part + except ValidationError: + raise ValidationError("Invalid email address.") + + +class NameAddrEmailField(forms.CharField): + def validate(self, value): + "Check if value consists only of valid emails." 
+ super().validate(value) + validate_name_addr_email(value) + + def yyyymmdd_to_strftime_format(fmt): translation_table = sorted([ ("yyyy", "%Y"), @@ -328,8 +347,21 @@ def has_changed(self, initial, data): return super().has_changed(initial, data) -class IETFJSONField(jsonfield.fields.forms.JSONField): - def __init__(self, *args, empty_values=jsonfield.fields.forms.JSONField.empty_values, +class IETFJSONField(forms.JSONField): # pragma: no cover + # Deprecated - use EmptyAwareJSONField instead + def __init__(self, *args, empty_values=forms.JSONField.empty_values, + accepted_empty_values=None, **kwargs): + if accepted_empty_values is None: + accepted_empty_values = [] + self.empty_values = [x + for x in empty_values + if x not in accepted_empty_values] + + super().__init__(*args, **kwargs) + + +class EmptyAwareJSONField(forms.JSONField): + def __init__(self, *args, empty_values=forms.JSONField.empty_values, accepted_empty_values=None, **kwargs): if accepted_empty_values is None: accepted_empty_values = [] @@ -353,3 +385,20 @@ def update_dimension_fields(self, *args, **kwargs): super().update_dimension_fields(*args, **kwargs) except FileNotFoundError: pass # don't do anything if the file has gone missing + + +class ModelMultipleChoiceField(forms.ModelMultipleChoiceField): + """ModelMultipleChoiceField that rejects null characters cleanly""" + validate_no_nulls = ProhibitNullCharactersValidator() + + def clean(self, value): + try: + for item in value: + self.validate_no_nulls(item) + except TypeError: + # A TypeError probably means value is not iterable, which most commonly comes up + # with None as a value. If it's something more exotic, we don't know how to test + # for null characters anyway. Either way, trust the superclass clean() method to + # handle it. + pass + return super().clean(value) diff --git a/ietf/utils/html.py b/ietf/utils/html.py index 9d0cd7c84f..3f3efe2f37 100644 --- a/ietf/utils/html.py +++ b/ietf/utils/html.py @@ -5,11 +5,7 @@ import bleach -import copy import html2text -import lxml.etree -import lxml.html -import lxml.html.clean import debug # pyflakes:ignore @@ -17,62 +13,66 @@ from django.utils.functional import keep_lazy from ietf.utils.mime import get_mime_type -from ietf.utils.text import bleach_cleaner, tags as acceptable_tags -acceptable_protocols = ['http', 'https', 'mailto', 'xmpp', ] -def unescape(text): - """ - Returns the given text with ampersands, quotes and angle brackets decoded - for use in URLs. +# Allow the protocols/tags/attributes we specifically want, plus anything that bleach declares +# to be safe. As of 2025-01-27, the explicit lists for protocols and tags are a strict superset +# of bleach's defaults. 
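The allow-list approach described in the comment above can be tried in isolation with bleach's public API; the following is a minimal sketch with made-up input, not the cleaner configuration that the module actually defines below.

```python
import bleach

# Sketch: extend bleach's default allow-list and strip everything else.
allowed_tags = bleach.sanitizer.ALLOWED_TAGS.union({"p", "pre", "span"})
dirty = '<p onclick="evil()">hi <script>alert(1)</script> <a href="https://example.com">link</a></p>'
cleaned = bleach.clean(
    dirty,
    tags=allowed_tags,
    attributes={"a": ["href"]},  # only href survives on <a>; onclick on <p> is dropped
    strip=True,                  # remove disallowed tags instead of escaping them
)
# Disallowed markup (the script tag, the onclick handler) is gone; allowed markup remains.
```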
+acceptable_protocols = bleach.sanitizer.ALLOWED_PROTOCOLS.union( + {"http", "https", "mailto", "ftp", "xmpp"} +) +acceptable_tags = bleach.sanitizer.ALLOWED_TAGS.union( + { + # fmt: off + "a", "abbr", "acronym", "address", "b", "big", + "blockquote", "body", "br", "caption", "center", "cite", "code", "col", + "colgroup", "dd", "del", "dfn", "dir", "div", "dl", "dt", "em", "font", + "h1", "h2", "h3", "h4", "h5", "h6", "head", "hr", "html", "i", "ins", "kbd", + "li", "ol", "p", "pre", "q", "s", "samp", "small", "span", "strike", "style", + "strong", "sub", "sup", "table", "title", "tbody", "td", "tfoot", "th", "thead", + "tr", "tt", "u", "ul", "var" + # fmt: on + } +) +acceptable_attributes = bleach.sanitizer.ALLOWED_ATTRIBUTES | { + "*": ["id"], + "ol": ["start"], +} + + +# Instantiate sanitizer classes +_bleach_cleaner = bleach.sanitizer.Cleaner( + tags=acceptable_tags, + attributes=acceptable_attributes, + protocols=acceptable_protocols, + strip=True, +) + + +_liberal_bleach_cleaner = bleach.sanitizer.Cleaner( + tags=acceptable_tags.union({"img", "figure", "figcaption"}), + attributes=acceptable_attributes | {"img": ["src", "alt"]}, + protocols=acceptable_protocols, + strip=True, +) + + +def clean_html(text: str): + """Clean the HTML in a string""" + return _bleach_cleaner.clean(text) + + +def liberal_clean_html(text: str): + """More permissively clean the HTML in a string""" + return _liberal_bleach_cleaner.clean(text) - This function undoes what django.utils.html.escape() does - """ - return text.replace('&', '&').replace(''', "'").replace('"', '"').replace('>', '>').replace('<', '<' ) @keep_lazy(str) def remove_tags(html, tags): """Returns the given HTML sanitized, and with the given tags removed.""" - allowed = set(acceptable_tags) - set([ t.lower() for t in tags ]) + allowed = acceptable_tags - set(t.lower() for t in tags) return bleach.clean(html, tags=allowed, strip=True) -# ---------------------------------------------------------------------- -# Html fragment cleaning - -def sanitize_fragment(html): - return bleach_cleaner.clean(html) - -# ---------------------------------------------------------------------- -# Page cleaning - - -class Cleaner(lxml.html.clean.Cleaner): - charset = 'utf-8' - def __init__(self, charset='utf-8', **kw): - self.charset = charset - super(Cleaner, self).__init__(**kw) - - # Copied from lxml 4.2.0 and modified to insert charset meta: - def clean_html(self, html): - result_type = type(html) - if isinstance(html, (str, bytes)): - doc = lxml.html.fromstring(html) - else: - doc = copy.deepcopy(html) - self(doc) - head = doc.find('head') - if head != None: - meta = lxml.etree.Element('meta', charset=self.charset) - meta.tail = '\n' - head.insert(0, meta) - return lxml.html._transform_result(result_type, doc) - -# We will be saving as utf-8 later, so set that in the meta tag. -lxml_cleaner = Cleaner(allow_tags=acceptable_tags, remove_unknown_tags=None, style=False, page_structure=False, charset='utf-8') - -def sanitize_document(html): - return lxml_cleaner.clean_html(html) - # ---------------------------------------------------------------------- # Text field cleaning @@ -86,4 +86,15 @@ def clean_text_field(text): else: raise forms.ValidationError("Unexpected text field mime type: %s" % mime_type) return text - + + +def unescape(text): + """ + Returns the given text with ampersands, quotes and angle brackets decoded + for use in URLs. 
+ + This function undoes what django.utils.html.escape() does + """ + return text.replace('&', '&').replace(''', "'").replace('"', '"').replace('>', '>').replace('<', '<' ) + + diff --git a/ietf/utils/http.py b/ietf/utils/http.py index 6e6409e31f..cda51680ab 100644 --- a/ietf/utils/http.py +++ b/ietf/utils/http.py @@ -1,6 +1,8 @@ -# Copyright The IETF Trust 2023, All Rights Reserved +# Copyright The IETF Trust 2023-2024, All Rights Reserved # -*- coding: utf-8 -*- +from django.urls import resolve as urlresolve, Resolver404 + def is_ajax(request): """Checks whether a request was an AJAX call @@ -8,3 +10,25 @@ def is_ajax(request): exact reproduction of the deprecated method suggested there. """ return request.headers.get("x-requested-with") == "XMLHttpRequest" + +def validate_return_to_path(path, get_default_path, allowed_path_handlers): + if path is None: + path = get_default_path() + + # we need to ensure the path isn't used for attacks (eg phishing). + # `path` can be used in HttpResponseRedirect() which could redirect to Datatracker or offsite. + # Eg http://datatracker.ietf.org/...?ballot_edit_return_point=https://example.com/phish + # offsite links could be phishing attempts so let's reject them all, and require valid Datatracker + # routes + try: + # urlresolve will throw if the url doesn't match a route known to Django + match = urlresolve(path) + # further restrict by whether it's in the list of valid routes to prevent + # (eg) redirecting to logout + if match.url_name not in allowed_path_handlers: + raise ValueError("Invalid return to path not among valid matches") + pass + except Resolver404: + raise ValueError("Invalid return to path doesn't match a route") + + return path diff --git a/ietf/utils/jsonlogger.py b/ietf/utils/jsonlogger.py new file mode 100644 index 0000000000..589132977d --- /dev/null +++ b/ietf/utils/jsonlogger.py @@ -0,0 +1,34 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +from pythonjsonlogger.json import JsonFormatter +import time + + +class DatatrackerJsonFormatter(JsonFormatter): + converter = time.gmtime # use UTC + default_msec_format = "%s.%03d" # '.' 
instead of ',' + + +class GunicornRequestJsonFormatter(DatatrackerJsonFormatter): + """Only works with Gunicorn's logging""" + def add_fields(self, log_record, record, message_dict): + super().add_fields(log_record, record, message_dict) + log_record.setdefault("method", record.args["m"]) + log_record.setdefault("proto", record.args["H"]) + log_record.setdefault("remote_ip", record.args["h"]) + path = record.args["U"] # URL path + if record.args["q"]: # URL query string + path = "?".join([path, record.args["q"]]) + log_record.setdefault("path", path) + log_record.setdefault("status", record.args["s"]) + log_record.setdefault("referer", record.args["f"]) + log_record.setdefault("user_agent", record.args["a"]) + log_record.setdefault("len_bytes", record.args["B"]) + log_record.setdefault("duration_s", record.args["L"]) # decimal seconds + log_record.setdefault("host", record.args["{host}i"]) + log_record.setdefault("x_request_start", record.args["{x-request-start}i"]) + log_record.setdefault("x_forwarded_for", record.args["{x-forwarded-for}i"]) + log_record.setdefault("x_forwarded_proto", record.args["{x-forwarded-proto}i"]) + log_record.setdefault("cf_connecting_ip", record.args["{cf-connecting-ip}i"]) + log_record.setdefault("cf_ray", record.args["{cf-ray}i"]) + log_record.setdefault("asn", record.args["{x-ip-src-asnum}i"]) + log_record.setdefault("is_authenticated", record.args["{x-datatracker-is-authenticated}o"]) diff --git a/ietf/utils/jstest.py b/ietf/utils/jstest.py index a901df66f5..cf242fc4eb 100644 --- a/ietf/utils/jstest.py +++ b/ietf/utils/jstest.py @@ -1,7 +1,10 @@ # Copyright The IETF Trust 2014-2021, All Rights Reserved # -*- coding: utf-8 -*- +import os + from django.conf import settings +from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.urls import reverse as urlreverse from unittest import skipIf @@ -9,19 +12,24 @@ skip_message = "" try: from selenium import webdriver - from selenium.webdriver.chrome.service import Service - from selenium.webdriver.chrome.options import Options + from selenium.webdriver.firefox.service import Service + from selenium.webdriver.firefox.options import Options + from selenium.webdriver.support.ui import WebDriverWait + from selenium.webdriver.support import expected_conditions from selenium.webdriver.common.by import By - from selenium.webdriver.common.desired_capabilities import DesiredCapabilities except ImportError as e: skip_selenium = True skip_message = "Skipping selenium tests: %s" % e from ietf.utils.pipe import pipe -from ietf.utils.test_runner import IetfLiveServerTestCase +from ietf.utils.test_runner import ( + set_template_coverage, + set_url_coverage, + load_and_run_fixtures, +) -executable_name = 'chromedriver' +executable_name = 'geckodriver' code, out, err = pipe('{} --version'.format(executable_name)) if code != 0: skip_selenium = True @@ -30,20 +38,11 @@ print(" "+skip_message) def start_web_driver(): - service = Service(executable_path="chromedriver", - log_path=settings.TEST_GHOSTDRIVER_LOG_PATH) - service.start() + service = Service(executable_path=f"/usr/bin/{executable_name}", log_output=f"{executable_name}.log", service_args=['--log-no-truncate']) options = Options() - options.add_argument("headless") - options.add_argument("disable-extensions") - options.add_argument("disable-gpu") # headless needs this - options.add_argument("no-sandbox") # docker needs this - dc = DesiredCapabilities.CHROME - dc["goog:loggingPrefs"] = {"browser": "ALL"} - # For selenium 3: - return 
webdriver.Chrome("chromedriver", options=options, desired_capabilities=dc) - # For selenium 4: - # return webdriver.Chrome(service=service, options=options, desired_capabilities=dc) + options.add_argument("--headless") + os.environ["MOZ_REMOTE_SETTINGS_DEVTOOLS"] = "1" + return webdriver.Firefox(service=service, options=options) def selenium_enabled(): @@ -56,17 +55,44 @@ def ifSeleniumEnabled(func): return skipIf(skip_selenium, skip_message)(func) -class IetfSeleniumTestCase(IetfLiveServerTestCase): +class IetfSeleniumTestCase(StaticLiveServerTestCase): # pragma: no cover login_view = 'ietf.ietfauth.views.login' + @classmethod + def setUpClass(cls): + set_template_coverage(False) + set_url_coverage(False) + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + set_template_coverage(True) + set_url_coverage(True) + def setUp(self): - super(IetfSeleniumTestCase, self).setUp() + super().setUp() + # LiveServerTestCase uses TransactionTestCase which seems to + # somehow interfere with the fixture loading process in + # IetfTestRunner when running multiple tests (the first test + # is fine, in the next ones the fixtures have been wiped) - + # this is no doubt solvable somehow, but until then we simply + # recreate them here + from ietf.person.models import Person + if not Person.objects.exists(): + load_and_run_fixtures(verbosity=0) + self.replaced_settings = dict() + if hasattr(settings, 'IDTRACKER_BASE_URL'): + self.replaced_settings['IDTRACKER_BASE_URL'] = settings.IDTRACKER_BASE_URL + settings.IDTRACKER_BASE_URL = self.live_server_url self.driver = start_web_driver() self.driver.set_window_size(1024,768) def tearDown(self): - super(IetfSeleniumTestCase, self).tearDown() self.driver.close() + for k, v in self.replaced_settings.items(): + setattr(settings, k, v) + super().tearDown() def absreverse(self,*args,**kwargs): return '%s%s'%(self.live_server_url, urlreverse(*args, **kwargs)) @@ -96,6 +122,48 @@ def scroll_to_element(self, element): # actions = ActionChains(self.driver) # actions.move_to_element(element).perform() + def scroll_and_click(self, element_locator, timeout_seconds=5): + """ + Selenium has restrictions around clicking elements outside the viewport, so + this wrapper encapsulates the boilerplate of forcing scrolling and clicking. + + :param element_locator: A two item tuple of a Selenium locator eg `(By.CSS_SELECTOR, '#something')` + """ + + # so that we can restore the state of the webpage after clicking + original_html_scroll_behaviour_to_restore = self.driver.execute_script('return document.documentElement.style.scrollBehavior') + original_html_overflow_to_restore = self.driver.execute_script('return document.documentElement.style.overflow') + + original_body_scroll_behaviour_to_restore = self.driver.execute_script('return document.body.style.scrollBehavior') + original_body_overflow_to_restore = self.driver.execute_script('return document.body.style.overflow') + + self.driver.execute_script('document.documentElement.style.scrollBehavior = "auto"') + self.driver.execute_script('document.documentElement.style.overflow = "auto"') + + self.driver.execute_script('document.body.style.scrollBehavior = "auto"') + self.driver.execute_script('document.body.style.overflow = "auto"') + + element = self.driver.find_element(element_locator[0], element_locator[1]) + self.scroll_to_element(element) + + # Note that Selenium itself seems to have multiple definitions of 'clickable'. 
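As a minimal sketch of using this helper in a test (the CSS selector is hypothetical), note that the locator tuple is passed as a single argument rather than unpacked:

    from selenium.webdriver.common.by import By

    class ExampleTests(IetfSeleniumTestCase):
        def test_click_submit(self):
            self.driver.get(self.absreverse(self.login_view))
            # scrolls the element into view, waits for it to be clickable, then clicks
            self.scroll_and_click((By.CSS_SELECTOR, "#submit-button"), timeout_seconds=5)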
+ # You might expect that the following wait for the 'element_to_be_clickable' + # would confirm that the following .click() would succeed but it doesn't. + # That's why the preceeding code attempts to force scrolling to bring the + # element into the viewport to allow clicking. + WebDriverWait(self.driver, timeout_seconds).until(expected_conditions.element_to_be_clickable(element_locator)) + + element.click() + + if original_html_scroll_behaviour_to_restore: + self.driver.execute_script(f'document.documentElement.style.scrollBehavior = "{original_html_scroll_behaviour_to_restore}"') + if original_html_overflow_to_restore: + self.driver.execute_script(f'document.documentElement.style.overflow = "{original_html_overflow_to_restore}"') + + if original_body_scroll_behaviour_to_restore: + self.driver.execute_script(f'document.body.style.scrollBehavior = "{original_body_scroll_behaviour_to_restore}"') + if original_body_overflow_to_restore: + self.driver.execute_script(f'document.body.style.overflow = "{original_body_overflow_to_restore}"') class presence_of_element_child_by_css_selector: """Wait for presence of a child of a WebElement matching a CSS selector diff --git a/ietf/utils/log.py b/ietf/utils/log.py index d5a54e5516..2a068ade9a 100644 --- a/ietf/utils/log.py +++ b/ietf/utils/log.py @@ -9,37 +9,10 @@ import os.path import traceback -from typing import Callable # pyflakes:ignore - -try: - import syslog - logfunc = syslog.syslog # type: Callable -except ImportError: # import syslog will fail on Windows boxes - logging.basicConfig(filename='tracker.log',level=logging.INFO) - logfunc = logging.info - pass - from django.conf import settings import debug # pyflakes:ignore -formatter = logging.Formatter('{levelname}: {name}:{lineno}: {message}', style='{') -for name, level in settings.UTILS_LOGGER_LEVELS.items(): - logger = logging.getLogger(name) - if not logger.hasHandlers(): - debug.say(' Adding handlers to logger %s' % logger.name) - - handlers = [ - logging.StreamHandler(), - logging.handlers.SysLogHandler(address='/dev/log', - facility=logging.handlers.SysLogHandler.LOG_USER), - ] - for h in handlers: - h.setFormatter(formatter) - h.setLevel(level) - logger.addHandler(h) - debug.say(" Setting %s logging level to %s" % (logger.name, level)) - logger.setLevel(level) def getclass(frame): cls = None @@ -56,20 +29,9 @@ def getcaller(): return (pmodule, pclass, pfunction, pfile, pline) def log(msg, e=None): - "Uses syslog by preference. Logs the given calling point and message." 
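The rewritten log() just below hands records to a logger named "datatracker". A rough settings.LOGGING sketch wiring that logger to the DatatrackerJsonFormatter added in ietf/utils/jsonlogger.py could look like this (the handler name, level, and field list are assumptions, not taken from this patch):

    LOGGING = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "json": {
                "class": "ietf.utils.jsonlogger.DatatrackerJsonFormatter",
                "style": "{",
                "format": "{asctime}{levelname}{name}{message}",
            },
        },
        "handlers": {
            "console": {"class": "logging.StreamHandler", "formatter": "json"},
        },
        "loggers": {
            "datatracker": {"handlers": ["console"], "level": "INFO"},
        },
    }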
- global logfunc - def _flushfunc(): - pass - _logfunc = logfunc - if settings.SERVER_MODE == 'test': - if getattr(settings, 'show_logging', False) is True: - _logfunc = debug.say - _flushfunc = sys.stdout.flush # pyflakes:ignore (intentional redefinition) - else: + "Logs the given calling point and message to the logging framework's datatracker handler at severity INFO" + if settings.SERVER_MODE == 'test' and not getattr(settings, 'show_logging',False): return - elif settings.DEBUG == True: - _logfunc = debug.say - _flushfunc = sys.stdout.flush # pyflakes:ignore (intentional redefinition) if not isinstance(msg, str): msg = msg.encode('unicode_escape') try: @@ -82,11 +44,8 @@ def _flushfunc(): where = " in " + func + "()" except IndexError: file, line, where = "/", 0, "" - _flushfunc() - _logfunc("ietf%s(%d)%s: %s" % (file, line, where, msg)) - -logger = logging.getLogger('django') + logging.getLogger("datatracker").info(msg=msg, extra = {"file":file, "line":line, "where":where}) def exc_parts(): @@ -124,6 +83,7 @@ def assertion(statement, state=True, note=None): This acts like an assertion. It uses the django logger in order to send the failed assertion and a backtrace as for an internal server error. """ + logger = logging.getLogger("django") # Note this is a change - before this would have gone to "django" frame = inspect.currentframe().f_back value = eval(statement, frame.f_globals, frame.f_locals) if bool(value) != bool(state): @@ -148,6 +108,7 @@ def assertion(statement, state=True, note=None): def unreachable(date="(unknown)"): "Raises an assertion or sends traceback to admins if executed." + logger = logging.getLogger("django") frame = inspect.currentframe().f_back if settings.DEBUG is True or settings.SERVER_MODE == 'test': raise AssertionError("Arrived at code in %s() which was marked unreachable on %s." % (frame.f_code.co_name, date)) diff --git a/ietf/utils/mail.py b/ietf/utils/mail.py index e747c74778..5417161451 100644 --- a/ietf/utils/mail.py +++ b/ietf/utils/mail.py @@ -19,11 +19,13 @@ from email.header import Header, decode_header from email import message_from_bytes, message_from_string from email import charset as Charset +from typing import Optional from django.conf import settings from django.contrib import messages from django.core.exceptions import ImproperlyConfigured, ValidationError from django.core.validators import validate_email +from django.http import HttpRequest from django.template.loader import render_to_string from django.template import Context,RequestContext from django.utils import timezone @@ -64,6 +66,18 @@ def add_headers(msg): msg['From'] = settings.DEFAULT_FROM_EMAIL return msg + +def decode_header_value(value: str) -> str: + """Decode a header value + + Easier-to-use wrapper around email.message.decode_header() + """ + return "".join( + part.decode(charset if charset else "utf-8") if isinstance(part, bytes) else part + for part, charset in decode_header(value) + ) + + class SMTPSomeRefusedRecipients(smtplib.SMTPException): def __init__(self, message, original_msg, refusals): @@ -92,7 +106,17 @@ def send_smtp(msg, bcc=None): ''' mark = time.time() add_headers(msg) - (fname, frm) = parseaddr(msg.get('From')) + # N.B. We have a disconnect with most of this code assuming a From header value will only + # have one address. + # The frm computed here is only used as the envelope from. + # Previous code simply ran `parseaddr(msg.get('From'))`, getting lucky if the string returned + # from the get had more than one address in it. 
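For illustration, the decode_header_value() helper above and the getaddresses()-based envelope-from handling used just below behave roughly like this (addresses are made up):

    from email.utils import getaddresses
    from ietf.utils.mail import decode_header_value

    # RFC 2047 encoded-words are decoded using their declared charset
    decode_header_value("=?utf-8?q?D=C3=A9j=C3=A0_vu?=")
    # -> 'Déjà vu'

    # getaddresses() splits a (non-conforming) multi-address From header;
    # send_smtp() then uses the first tuple as the envelope from
    getaddresses(["Alice <alice@example.com>, Bob <bob@example.com>"])
    # -> [('Alice', 'alice@example.com'), ('Bob', 'bob@example.com')]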
Python 3.9.20 changes the behavior of parseaddr + # and that erroneous use of the function no longer gets lucky. + # For the short term, to match behavior to date as closely as possible, if we get a message + # that has multiple addresses in the From header, we will use the first for the envelope from + from_tuples = getaddresses(msg.get_all('From', [settings.DEFAULT_FROM_EMAIL])) + assertion('len(from_tuples)==1', note=f"send_smtp received multiple From addresses: {from_tuples}") + _ , frm = from_tuples[0] addrlist = msg.get_all('To') + msg.get_all('Cc', []) if bcc: addrlist += [bcc] @@ -241,8 +265,7 @@ def parseaddr(addr): """ - addr = ''.join( [ ( s.decode(m) if m else s.decode()) if isinstance(s, bytes) else s for (s,m) in decode_header(addr) ] ) - name, addr = simple_parseaddr(addr) + name, addr = simple_parseaddr(decode_header_value(addr)) return name, addr def excludeaddrs(addrlist, exlist): @@ -320,18 +343,45 @@ def condition_message(to, frm, subject, msg, cc, extra): msg['Message-ID'] = make_msgid() -def show_that_mail_was_sent(request,leadline,msg,bcc): - if request and request.user: - from ietf.ietfauth.utils import has_role - if has_role(request.user,['Area Director','Secretariat','IANA','RFC Editor','ISE','IAD','IRTF Chair','WG Chair','RG Chair','WG Secretary','RG Secretary']): - info = "%s at %s %s\n" % (leadline,timezone.now().strftime("%Y-%m-%d %H:%M:%S"),settings.TIME_ZONE) - info += "Subject: %s\n" % force_str(msg.get('Subject','[no subject]')) - info += "To: %s\n" % msg.get('To','[no to]') - if msg.get('Cc'): - info += "Cc: %s\n" % msg.get('Cc') - if bcc: - info += "Bcc: %s\n" % bcc - messages.info(request,info,extra_tags='preformatted',fail_silently=True) +def show_that_mail_was_sent(request: HttpRequest, leadline: str, msg: Message, bcc: Optional[str]): + if request and request.user: + from ietf.ietfauth.utils import has_role + + if has_role( + request.user, + [ + "Area Director", + "Secretariat", + "IANA", + "RFC Editor", + "ISE", + "IAD", + "IRTF Chair", + "WG Chair", + "RG Chair", + "WG Secretary", + "RG Secretary", + ], + ): + subject = decode_header_value(msg.get("Subject", "[no subject]")) + _to = decode_header_value(msg.get("To", "[no to]")) + info_lines = [ + f"{leadline} at {timezone.now():%Y-%m-%d %H:%M:%S %Z}", + f"Subject: {subject}", + f"To: {_to}", + ] + cc = msg.get("Cc", None) + if cc: + info_lines.append(f"Cc: {decode_header_value(cc)}") + if bcc: + info_lines.append(f"Bcc: {decode_header_value(bcc)}") + messages.info( + request, + "\n".join(info_lines), + extra_tags="preformatted", + fail_silently=True, + ) + def save_as_message(request, msg, bcc): by = ((request and request.user and not request.user.is_anonymous and request.user.person) @@ -446,6 +496,8 @@ def parse_preformatted(preformatted, extra=None, override=None): values = msg.get_all(key, []) if values: values = getaddresses(values) + if key=='From': + assertion('len(values)<2', note=f'parse_preformatted is constructing a From with multiple values: {values}') del msg[key] msg[key] = ',\n '.join(formataddr(v) for v in values) for key in ['Subject', ]: diff --git a/ietf/utils/management/commands/check_draft_event_revision_integrity.py b/ietf/utils/management/commands/check_draft_event_revision_integrity.py index c8d2cbd21a..c2d4272782 100644 --- a/ietf/utils/management/commands/check_draft_event_revision_integrity.py +++ b/ietf/utils/management/commands/check_draft_event_revision_integrity.py @@ -54,7 +54,7 @@ def to_dict(instance): doc = getattr(obj, docattr) time = getattr(obj, timeattr) if 
not obj.rev: - if not doc.is_rfc(): + if doc.type_id != "rfc": self.stdout.write("Bad revision number: %-52s: '%s'" % (doc.name, obj.rev)) continue rev = int(obj.rev.lstrip('0') or '0') diff --git a/ietf/utils/management/commands/dumprelated.py b/ietf/utils/management/commands/dumprelated.py deleted file mode 100644 index 66fbb33bf1..0000000000 --- a/ietf/utils/management/commands/dumprelated.py +++ /dev/null @@ -1,209 +0,0 @@ -# Copyright The IETF Trust 2018-2020, All Rights Reserved -# -*- coding: utf-8 -*- - - -import io -import warnings -from collections import OrderedDict - -from django.apps import apps -from django.contrib.admin.utils import NestedObjects -from django.core import serializers -from django.core.management.base import BaseCommand, CommandError -from django.core.management.utils import parse_apps_and_model_labels -from django.db import DEFAULT_DB_ALIAS, router - -import debug # pyflakes:ignore -debug.debug = True - -class ProxyModelWarning(Warning): - pass - - -class Command(BaseCommand): - help = ( - "Output a database object and its related objects as a fixture of the given format " - ) - - def add_arguments(self, parser): - parser.add_argument( - 'args', metavar='app_label.ModelName', nargs=1, - help='Specifies the app_label.ModelName for which to dump objects given by --pks', - ) - parser.add_argument( - '--format', default='json', dest='format', - help='Specifies the output serialization format for fixtures.', - ) - parser.add_argument( - '--indent', default=None, dest='indent', type=int, - help='Specifies the indent level to use when pretty-printing output.', - ) - parser.add_argument( - '--database', action='store', dest='database', - default=DEFAULT_DB_ALIAS, - help='Nominates a specific database to dump fixtures from. ' - 'Defaults to the "default" database.', - ) - parser.add_argument( - '-e', '--exclude', dest='exclude', action='append', default=[], - help='An app_label or app_label.ModelName to exclude ' - '(use multiple --exclude to exclude multiple apps/models).', - ) - parser.add_argument( - '--natural-foreign', action='store_true', dest='use_natural_foreign_keys', default=False, - help='Use natural foreign keys if they are available.', - ) - parser.add_argument( - '--natural-primary', action='store_true', dest='use_natural_primary_keys', default=False, - help='Use natural primary keys if they are available.', - ) - parser.add_argument( - '-o', '--output', default=None, dest='output', - help='Specifies file to which the output is written.' - ) - parser.add_argument( - '--pks', dest='primary_keys', required=True, - help="Only dump objects with given primary keys. Accepts a comma-separated " - "list of keys. 
This option only works when you specify one model.", - ) - - def handle(self, *app_labels, **options): - format = options['format'] - indent = options['indent'] - using = options['database'] - excludes = options['exclude'] - output = options['output'] - show_traceback = options['traceback'] - use_natural_foreign_keys = options['use_natural_foreign_keys'] - use_natural_primary_keys = options['use_natural_primary_keys'] - pks = options['primary_keys'] - - if pks: - primary_keys = [pk.strip() for pk in pks.split(',')] - else: - primary_keys = [] - - excluded_models, excluded_apps = parse_apps_and_model_labels(excludes) - - if len(app_labels) == 0: - if primary_keys: - raise CommandError("You can only use --pks option with one model") - app_list = OrderedDict( - (app_config, None) for app_config in apps.get_app_configs() - if app_config.models_module is not None and app_config not in excluded_apps - ) - else: - if len(app_labels) > 1 and primary_keys: - raise CommandError("You can only use --pks option with one model") - app_list = OrderedDict() - for label in app_labels: - try: - app_label, model_label = label.split('.') - try: - app_config = apps.get_app_config(app_label) - except LookupError as e: - raise CommandError(str(e)) - if app_config.models_module is None or app_config in excluded_apps: - continue - try: - model = app_config.get_model(model_label) - except LookupError: - raise CommandError("Unknown model: %s.%s" % (app_label, model_label)) - - app_list_value = app_list.setdefault(app_config, []) - - # We may have previously seen a "all-models" request for - # this app (no model qualifier was given). In this case - # there is no need adding specific models to the list. - if app_list_value is not None: - if model not in app_list_value: - app_list_value.append(model) - except ValueError: - if primary_keys: - raise CommandError("You can only use --pks option with one model") - # This is just an app - no model qualifier - app_label = label - try: - app_config = apps.get_app_config(app_label) - except LookupError as e: - raise CommandError(str(e)) - if app_config.models_module is None or app_config in excluded_apps: - continue - app_list[app_config] = None - - # Check that the serialization format exists; this is a shortcut to - # avoid collating all the objects and _then_ failing. - if format not in serializers.get_public_serializer_formats(): - try: - serializers.get_serializer(format) - except serializers.SerializerDoesNotExist: - pass - - raise CommandError("Unknown serialization format: %s" % format) - - def flatten(l): - if isinstance(l, list): - for el in l: - if isinstance(el, list): - for sub in flatten(el): - yield sub - else: - yield el - else: - yield l - - def get_objects(count_only=False): - """ - Collate the objects to be serialized. If count_only is True, just - count the number of objects to be serialized. - """ - models = serializers.sort_dependencies(list(app_list.items())) - for model in models: - if model in excluded_models: - continue - if model._meta.proxy and model._meta.proxy_for_model not in models: - warnings.warn( - "%s is a proxy model and won't be serialized." 
% model._meta.label, - category=ProxyModelWarning, - ) - if not model._meta.proxy and router.allow_migrate_model(using, model): - objects = model._default_manager - - queryset = objects.using(using).order_by(model._meta.pk.name) - if primary_keys: - queryset = queryset.filter(pk__in=primary_keys) - if count_only: - yield queryset.order_by().count() - else: - for obj in queryset.iterator(): - collector = NestedObjects(using=using) - collector.collect([obj,]) - object_list = list(flatten(collector.nested())) - object_list.reverse() - for o in object_list: - yield o - - try: - self.stdout.ending = None - progress_output = None - object_count = 0 - # If dumpdata is outputting to stdout, there is no way to display progress - if (output and self.stdout.isatty() and options['verbosity'] > 0): - progress_output = self.stdout - object_count = sum(get_objects(count_only=True)) - stream = io.open(output, 'w') if output else None - try: - serializers.serialize( - format, get_objects(), indent=indent, - use_natural_foreign_keys=use_natural_foreign_keys, - use_natural_primary_keys=use_natural_primary_keys, - stream=stream or self.stdout, progress_output=progress_output, - object_count=object_count, - ) - finally: - if stream: - stream.close() - except Exception as e: - if show_traceback: - raise - raise CommandError("Unable to serialize database: %s" % e) diff --git a/ietf/utils/management/commands/import_htpasswd.py b/ietf/utils/management/commands/import_htpasswd.py deleted file mode 100644 index c33a46b727..0000000000 --- a/ietf/utils/management/commands/import_htpasswd.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright The IETF Trust 2014-2020, All Rights Reserved -import io -import sys - -from textwrap import dedent - -from django.contrib.auth.models import User -from django.core.management.base import BaseCommand - -def import_htpasswd_file(filename, verbosity=1, overwrite=False): - with io.open(filename) as file: - for line in file: - if not ':' in line: - raise ValueError('Found a line without colon separator in the htpassword file %s:' - ' "%s"' % (file.name, line)) - username, password = line.strip().split(':', 1) - try: - user = User.objects.get(username__iexact=username) - if overwrite == True or not user.password: - if password.startswith('{SHA}'): - user.password = "sha1$$%s" % password[len('{SHA}'):] - elif password.startswith('$apr1$'): - user.password = "md5$%s" % password[len('$apr1$'):] - else: # Assume crypt - user.password = "crypt$$%s" % password - user.save() - if verbosity > 0: - sys.stderr.write('.') - if verbosity > 1: - sys.stderr.write(' %s\n' % username) - except User.DoesNotExist: - if verbosity > 1: - sys.stderr.write('\nNo such user: %s\n' % username) - -class Command(BaseCommand): - """ - Import passwords from one or more htpasswd files to Django's auth_user table. - - This command only imports passwords; it does not import usernames, as that - would leave usernames without associated Person records in the database, - something which is undesirable. - - By default the command won't overwrite existing password entries, but - given the --force switch, it will overwrite existing entries too. Without - the --force switch, the command is safe to run repeatedly. 
- """ - - help = dedent(__doc__).strip() - - def add_arguments(self, parser): - parser.add_argument('--force', - action='store_true', dest='overwrite', default=False, - help='Overwrite existing passwords in the auth_user table.') - - - args = '[path [path [...]]]' - - def handle(self, *filenames, **options): - overwrite = options.get('overwrite', False) - verbosity = int(options.get('verbosity')) - for fn in filenames: - import_htpasswd_file(fn, verbosity=verbosity, overwrite=overwrite) - diff --git a/ietf/utils/management/commands/loadrelated.py b/ietf/utils/management/commands/loadrelated.py deleted file mode 100644 index da9d00d5dc..0000000000 --- a/ietf/utils/management/commands/loadrelated.py +++ /dev/null @@ -1,129 +0,0 @@ -# Copyright The IETF Trust 2018-2020, All Rights Reserved -# -*- coding: utf-8 -*- - - -import gzip -import os -#import sys -import tqdm -import zipfile - -try: - import bz2 - has_bz2 = True -except ImportError: - has_bz2 = False - -from django.core.exceptions import ObjectDoesNotExist -from django.core import serializers -from django.db import DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connections -from django.db.models.signals import post_save -from django.utils.encoding import force_str -import django.core.management.commands.loaddata as loaddata - -import debug # pyflakes:ignore - -from ietf.community.models import notify_events - -class Command(loaddata.Command): - help = (""" - - Load a fixture of related objects to the database. The fixture is expected - to contain a set of related objects, created with the 'dumprelated' management - command. It differs from the 'loaddata' command in that it silently ignores - attempts to load duplicate entries, and continues loading subsequent entries. - - """) - - def add_arguments(self, parser): - parser.add_argument('args', metavar='fixture', nargs='+', help='Fixture files.') - parser.add_argument( - '--database', action='store', dest='database', default=DEFAULT_DB_ALIAS, - help='Nominates a specific database to load fixtures into. 
Defaults to the "default" database.', - ) - parser.add_argument( - '--ignorenonexistent', '-i', action='store_true', dest='ignore', default=False, - help='Ignores entries in the serialized data for fields that do not ' - 'currently exist on the model.', - ) - - def handle(self, *args, **options): - self.ignore = options['ignore'] - self.using = options['database'] - self.verbosity = options['verbosity'] - # - self.compression_formats = { - None: (open, 'rb'), - 'gz': (gzip.GzipFile, 'rb'), - 'zip': (SingleZipReader, 'r'), - } - if has_bz2: - self.compression_formats['bz2'] = (bz2.BZ2File, 'r') - # - self.serialization_formats = serializers.get_public_serializer_formats() - # - post_save.disconnect(notify_events) - # - connection = connections[self.using] - self.fixture_count = 0 - self.loaded_object_count = 0 - self.fixture_object_count = 0 - # - for arg in args: - fixture_file = arg - self.stdout.write("Loading objects from %s" % fixture_file) - _, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file)) - open_method, mode = self.compression_formats[cmp_fmt] - fixture = open_method(fixture_file, mode) - objects_in_fixture = 0 - self.stdout.write("Getting object count...\b\b\b", ending='') - self.stdout.flush() - for o in serializers.deserialize(ser_fmt, fixture, using=self.using, ignorenonexistent=self.ignore,): - objects_in_fixture += 1 - self.stdout.write(" %d" % objects_in_fixture) - # - fixture = open_method(fixture_file, mode) - self.fixture_count += 1 - objects = serializers.deserialize(ser_fmt, fixture, using=self.using, ignorenonexistent=self.ignore,) - with connection.constraint_checks_disabled(): - for obj in tqdm.tqdm(objects, total=objects_in_fixture): - try: - obj.save(using=self.using) - self.loaded_object_count += 1 - except (DatabaseError, IntegrityError, ObjectDoesNotExist, AttributeError) as e: - error_msg = force_str(e) - if "Duplicate entry" in error_msg: - pass - else: - self.stderr.write("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % { - 'app_label': obj.object._meta.app_label, - 'object_name': obj.object._meta.object_name, - 'pk': obj.object.pk, - 'error_msg': error_msg, - }, ) - self.fixture_object_count += objects_in_fixture - - if self.verbosity >= 1: - if self.fixture_object_count == self.loaded_object_count: - self.stdout.write( - "Installed %d object(s) from %d fixture(s)" - % (self.loaded_object_count, self.fixture_count) - ) - else: - self.stdout.write( - "Installed %d object(s) (of %d) from %d fixture(s)" - % (self.loaded_object_count, self.fixture_object_count, self.fixture_count) - ) - - -class SingleZipReader(zipfile.ZipFile): - - def __init__(self, *args, **kwargs): - zipfile.ZipFile.__init__(self, *args, **kwargs) - if len(self.namelist()) != 1: - raise ValueError("Zip-compressed fixtures must contain one file.") - - def read(self): - return zipfile.ZipFile.read(self, self.namelist()[0]) - - diff --git a/ietf/utils/management/commands/patch_libraries.py b/ietf/utils/management/commands/patch_libraries.py new file mode 100644 index 0000000000..d9ae11097b --- /dev/null +++ b/ietf/utils/management/commands/patch_libraries.py @@ -0,0 +1,31 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +import django + +from django.conf import settings +from django.core.management.base import BaseCommand, CommandError +from pathlib import Path + +from ietf.utils import patch + + +class Command(BaseCommand): + """Apply IETF patches to libraries""" + requires_system_checks = tuple() + + def handle(self, *args, **options): + 
library_path = Path(django.__file__).parent.parent + top_dir = Path(settings.BASE_DIR).parent + + # All patches in settings.CHECKS_LIBRARY_PATCHES_TO_APPLY must have a + # relative file path starting from the site-packages dir, e.g. + # 'django/db/models/fields/__init__.py' + for patch_file in settings.CHECKS_LIBRARY_PATCHES_TO_APPLY: + patch_set = patch.fromfile(top_dir / Path(patch_file)) + if not patch_set: + raise CommandError(f"Could not parse patch file '{patch_file}'") + if not patch_set.apply(root=bytes(library_path)): + raise CommandError(f"Could not apply the patch from '{patch_file}'") + if patch_set.already_patched: + self.stdout.write(f"Patch from '{patch_file}' was already applied") + else: + self.stdout.write(f"Applied the patch from '{patch_file}'") diff --git a/ietf/utils/management/commands/periodic_tasks.py b/ietf/utils/management/commands/periodic_tasks.py new file mode 100644 index 0000000000..2d34f8361c --- /dev/null +++ b/ietf/utils/management/commands/periodic_tasks.py @@ -0,0 +1,310 @@ +# Copyright The IETF Trust 2024, All Rights Reserved +import json +from django_celery_beat.models import CrontabSchedule, PeriodicTask + +from django.core.management.base import BaseCommand + +CRONTAB_DEFS = { + # same as "@weekly" in a crontab + "weekly": { + "minute": "0", + "hour": "0", + "day_of_month": "*", + "month_of_year": "*", + "day_of_week": "0", + "timezone": "America/Los_Angeles", + }, + "daily": { + "minute": "5", + "hour": "0", + "day_of_month": "*", + "month_of_year": "*", + "day_of_week": "*", + "timezone": "America/Los_Angeles", + }, + "hourly": { + "minute": "5", + "hour": "*", + "day_of_month": "*", + "month_of_year": "*", + "day_of_week": "*", + }, + "every_15m": { + "minute": "*/15", + "hour": "*", + "day_of_month": "*", + "month_of_year": "*", + "day_of_week": "*", + }, + "every_15m_except_midnight": { + "minute": "*/15", + "hour": "1-23", + "day_of_month": "*", + "month_of_year": "*", + "day_of_week": "*", + "timezone": "America/Los_Angeles", + }, +} + + +class Command(BaseCommand): + """Manage periodic tasks""" + crontabs = None + + def add_arguments(self, parser): + parser.add_argument("--create-default", action="store_true") + parser.add_argument("--enable", type=int, action="append") + parser.add_argument("--disable", type=int, action="append") + + def handle(self, *args, **options): + self.crontabs = self.get_or_create_crontabs() + if options["create_default"]: + self.create_default_tasks() + if options["enable"]: + self.enable_tasks(options["enable"]) + if options["disable"]: + self.disable_tasks(options["disable"]) + self.show_tasks() + + def get_or_create_crontabs(self): + crontabs = {} + for label, definition in CRONTAB_DEFS.items(): + crontabs[label], _ = CrontabSchedule.objects.get_or_create(**definition) + return crontabs + + def create_default_tasks(self): + PeriodicTask.objects.get_or_create( + name="Send scheduled mail", + task="ietf.message.tasks.send_scheduled_mail_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["every_15m"], + description="Send mail scheduled to go out at certain times" + ), + ) + + PeriodicTask.objects.get_or_create( + name="Partial sync with RFC Editor index", + task="ietf.sync.tasks.rfc_editor_index_update_task", + kwargs=json.dumps(dict(full_index=False)), + defaults=dict( + enabled=False, + crontab=self.crontabs["every_15m_except_midnight"], # don't collide with full sync + description=( + "Reparse the last _year_ of RFC index entries until " + "https://github.com/ietf-tools/datatracker/issues/3734 is 
addressed. " + "This takes about 20s on production as of 2022-08-11." + ) + ), + ) + + PeriodicTask.objects.get_or_create( + name="Full sync with RFC Editor index", + task="ietf.sync.tasks.rfc_editor_index_update_task", + kwargs=json.dumps(dict(full_index=True)), + defaults=dict( + enabled=False, + crontab=self.crontabs["daily"], + description=( + "Run an extended version of the rfc editor update to catch changes with backdated timestamps" + ), + ), + ) + + PeriodicTask.objects.get_or_create( + name="Fetch meeting attendance", + task="ietf.stats.tasks.fetch_meeting_attendance_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["daily"], + description="Fetch meeting attendance data from ietf.org/registration/attendees", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Send review reminders", + task="ietf.review.tasks.send_review_reminders_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["daily"], + description="Send reminders originating from the review app", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Expire I-Ds", + task="ietf.doc.tasks.expire_ids_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["daily"], + description="Create expiration notices for expired I-Ds", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Expire Last Calls", + task="ietf.doc.tasks.expire_last_calls_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["daily"], + description="Move docs whose last call has expired to their next states", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Sync with IANA changes", + task="ietf.sync.tasks.iana_changes_update_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["hourly"], + description="Fetch change list from IANA and apply to documents", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Sync with IANA protocols page", + task="ietf.sync.tasks.iana_protocols_update_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["hourly"], + description="Fetch protocols page from IANA and update document event logs", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Update I-D index files", + task="ietf.idindex.tasks.idindex_update_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["hourly"], + description="Update I-D index files", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Send expiration notifications", + task="ietf.doc.tasks.notify_expirations_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["weekly"], + description="Send notifications about I-Ds that will expire in the next 14 days", + ) + ) + + PeriodicTask.objects.get_or_create( + name="Generate idnits2 rfcs-obsoleted blob", + task="ietf.doc.tasks.generate_idnits2_rfcs_obsoleted_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["hourly"], + description="Generate the rfcs-obsoleted file used by idnits", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Generate idnits2 rfc-status blob", + task="ietf.doc.tasks.generate_idnits2_rfc_status_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["hourly"], + description="Generate the rfc_status blob used by idnits", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Send NomCom reminders", + task="ietf.nomcom.tasks.send_nomcom_reminders_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["daily"], + description="Send acceptance and questionnaire reminders to nominees", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Generate WG charter files", + 
task="ietf.group.tasks.generate_wg_charters_files_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["hourly"], + description="Update 1wg-charters.txt and 1wg-charters-by-acronym.txt", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Generate WG summary files", + task="ietf.group.tasks.generate_wg_summary_files_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["hourly"], + description="Update 1wg-summary.txt and 1wg-summary-by-acronym.txt", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Generate I-D bibxml files", + task="ietf.doc.tasks.generate_draft_bibxml_files_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["hourly"], + description="Generate draft bibxml files for the last week's drafts", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Send personal API key usage emails", + task="ietf.person.tasks.send_apikey_usage_emails_task", + kwargs=json.dumps(dict(days=7)), + defaults=dict( + enabled=False, + crontab=self.crontabs["weekly"], + description="Send personal API key usage summary emails for the past week", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Purge old personal API key events", + task="ietf.person.tasks.purge_personal_api_key_events_task", + kwargs=json.dumps(dict(keep_days=14)), + defaults=dict( + enabled=False, + crontab=self.crontabs["daily"], + description="Purge PersonApiKeyEvent instances older than 14 days", + ), + ) + + PeriodicTask.objects.get_or_create( + name="Run Yang model checks", + task="ietf.submit.tasks.run_yang_model_checks_task", + defaults=dict( + enabled=False, + crontab=self.crontabs["daily"], + description="Re-run Yang model checks on all active drafts", + ), + ) + + def show_tasks(self): + for label, crontab in self.crontabs.items(): + tasks = PeriodicTask.objects.filter(crontab=crontab).order_by( + "task", "name" + ) + self.stdout.write(f"\n{label} ({crontab.human_readable})\n") + if tasks: + for task in tasks: + desc = f" {task.id:-3d}: {task.task} - {task.name}" + if task.enabled: + self.stdout.write(desc) + else: + self.stdout.write(self.style.NOTICE(f"{desc} - disabled")) + else: + self.stdout.write(" Nothing scheduled") + + def enable_tasks(self, pks): + PeriodicTask.objects.filter( + crontab__in=self.crontabs.values(), pk__in=pks + ).update(enabled=True) + + def disable_tasks(self, pks): + PeriodicTask.objects.filter( + crontab__in=self.crontabs.values(), pk__in=pks + ).update(enabled=False) diff --git a/ietf/utils/management/commands/populate_yang_model_dirs.py b/ietf/utils/management/commands/populate_yang_model_dirs.py deleted file mode 100644 index 864dfafb72..0000000000 --- a/ietf/utils/management/commands/populate_yang_model_dirs.py +++ /dev/null @@ -1,172 +0,0 @@ -# Copyright The IETF Trust 2016-2020, All Rights Reserved -# -*- coding: utf-8 -*- - - -import io -import os -import sys -import time - -from pathlib import Path -from textwrap import dedent -from xym import xym - -from django.conf import settings -from django.core.management.base import BaseCommand - -import debug # pyflakes:ignore - -class Command(BaseCommand): - """ - Populate the yang module repositories from drafts and RFCs. - - Extracts yang models from RFCs (found in settings.RFC_PATH and places - them in settings.SUBMIT_YANG_RFC_MODEL_DIR, and from active drafts, placed in - settings.SUBMIT_YANG_DRAFT_MODEL_DIR. 
- - """ - - help = dedent(__doc__).strip() - - def add_arguments(self, parser): - parser.add_argument('--clean', - action='store_true', dest='clean', default=False, - help='Remove the current directory content before writing new models.') - - - def handle(self, *filenames, **options): - """ - - * All yang modules from published RFCs should be extracted and be - available in an rfc-yang repository. - - * All valid yang modules from active, not replaced, Internet-Drafts - should be extracted and be available in a draft-valid-yang repository. - - * All, valid and invalid, yang modules from active, not replaced, - Internet-Drafts should be available in a draft-all-yang repository. - (Actually, given precedence ordering, it would be enough to place - non-validating modules in a draft-invalid-yang repository instead). - - * In all cases, example modules should be excluded. - - * Precedence is established by the search order of the repository as - provided to pyang. - - * As drafts expire, models should be removed in order to catch cases - where a module being worked on depends on one which has slipped out - of the work queue. - - """ - - verbosity = int(options.get('verbosity')) - - def extract_from(file, dir, strict=True): - saved_stdout = sys.stdout - saved_stderr = sys.stderr - xymerr = io.StringIO() - xymout = io.StringIO() - sys.stderr = xymerr - sys.stdout = xymout - model_list = [] - try: - model_list = xym.xym(str(file), str(file.parent), str(dir), strict=strict, debug_level=verbosity-2) - for name in model_list: - modfile = moddir / name - mtime = file.stat().st_mtime - os.utime(str(modfile), (mtime, mtime)) - if '"' in name: - name = name.replace('"', '') - modfile.rename(str(moddir/name)) - model_list = [ n.replace('"','') for n in model_list ] - except Exception as e: - self.stderr.write("** Error when extracting from %s: %s" % (file, str(e))) - finally: - sys.stdout = saved_stdout - sys.stderr = saved_stderr - # - if verbosity > 1: - outmsg = xymout.getvalue() - if outmsg.strip(): - self.stdout.write(outmsg) - if verbosity>2: - errmsg = xymerr.getvalue() - if errmsg.strip(): - self.stderr.write(errmsg) - return model_list - - # Extract from new RFCs - - rfcdir = Path(settings.RFC_PATH) - - moddir = Path(settings.SUBMIT_YANG_RFC_MODEL_DIR) - if not moddir.exists(): - moddir.mkdir(parents=True) - - latest = 0 - for item in moddir.iterdir(): - if item.stat().st_mtime > latest: - latest = item.stat().st_mtime - - if verbosity > 0: - self.stdout.write("Extracting to %s ..." 
% moddir) - for item in rfcdir.iterdir(): - if item.is_file() and item.name.startswith('rfc') and item.name.endswith('.txt') and item.name[3:-4].isdigit(): - if item.stat().st_mtime > latest: - model_list = extract_from(item, moddir) - for name in model_list: - if name.startswith('ietf') or name.startswith('iana'): - if verbosity > 1: - self.stdout.write(" Extracted from %s: %s" % (item, name)) - elif verbosity > 0: - self.stdout.write('.', ending='') - self.stdout.flush() - else: - modfile = moddir / name - modfile.unlink() - if verbosity > 1: - self.stdout.write(" Skipped module from %s: %s" % (item, name)) - if verbosity > 0: - self.stdout.write("") - - # Extract valid modules from drafts - - six_months_ago = time.time() - 6*31*24*60*60 - def active(item): - return item.stat().st_mtime > six_months_ago - - draftdir = Path(settings.INTERNET_DRAFT_PATH) - - moddir = Path(settings.SUBMIT_YANG_DRAFT_MODEL_DIR) - if not moddir.exists(): - moddir.mkdir(parents=True) - if verbosity > 0: - self.stdout.write("Emptying %s ..." % moddir) - for item in moddir.iterdir(): - item.unlink() - - if verbosity > 0: - self.stdout.write("Extracting to %s ..." % moddir) - for item in draftdir.iterdir(): - try: - if item.is_file() and item.name.startswith('draft') and item.name.endswith('.txt') and active(item): - model_list = extract_from(item, moddir, strict=False) - for name in model_list: - if not name.startswith('example'): - if verbosity > 1: - self.stdout.write(" Extracted module from %s: %s" % (item, name)) - elif verbosity > 0: - self.stdout.write('.', ending='') - self.stdout.flush() - else: - modfile = moddir / name - modfile.unlink() - if verbosity > 1: - self.stdout.write(" Skipped module from %s: %s" % (item, name)) - except UnicodeDecodeError as e: - self.stderr.write('\nError: %s' % (e, )) - self.stderr.write(item.name) - self.stderr.write('') - if verbosity > 0: - self.stdout.write('') - diff --git a/ietf/utils/management/commands/run_yang_model_checks.py b/ietf/utils/management/commands/run_yang_model_checks.py deleted file mode 100644 index 13fb61c46b..0000000000 --- a/ietf/utils/management/commands/run_yang_model_checks.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright The IETF Trust 2017-2020, All Rights Reserved -# -*- coding: utf-8 -*- - - -import json - -from textwrap import dedent - -from django.core.management.base import BaseCommand - -import debug # pyflakes:ignore - -from ietf.doc.models import Document, State, DocAlias -from ietf.submit.models import Submission -from ietf.submit.checkers import DraftYangChecker - - -class Command(BaseCommand): - """ - Run yang model checks on active drafts. - - Repeats the yang checks in ietf/submit/checkers.py for active drafts, in - order to catch changes in status due to new modules becoming available in - the module directories. 
- - """ - - help = dedent(__doc__).strip() - - def add_arguments(self, parser): - parser.add_argument('draftnames', nargs="*", help="drafts to check, or none to check all active yang drafts") - parser.add_argument('--clean', - action='store_true', dest='clean', default=False, - help='Remove the current directory content before writing new models.') - - - def check_yang(self, checker, draft, force=False): - if self.verbosity > 1: - self.stdout.write("Checking %s-%s" % (draft.name, draft.rev)) - elif self.verbosity > 0: - self.stderr.write('.', ending='') - submission = Submission.objects.filter(name=draft.name, rev=draft.rev).order_by('-id').first() - if submission or force: - check = submission.checks.filter(checker=checker.name).order_by('-id').first() - if check or force: - result = checker.check_file_txt(draft.get_file_name()) - passed, message, errors, warnings, items = result - if self.verbosity > 2: - self.stdout.write(" Errors: %s\n" - " Warnings: %s\n" - " Message:\n%s\n" % (errors, warnings, message)) - items = json.loads(json.dumps(items)) - new_res = (passed, errors, warnings, message) - old_res = (check.passed, check.errors, check.warnings, check.message) if check else () - if new_res != old_res: - if self.verbosity > 1: - self.stdout.write(" Saving new yang checker results for %s-%s" % (draft.name, draft.rev)) - qs = submission.checks.filter(checker=checker.name).order_by('time') - submission.checks.filter(checker=checker.name).exclude(pk=qs.first().pk).delete() - submission.checks.create(submission=submission, checker=checker.name, passed=passed, - message=message, errors=errors, warnings=warnings, items=items, - symbol=checker.symbol) - else: - self.stderr.write("Error: did not find any submission object for %s-%s" % (draft.name, draft.rev)) - - def handle(self, *filenames, **options): - """ - """ - - self.verbosity = int(options.get('verbosity')) - drafts = options.get('draftnames') - - active_state = State.objects.get(type="draft", slug="active") - - checker = DraftYangChecker() - if drafts: - for name in drafts: - parts = name.rsplit('-',1) - if len(parts)==2 and len(parts[1])==2 and parts[1].isdigit(): - name = parts[0] - draft = DocAlias.objects.get(name=name).document - self.check_yang(checker, draft, force=True) - else: - for draft in Document.objects.filter(states=active_state, type_id='draft'): - self.check_yang(checker, draft) diff --git a/ietf/utils/management/commands/showloggers.py b/ietf/utils/management/commands/showloggers.py index 3de9db0c06..b79da9ce26 100644 --- a/ietf/utils/management/commands/showloggers.py +++ b/ietf/utils/management/commands/showloggers.py @@ -11,18 +11,7 @@ import debug # pyflakes:ignore class Command(BaseCommand): - """ - Display a list or tree representation of python loggers. 
- - Add a UTILS_LOGGER_LEVELS setting in settings_local.py to configure - non-default logging levels for any registered logger, for instance: - - UTILS_LOGGER_LEVELS = { - 'oicd_provider': 'DEBUG', - 'urllib3.connection': 'DEBUG', - } - - """ + """Display a list or tree representation of python loggers""" help = dedent(__doc__).strip() diff --git a/ietf/utils/management/commands/update_community_list_index.py b/ietf/utils/management/commands/update_community_list_index.py index 609577763e..7f4951fd6e 100644 --- a/ietf/utils/management/commands/update_community_list_index.py +++ b/ietf/utils/management/commands/update_community_list_index.py @@ -32,7 +32,7 @@ def handle(self, *args, **options): person = rule.person if not person and not group: try: - person = rule.community_list.user.person + person = rule.community_list.person except: pass name = ((group and group.acronym) or (person and person.email_address())) or '?' diff --git a/ietf/utils/management/commands/update_external_command_info.py b/ietf/utils/management/commands/update_external_command_info.py deleted file mode 100644 index e9e24f000d..0000000000 --- a/ietf/utils/management/commands/update_external_command_info.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright The IETF Trust 2017-2020, All Rights Reserved -# -*- coding: utf-8 -*- - - -import sys - -from textwrap import dedent - -from django.core.management.base import BaseCommand - -import debug # pyflakes:ignore - -from ietf.utils.models import VersionInfo -from ietf.utils.pipe import pipe - -class Command(BaseCommand): - """ - Update the version information for external commands used by the datatracker. - - Iterates through the entries in the VersionInfo table, runs the relevant - command, and updates the version string with the result. 
- - """ - - help = dedent(__doc__).strip() - - def handle(self, *filenames, **options): - for c in VersionInfo.objects.filter(used=True): - cmd = "%s %s" % (c.command, c.switch) - code, out, err = pipe(cmd) - out = out.decode('utf-8') - err = err.decode('utf-8') - if code != 0: - sys.stderr.write("Command '%s' returned %s: \n%s\n%s\n" % (cmd, code, out, err)) - else: - c.version = (out.strip()+'\n'+err.strip()).strip() - if options.get('verbosity', 1) > 1: - sys.stdout.write( - "Command: %s\n" - " Version: %s\n" % (cmd, c.version)) - c.save() diff --git a/ietf/utils/management/tests.py b/ietf/utils/management/tests.py index e94c39354f..38be464c7f 100644 --- a/ietf/utils/management/tests.py +++ b/ietf/utils/management/tests.py @@ -1,7 +1,7 @@ # Copyright The IETF Trust 2013-2020, All Rights Reserved # -*- coding: utf-8 -*- -import mock +from unittest import mock from django.core.management import call_command, CommandError from django.test import override_settings @@ -12,7 +12,7 @@ from ietf.utils.test_utils import TestCase -@mock.patch.object(EmailOnFailureCommand, 'handle') +@mock.patch.object(EmailOnFailureCommand, 'handle', return_value=None) class EmailOnFailureCommandTests(TestCase): def test_calls_handle(self, handle_method): call_command(EmailOnFailureCommand()) diff --git a/ietf/utils/markdown.py b/ietf/utils/markdown.py index 63d1c7a70f..0b522685b2 100644 --- a/ietf/utils/markdown.py +++ b/ietf/utils/markdown.py @@ -12,13 +12,15 @@ from django.utils.safestring import mark_safe from ietf.doc.templatetags.ietf_filters import urlize_ietf_docs -from ietf.utils.text import bleach_cleaner, bleach_linker +from .html import clean_html, liberal_clean_html +from .text import linkify + class LinkifyExtension(Extension): """ Simple Markdown extension inspired by https://github.com/daGrevis/mdx_linkify, - but using our bleach_linker directly. Doing the linkification on the converted + but using our own linker directly. Doing the linkification on the converted Markdown output introduces artifacts. """ @@ -31,12 +33,28 @@ def extendMarkdown(self, md): class LinkifyPostprocessor(Postprocessor): def run(self, text): - return urlize_ietf_docs(bleach_linker.linkify(text)) + return urlize_ietf_docs(linkify(text)) def markdown(text): return mark_safe( - bleach_cleaner.clean( + clean_html( + python_markdown.markdown( + text, + extensions=[ + "extra", + "nl2br", + "sane_lists", + "toc", + LinkifyExtension(), + ], + ) + ) + ) + +def liberal_markdown(text): + return mark_safe( + liberal_clean_html( python_markdown.markdown( text, extensions=[ diff --git a/ietf/utils/meetecho.py b/ietf/utils/meetecho.py index 26ae93f03d..943f3789ef 100644 --- a/ietf/utils/meetecho.py +++ b/ietf/utils/meetecho.py @@ -1,9 +1,10 @@ -# Copyright The IETF Trust 2021, All Rights Reserved +# Copyright The IETF Trust 2021-2024, All Rights Reserved # """Meetecho interim meeting scheduling API Implements the v1 API described in email from alex@meetecho.com -on 2021-12-09. +on 2021-12-09, plus additional slide management API discussed via +IM in 2024 Feb. API methods return Python objects equivalent to the JSON structures specified in the API documentation. 
Times and durations are represented @@ -13,29 +14,36 @@ import debug # pyflakes: ignore -from datetime import datetime, timedelta +import datetime from json import JSONDecodeError -from pytz import utc -from typing import Dict, Sequence, Union +from pprint import pformat +from typing import Sequence, TypedDict, TYPE_CHECKING, Union from urllib.parse import urljoin +# Guard against hypothetical cyclical import problems +if TYPE_CHECKING: + from ietf.doc.models import Document + from ietf.meeting.models import Session + class MeetechoAPI: - timezone = utc + timezone = datetime.UTC - def __init__(self, api_base: str, client_id: str, client_secret: str, request_timeout=3.01): + def __init__( + self, api_base: str, client_id: str, client_secret: str, request_timeout=3.01 + ): self.client_id = client_id self.client_secret = client_secret self.request_timeout = request_timeout # python-requests doc recommend slightly > a multiple of 3 seconds self._session = requests.Session() # if needed, add a trailing slash so urljoin won't eat the trailing path component - self.api_base = api_base if api_base.endswith('/') else f'{api_base}/' + self.api_base = api_base if api_base.endswith("/") else f"{api_base}/" def _request(self, method, url, api_token=None, json=None): """Execute an API request""" - headers = {'Accept': 'application/json'} + headers = {"Accept": "application/json"} if api_token is not None: - headers['Authorization'] = f'bearer {api_token}' + headers["Authorization"] = f"bearer {api_token}" try: response = self._session.request( @@ -47,28 +55,31 @@ def _request(self, method, url, api_token=None, json=None): ) except requests.RequestException as err: raise MeetechoAPIError(str(err)) from err - if response.status_code != 200: - raise MeetechoAPIError(f'API request failed (HTTP status code = {response.status_code})') + if response.status_code not in (200, 202): + # Could be more selective about status codes, but not seeing an immediate need + raise MeetechoAPIError( + f"API request failed (HTTP status code = {response.status_code})" + ) # try parsing the result as JSON in case the server failed to set the Content-Type header try: return response.json() except JSONDecodeError as err: - if response.headers['Content-Type'].startswith('application/json'): + if response.headers.get("Content-Type", "").startswith("application/json"): # complain if server told us to expect JSON and it was invalid - raise MeetechoAPIError('Error decoding response as JSON') from err + raise MeetechoAPIError("Error decoding response as JSON") from err return None - def _deserialize_time(self, s: str) -> datetime: - return self.timezone.localize(datetime.strptime(s, '%Y-%m-%d %H:%M:%S')) + def _deserialize_time(self, s: str) -> datetime.datetime: + return datetime.datetime.strptime(s, "%Y-%m-%d %H:%M:%S").replace(tzinfo=self.timezone) - def _serialize_time(self, dt: datetime) -> str: - return dt.astimezone(self.timezone).strftime('%Y-%m-%d %H:%M:%S') + def _serialize_time(self, dt: datetime.datetime) -> str: + return dt.astimezone(self.timezone).strftime("%Y-%m-%d %H:%M:%S") - def _deserialize_duration(self, minutes: int) -> timedelta: - return timedelta(minutes=minutes) + def _deserialize_duration(self, minutes: int) -> datetime.timedelta: + return datetime.timedelta(minutes=minutes) - def _serialize_duration(self, td: timedelta) -> int: + def _serialize_duration(self, td: datetime.timedelta) -> int: return int(td.total_seconds() // 60) def _deserialize_meetings_response(self, response): @@ -76,30 +87,42 @@ def 
_deserialize_meetings_response(self, response): Deserializes data in the structure where needed (currently, that's time-related structures) """ - for session_data in response['rooms'].values(): - session_data['room']['start_time'] = self._deserialize_time(session_data['room']['start_time']) - session_data['room']['duration'] = self._deserialize_duration(session_data['room']['duration']) + for session_data in response["rooms"].values(): + session_data["room"]["start_time"] = self._deserialize_time( + session_data["room"]["start_time"] + ) + session_data["room"]["duration"] = self._deserialize_duration( + session_data["room"]["duration"] + ) return response def retrieve_wg_tokens(self, acronyms: Union[str, Sequence[str]]): """Retrieve API tokens for one or more WGs - :param acronyms: list of WG acronyms for which tokens are requested + :param acronyms: list of WG acronyms for which tokens are requested :return: {'tokens': {acronym0: token0, acronym1: token1, ...}} """ return self._request( - 'POST', 'auth/ietfservice/tokens', + "POST", + "auth/ietfservice/tokens", json={ - 'client': self.client_id, - 'secret': self.client_secret, - 'wgs': [acronyms] if isinstance(acronyms, str) else acronyms, - } + "client": self.client_id, + "secret": self.client_secret, + "wgs": [acronyms] if isinstance(acronyms, str) else acronyms, + }, ) - def schedule_meeting(self, wg_token: str, description: str, start_time: datetime, duration: timedelta, - extrainfo=''): + def schedule_meeting( + self, + wg_token: str, + room_id: int, + description: str, + start_time: datetime.datetime, + duration: datetime.timedelta, + extrainfo="", + ): """Schedule a meeting session - + Return structure is: { "rooms": { @@ -115,8 +138,9 @@ def schedule_meeting(self, wg_token: str, description: str, start_time: datetime } } } - - :param wg_token: token retrieved via retrieve_wg_tokens() + + :param wg_token: token retrieved via retrieve_wg_tokens() + :param room_id: int id to identify the room (will be echoed as room.id) :param description: str describing the meeting :param start_time: starting time as a datetime :param duration: duration as a timedelta @@ -125,13 +149,15 @@ def schedule_meeting(self, wg_token: str, description: str, start_time: datetime """ return self._deserialize_meetings_response( self._request( - 'POST', 'meeting/interim/createRoom', + "POST", + "meeting/interim/createRoom", api_token=wg_token, json={ - 'description': description, - 'start_time': self._serialize_time(start_time), - 'duration': self._serialize_duration(duration), - 'extrainfo': extrainfo, + "room_id": room_id, + "description": description, + "start_time": self._serialize_time(start_time), + "duration": self._serialize_duration(duration), + "extrainfo": extrainfo, }, ) ) @@ -154,7 +180,7 @@ def fetch_meetings(self, wg_token: str): } } } - + As of 2022-01-31, the return structure also includes a 'group' key whose value is the group acronym. This is not shown in the documentation. 
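As a rough sketch of driving this client directly (the base URL, credentials, and times are placeholders), the new room_id argument to schedule_meeting() is the main change for callers:

    import datetime
    from ietf.utils.meetecho import MeetechoAPI

    api = MeetechoAPI(
        api_base="https://example.meetecho.com/api/v1/",  # placeholder
        client_id="datatracker",
        client_secret="not-a-real-secret",
    )
    token = api.retrieve_wg_tokens("mars")["tokens"]["mars"]
    api.schedule_meeting(
        wg_token=token,
        room_id=1234,  # echoed back as rooms[<uuid>]["room"]["id"]
        description="mars interim",
        start_time=datetime.datetime(2024, 3, 1, 15, 0, tzinfo=datetime.UTC),
        duration=datetime.timedelta(minutes=60),
    )
    rooms = api.fetch_meetings(token)["rooms"]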
@@ -162,7 +188,7 @@ def fetch_meetings(self, wg_token: str): :return: meeting data dict """ return self._deserialize_meetings_response( - self._request('GET', 'meeting/interim/fetchRooms', api_token=wg_token) + self._request("GET", "meeting/interim/fetchRooms", api_token=wg_token) ) def delete_meeting(self, deletion_token: str): @@ -171,7 +197,166 @@ def delete_meeting(self, deletion_token: str): :param deletion_token: deletion_key from fetch_meetings() or schedule_meeting() return data :return: {} """ - return self._request('POST', 'meeting/interim/deleteRoom', api_token=deletion_token) + return self._request( + "POST", "meeting/interim/deleteRoom", api_token=deletion_token + ) + + class SlideDeckDict(TypedDict): + id: int + title: str + url: str + rev: str + order: int + + def add_slide_deck( + self, + wg_token: str, + session: str, # unique identifier + deck: SlideDeckDict, + ): + """Add a slide deck for the specified session + + API spec: + ⠀POST /materials + + Authentication -> same as interim scheduler + + content application/json + + body + { + "session": String, // Unique session identifier + "title": String, + "id": Number, + "url": String, + "rev": String, + "order": Number + } + + + Results + 202 Accepted + {4xx} + """ + self._request( + "POST", + "materials", + api_token=wg_token, + json={ + "session": session, + "title": deck["title"], + "id": deck["id"], + "url": deck["url"], + "rev": deck["rev"], + "order": deck["order"], + }, + ) + + def delete_slide_deck( + self, + wg_token: str, + session: str, # unique identifier + id: int, + ): + """Delete a slide deck from the specified session + + API spec: + DELETE /materials + + Authentication -> same as interim scheduler + + content application/json + + body + { + "session": String, + "id": Number + } + + + Results + 202 Accepted + {4xx} + """ + self._request( + "DELETE", + "materials", + api_token=wg_token, + json={ + "session": session, + "id": id, + }, + ) + + def update_slide_decks( + self, + wg_token: str, + session: str, # unique id + decks: list[SlideDeckDict], + ): + """Update/reorder decks for specified session + + PUT /materials + + Authentication -> same as interim scheduler + + content application/json + + body + { + "session": String, + "decks": [ + { + "id": Number, + "title": String, + "url": String, + "rev": String, + "order": Number + }, + { + "id": Number, + "title": String, + "url": String, + "rev": String, + "order": Number + }, + ... 
+ ] + } + + + Results + 202 Accepted + """ + self._request( + "PUT", + "materials", + api_token=wg_token, + json={ + "session": session, + "decks": decks, + } + ) + + +class DebugMeetechoAPI(MeetechoAPI): + """Meetecho API stand-in that writes to stdout instead of making requests""" + def _request(self, method, url, api_token=None, json=None): + json_lines = pformat(json, width=60).split("\n") + debug.say( + "\n" + + "\n".join( + [ + f">> MeetechoAPI: request(method={method},", + f">> MeetechoAPI: url={url},", + f">> MeetechoAPI: api_token={api_token},", + ">> MeetechoAPI: json=" + json_lines[0], + ( + ">> MeetechoAPI: " + + "\n>> MeetechoAPI: ".join(l for l in json_lines[1:]) + ), + ">> MeetechoAPI: )" + ] + ) + ) + + def retrieve_wg_tokens(self, acronyms: Union[str, Sequence[str]]): + super().retrieve_wg_tokens(acronyms) # so that we capture the outgoing request + acronyms = [acronyms] if isinstance(acronyms, str) else acronyms + return { + "tokens": { + acro: f"{acro}-token" + for acro in acronyms + } + } class MeetechoAPIError(Exception): @@ -180,7 +365,18 @@ class MeetechoAPIError(Exception): class Conference: """Scheduled session/room representation""" - def __init__(self, manager, id, public_id, description, start_time, duration, url, deletion_token): + + def __init__( + self, + manager, + id, + public_id, + description, + start_time, + duration, + url, + deletion_token, + ): self._manager = manager self.id = id # Meetecho system ID self.public_id = public_id # public session UUID @@ -195,22 +391,23 @@ def from_api_dict(cls, manager, api_dict): # Returns a list of Conferences return [ cls( - **val['room'], + **val["room"], public_id=public_id, - url=val['url'], - deletion_token=val['deletion_token'], + url=val["url"], + deletion_token=val["deletion_token"], manager=manager, - ) for public_id, val in api_dict.items() + ) + for public_id, val in api_dict.items() ] def __str__(self): - return f'Meetecho conference {self.description}' + return f"Meetecho conference {self.description}" def __repr__(self): props = [ f'description="{self.description}"', - f'start_time={repr(self.start_time)}', - f'duration={repr(self.duration)}', + f"start_time={repr(self.start_time)}", + f"duration={repr(self.duration)}", ] return f'Conference({", ".join(props)})' @@ -218,8 +415,13 @@ def __eq__(self, other): return isinstance(other, type(self)) and all( getattr(self, attr) == getattr(other, attr) for attr in [ - 'id', 'public_id', 'description', 'start_time', - 'duration', 'url', 'deletion_token' + "id", + "public_id", + "description", + "start_time", + "duration", + "url", + "deletion_token", ] ) @@ -227,37 +429,182 @@ def delete(self): self._manager.delete_conference(self) -class ConferenceManager: - def __init__(self, api_config: dict): - self.api = MeetechoAPI(**api_config) - self.wg_tokens: Dict[str, str] = {} - +class Manager: + def __init__(self, api_config): + api_kwargs = dict( + api_base=api_config["api_base"], + client_id=api_config["client_id"], + client_secret=api_config["client_secret"], + ) + if "request_timeout" in api_config: + api_kwargs["request_timeout"] = api_config["request_timeout"] + if api_config.get("debug", False): + self.api = DebugMeetechoAPI(**api_kwargs) + else: + self.api = MeetechoAPI(**api_kwargs) + self.wg_tokens = {} + def wg_token(self, group): - group_acronym = group.acronym if hasattr(group, 'acronym') else group + group_acronym = group.acronym if hasattr(group, "acronym") else group if group_acronym not in self.wg_tokens: - self.wg_tokens[group_acronym] = 
self.api.retrieve_wg_tokens( - group_acronym - )['tokens'][group_acronym] + self.wg_tokens[group_acronym] = self.api.retrieve_wg_tokens(group_acronym)[ + "tokens" + ][group_acronym] return self.wg_tokens[group_acronym] + +class ConferenceManager(Manager): def fetch(self, group): response = self.api.fetch_meetings(self.wg_token(group)) - return Conference.from_api_dict(self, response['rooms']) + return Conference.from_api_dict(self, response["rooms"]) - def create(self, group, description, start_time, duration, extrainfo=''): + def create(self, group, session_id, description, start_time, duration, extrainfo=""): response = self.api.schedule_meeting( wg_token=self.wg_token(group), + room_id=int(session_id), description=description, start_time=start_time, duration=duration, extrainfo=extrainfo, ) - return Conference.from_api_dict(self, response['rooms']) - + return Conference.from_api_dict(self, response["rooms"]) + def delete_by_url(self, group, url): for conf in self.fetch(group): if conf.url == url: self.api.delete_meeting(conf.deletion_token) def delete_conference(self, conf: Conference): - self.api.delete_meeting(conf.deletion_token) \ No newline at end of file + self.api.delete_meeting(conf.deletion_token) + + +class SlidesManager(Manager): + """Interface between Datatracker models and Meetecho API + + Note: the URL sent for a slide deck comes from DocumentInfo.get_href() and includes the revision + of the slides being sent. Be sure that 1) the URL matches what api_get_session_materials() returns + for the slides; and 2) the URL is valid if it is fetched immediately - possibly even before the call + to SlidesManager.add() or send_update() returns. + """ + + def __init__(self, api_config): + super().__init__(api_config) + slides_notify_time = api_config.get("slides_notify_time", 15) + if slides_notify_time is None: + self.slides_notify_time = None + else: + self.slides_notify_time = datetime.timedelta(minutes=slides_notify_time) + + def _should_send_update(self, session): + if self.slides_notify_time is None: + return False + timeslot = session.official_timeslotassignment().timeslot + if timeslot is None: + return False + if self.slides_notify_time < datetime.timedelta(0): + return True # < 0 means "always" for a scheduled session + else: + now = datetime.datetime.now(tz=datetime.UTC) + return (timeslot.time - self.slides_notify_time) < now < (timeslot.end_time() + self.slides_notify_time) + + def add(self, session: "Session", slides: "Document", order: int): + """Add a slide deck to the session + + Returns True if the update was sent, False if it was not sent because the + current time is outside the update window for the session. + """ + if not self._should_send_update(session): + return False + + # Would like to confirm that session.presentations includes the slides Document, but we can't + # (same problem regarding unsaved Documents discussed in the docstring) + self.api.add_slide_deck( + wg_token=self.wg_token(session.group), + session=str(session.pk), + deck={ + "id": slides.pk, + "title": slides.title, + "url": slides.get_href(), + "rev": slides.rev, + "order": order, + } + ) + return True + + def delete(self, session: "Session", slides: "Document"): + """Delete a slide deck from the session + + Returns True if the update was sent, False if it was not sent because the + current time is outside the update window for the session. 
+ """ + if not self._should_send_update(session): + return False + + if session.presentations.filter(document=slides).exists(): + # "order" problems are very likely to result if we delete slides that are actually still + # linked to the session + raise MeetechoAPIError( + f"Slides {slides.pk} are still linked to session {session.pk}." + ) + # remove, leaving a hole + self.api.delete_slide_deck( + wg_token=self.wg_token(session.group), + session=str(session.pk), + id=slides.pk, + ) + if session.presentations.filter(document__type_id="slides").exists(): + self._send_update(session) # adjust order to fill in the hole + return True + + def revise(self, session: "Session", slides: "Document"): + """Replace existing deck with its current state + + Returns True if the update was sent, False if it was not sent because the + current time is outside the update window for the session. + """ + if not self._should_send_update(session): + return False + + sp = session.presentations.filter(document=slides).first() + if sp is None: + raise MeetechoAPIError(f"Slides {slides.pk} not in session {session.pk}") + order = sp.order + # remove, leaving a hole in the order on Meetecho's side + self.api.delete_slide_deck( + wg_token=self.wg_token(session.group), + session=str(session.pk), + id=slides.pk, + ) + self.add(session, slides, order) # fill in the hole + return True + + def _send_update(self, session: "Session"): + """Notify of the current state of the session's slides (no time window check) + + This is a private helper - use send_update() (no leading underscore) instead. + """ + self.api.update_slide_decks( + wg_token=self.wg_token(session.group), + session=str(session.pk), + decks=[ + { + "id": deck.document.pk, + "title": deck.document.title, + "url": deck.document.get_href(), + "rev": deck.document.rev, + "order": deck.order, + } + for deck in session.presentations.filter(document__type="slides") + ] + ) + + def send_update(self, session: "Session"): + """Notify of the current state of the session's slides + + Returns True if the update was sent, False if it was not sent because the + current time is outside the update window for the session. 
+ """ + if not self._should_send_update(session): + return False + self._send_update(session) + return True diff --git a/ietf/utils/migrations/0002_delete_versioninfo.py b/ietf/utils/migrations/0002_delete_versioninfo.py new file mode 100644 index 0000000000..2835bb017b --- /dev/null +++ b/ietf/utils/migrations/0002_delete_versioninfo.py @@ -0,0 +1,16 @@ +# Generated by Django 4.2.11 on 2024-05-03 21:03 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("utils", "0001_initial"), + ] + + operations = [ + migrations.DeleteModel( + name="VersionInfo", + ), + ] diff --git a/ietf/utils/migrations/0003_dirtybits.py b/ietf/utils/migrations/0003_dirtybits.py new file mode 100644 index 0000000000..11f6ed09f6 --- /dev/null +++ b/ietf/utils/migrations/0003_dirtybits.py @@ -0,0 +1,37 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("utils", "0002_delete_versioninfo"), + ] + + operations = [ + migrations.CreateModel( + name="DirtyBits", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "slug", + models.CharField( + choices=[("rfcindex", "RFC Index")], max_length=40, unique=True + ), + ), + ("dirty_time", models.DateTimeField(blank=True, null=True)), + ("processed_time", models.DateTimeField(blank=True, null=True)), + ], + options={ + "verbose_name_plural": "dirty bits", + }, + ), + ] diff --git a/ietf/utils/migrations/0004_alter_dirtybits_slug.py b/ietf/utils/migrations/0004_alter_dirtybits_slug.py new file mode 100644 index 0000000000..e17ea6cadd --- /dev/null +++ b/ietf/utils/migrations/0004_alter_dirtybits_slug.py @@ -0,0 +1,21 @@ +# Copyright The IETF Trust 2026, All Rights Reserved + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("utils", "0003_dirtybits"), + ] + + operations = [ + migrations.AlterField( + model_name="dirtybits", + name="slug", + field=models.CharField( + choices=[("rfcindex", "RFC Index"), ("errata", "Errata Tags")], + max_length=40, + unique=True, + ), + ), + ] diff --git a/ietf/utils/mime.py b/ietf/utils/mime.py index ab21cfe5c6..1f9b75b4df 100644 --- a/ietf/utils/mime.py +++ b/ietf/utils/mime.py @@ -5,6 +5,7 @@ import magic import re + def get_mime_type(content): # try to fixup encoding if hasattr(magic, "open"): @@ -13,15 +14,17 @@ def get_mime_type(content): filetype = m.buffer(content) else: m = magic.Magic() - m.cookie = magic.magic_open(magic.MAGIC_NONE | magic.MAGIC_MIME | magic.MAGIC_MIME_ENCODING) + m.cookie = magic.magic_open( + magic.MAGIC_NONE | magic.MAGIC_MIME | magic.MAGIC_MIME_ENCODING + ) magic.magic_load(m.cookie, None) filetype = m.from_buffer(content) # Work around silliness in libmagic on OpenSUSE 15.1 - filetype = filetype.replace('text/x-Algol68;', 'text/plain;') - if ';' in filetype and 'charset=' in filetype: - mimetype, charset = re.split('; *charset=', filetype) + filetype = filetype.replace("text/x-Algol68;", "text/plain;") + filetype = filetype.replace("application/vnd.hp-HPGL;", "text/plain;") + if ";" in filetype and "charset=" in filetype: + mimetype, charset = re.split("; *charset=", filetype) else: - mimetype = re.split(';', filetype)[0] - charset = 'utf-8' + mimetype = re.split(";", filetype)[0] + charset = "utf-8" return mimetype, charset - diff --git a/ietf/utils/models.py b/ietf/utils/models.py index 0915537fd8..64f7f253f2 
100644 --- a/ietf/utils/models.py +++ b/ietf/utils/models.py @@ -1,22 +1,35 @@ -# Copyright The IETF Trust 2015-2020, All Rights Reserved +# Copyright The IETF Trust 2015-2026, All Rights Reserved import itertools from django.db import models + +class DirtyBits(models.Model): + """A weak semaphore mechanism for coordination with celery beat tasks + + Web workers will set the "dirty_time" value for a given dirtybit slug. + Celery workers will do work if "processed_time" < "dirty_time" and update + "processed_time". + """ + + class Slugs(models.TextChoices): + RFCINDEX = "rfcindex", "RFC Index" + ERRATA = "errata", "Errata Tags" + + # next line can become `...choices=Slugs)` when we get to Django 5.x + slug = models.CharField(max_length=40, blank=False, choices=Slugs.choices, unique=True) + dirty_time = models.DateTimeField(null=True, blank=True) + processed_time = models.DateTimeField(null=True, blank=True) + + class Meta: + verbose_name_plural = "dirty bits" + + class DumpInfo(models.Model): date = models.DateTimeField() host = models.CharField(max_length=128) tz = models.CharField(max_length=32, default='UTC') - -class VersionInfo(models.Model): - time = models.DateTimeField(auto_now=True) - command = models.CharField(max_length=32) - switch = models.CharField(max_length=16) - version = models.CharField(max_length=64) - used = models.BooleanField(default=True) - class Meta: - verbose_name_plural = 'VersionInfo' class ForeignKey(models.ForeignKey): "A local ForeignKey proxy which provides the on_delete value required under Django 2.0." diff --git a/ietf/utils/patch.py b/ietf/utils/patch.py index 9de2270ebb..fd3e4a165d 100644 --- a/ietf/utils/patch.py +++ b/ietf/utils/patch.py @@ -87,8 +87,7 @@ def createLock(self): debugmode = False def setdebug(): - global debugmode, streamhandler - + global debugmode debugmode = True loglevel = logging.DEBUG logformat = "%(levelname)8s %(message)s" diff --git a/ietf/utils/resources.py b/ietf/utils/resources.py index 6d61c5e2ed..63206eb33a 100644 --- a/ietf/utils/resources.py +++ b/ietf/utils/resources.py @@ -1,6 +1,4 @@ -# Copyright The IETF Trust 2014-2019, All Rights Reserved -# -*- coding: utf-8 -*- -# Autogenerated by the mkresources management command 2014-11-13 05:39 +# Copyright The IETF Trust 2014-2026, All Rights Reserved from ietf.api import ModelResource @@ -12,7 +10,7 @@ from django.contrib.contenttypes.models import ContentType from ietf import api -from ietf.utils.models import DumpInfo, VersionInfo +from ietf.utils.models import DirtyBits, DumpInfo class UserResource(ModelResource): @@ -45,19 +43,7 @@ class Meta: api.utils.register(DumpInfoResource()) -class VersionInfoResource(ModelResource): +class DirtyBitsResource(ModelResource): class Meta: - queryset = VersionInfo.objects.all() - serializer = api.Serializer() - cache = SimpleCache() - #resource_name = 'versioninfo' - ordering = ['id', ] - filtering = { - "id": ALL, - "time": ALL, - "command": ALL, - "switch": ALL, - "version": ALL, - "used": ALL, - } -api.utils.register(VersionInfoResource()) + queryset = DirtyBits.objects.none() +api.utils.register(DirtyBitsResource()) diff --git a/ietf/utils/searchindex.py b/ietf/utils/searchindex.py new file mode 100644 index 0000000000..87951abb60 --- /dev/null +++ b/ietf/utils/searchindex.py @@ -0,0 +1,372 @@ +# Copyright The IETF Trust 2026, All Rights Reserved +"""Search indexing utilities""" + +import re +from itertools import batched +from math import floor +from typing import Iterable + +import httpx # just for exceptions +import typesense 
+import typesense.exceptions +from django.conf import settings +from typesense.types.document import DocumentSchema + +from ietf.doc.models import Document, StoredObject +from ietf.doc.storage_utils import retrieve_str +from ietf.utils.log import log + +# Error classes that might succeed just by retrying a failed attempt. +# Must be a tuple for use with isinstance() +RETRYABLE_ERROR_CLASSES = ( + httpx.ConnectError, + httpx.ConnectTimeout, + typesense.exceptions.Timeout, + typesense.exceptions.ServerError, + typesense.exceptions.ServiceUnavailable, +) + + +DEFAULT_SETTINGS = { + "TYPESENSE_API_URL": "", + "TYPESENSE_API_KEY": "", + "TYPESENSE_COLLECTION_NAME": "docs", + "TASK_RETRY_DELAY": 10, + "TASK_MAX_RETRIES": 12, +} + + +def get_settings(): + return DEFAULT_SETTINGS | getattr(settings, "SEARCHINDEX_CONFIG", {}) + + +def enabled(): + _settings = get_settings() + return _settings["TYPESENSE_API_URL"] != "" + + +def get_typesense_client() -> typesense.Client: + _settings = get_settings() + client = typesense.Client( + { + "api_key": _settings["TYPESENSE_API_KEY"], + "nodes": [_settings["TYPESENSE_API_URL"]], + } + ) + return client + + +def get_collection_name() -> str: + _settings = get_settings() + collection_name = _settings["TYPESENSE_COLLECTION_NAME"] + assert isinstance(collection_name, str) + return collection_name + + +def _sanitize_text(content): + """Sanitize content or abstract text for search""" + # REs (with approximate names) + RE_DOT_OR_BANG_SPACE = r"\. |! " # -> " " (space) + RE_COMMENT_OR_TOC_CRUD = r"<--|-->|--+|\+|\.\.+" # -> "" + RE_BRACKETED_REF = r"\[[a-zA-Z0-9 -]+\]" # -> "" + RE_DOTTED_NUMBERS = r"[0-9]+\.[0-9]+(\.[0-9]+)?" # -> "" + RE_MULTIPLE_WHITESPACE = r"\s+" # -> " " (space) + # Replacement values (for clarity of intent) + SPACE = " " + EMPTY = "" + # Sanitizing begins here, order is significant! + content = re.sub(RE_DOT_OR_BANG_SPACE, SPACE, content.strip()) + content = re.sub(RE_COMMENT_OR_TOC_CRUD, EMPTY, content) + content = re.sub(RE_BRACKETED_REF, EMPTY, content) + content = re.sub(RE_DOTTED_NUMBERS, EMPTY, content) + content = re.sub(RE_MULTIPLE_WHITESPACE, SPACE, content) + return content.strip() + + +def typesense_doc_from_rfc(rfc: Document) -> DocumentSchema: + assert rfc.type_id == "rfc" + assert rfc.rfc_number is not None + assert rfc.pages is not None + + keywords: list[str] = rfc.keywords # help type checking + + subseries = rfc.part_of() + if len(subseries) > 1: + log( + f"RFC {rfc.rfc_number} is in multiple subseries. " + f"Indexing as {subseries[0].name}" + ) + subseries = subseries[0] if len(subseries) > 0 else None + obsoleted_by = rfc.related_that("obs") + updated_by = rfc.related_that("updates") + + stored_txt = ( + StoredObject.objects.exclude_deleted() + .filter(store="rfc", doc_name=rfc.name, name__startswith="txt/") + .first() + ) + content = "" + if stored_txt is not None: + # Should be available in the blobdb, but be cautious... 
+ try: + content = retrieve_str(kind=stored_txt.store, name=stored_txt.name) + except Exception as err: + log(f"Unable to retrieve {stored_txt} from storage: {err}") + + ts_document = { + "id": f"doc-{rfc.pk}", + "rfcNumber": rfc.rfc_number, + "rfc": str(rfc.rfc_number), + "filename": rfc.name, + "title": rfc.title, + "abstract": _sanitize_text(rfc.abstract), + "pages": rfc.pages, + "keywords": keywords, + "type": "rfc", + "state": [state.name for state in rfc.states.all()], + "status": {"slug": rfc.std_level.slug, "name": rfc.std_level.name}, + "date": floor(rfc.time.timestamp()), + "publicationDate": floor(rfc.pub_datetime().timestamp()), + "stream": {"slug": rfc.stream.slug, "name": rfc.stream.name}, + "authors": [ + {"name": rfc_author.titlepage_name, "affiliation": rfc_author.affiliation} + for rfc_author in rfc.rfcauthor_set.all() + ], + "flags": { + "hiddenDefault": False, + "obsoleted": len(obsoleted_by) > 0, + "updated": len(updated_by) > 0, + }, + "obsoletedBy": [str(doc.rfc_number) for doc in obsoleted_by], + "updatedBy": [str(doc.rfc_number) for doc in updated_by], + "ranking": rfc.rfc_number, + } + if subseries is not None: + ts_document["subseries"] = { + "acronym": subseries.type.slug, + "number": int(subseries.name[len(subseries.type.slug) :]), + "total": len(subseries.contains()), + } + if rfc.group is not None: + ts_document["group"] = { + "acronym": rfc.group.acronym, + "name": rfc.group.name, + "full": f"{rfc.group.acronym} - {rfc.group.name}", + } + if ( + rfc.group.parent is not None + and rfc.stream_id not in ["ise", "irtf", "iab"] # exclude editorial? + ): + ts_document["area"] = { + "acronym": rfc.group.parent.acronym, + "name": rfc.group.parent.name, + "full": f"{rfc.group.parent.acronym} - {rfc.group.parent.name}", + } + if rfc.ad is not None: + ts_document["adName"] = rfc.ad.name + if content != "": + ts_document["content"] = _sanitize_text(content) + return ts_document + + +def update_or_create_rfc_entry(rfc: Document): + """Update/create index entries for one RFC""" + ts_document = typesense_doc_from_rfc(rfc) + client = get_typesense_client() + client.collections[get_collection_name()].documents.upsert(ts_document) + + +def update_or_create_rfc_entries( + rfcs: Iterable[Document], batchsize: int | None = None +): + """Update/create index entries for RFCs in bulk + + If batchsize is set, computes index data in batches of batchsize and adds to the + index. Will make a total of (len(rfcs) // batchsize) + 1 API calls. + + N.b. that typesense has a server-side batch size that defaults to 40, which should + "almost never be changed from the default." This does not change that. Further, + the python client library's import_ method has a batch_size parameter that does + client-side batching. We don't use that, either. 
+ """ + success_count = 0 + fail_count = 0 + client = get_typesense_client() + batches = [rfcs] if batchsize is None else batched(rfcs, batchsize) + for batch in batches: + tdoc_batch = [typesense_doc_from_rfc(rfc) for rfc in batch] + results = client.collections[get_collection_name()].documents.import_( + tdoc_batch, {"action": "upsert"} + ) + for tdoc, result in zip(tdoc_batch, results): + if result["success"]: + success_count += 1 + else: + fail_count += 1 + log(f"Failed to index RFC {tdoc['rfcNumber']}: {result['error']}") + log(f"Added {success_count} RFCs to the index, failed to add {fail_count}") + + +DOCS_SCHEMA = { + "enable_nested_fields": True, + "default_sorting_field": "ranking", + "fields": [ + # RFC number in integer form, for sorting asc/desc in search results + # Omit field for drafts + { + "name": "rfcNumber", + "type": "int32", + "facet": False, + "optional": True, + "sort": True, + }, + # RFC number in string form, for direct matching with ranking + # Omit field for drafts + {"name": "rfc", "type": "string", "facet": False, "optional": True}, + # For drafts that correspond to an RFC, insert the RFC number + # Omit field for rfcs or if not relevant + {"name": "ref", "type": "string", "facet": False, "optional": True}, + # Filename of the document (without the extension, e.g. "rfc1234" + # or "draft-ietf-abc-def-02") + {"name": "filename", "type": "string", "facet": False, "infix": True}, + # Title of the draft / rfc + {"name": "title", "type": "string", "facet": False}, + # Abstract of the draft / rfc + {"name": "abstract", "type": "string", "facet": False}, + # Number of pages + {"name": "pages", "type": "int32", "facet": False}, + # A list of search keywords if relevant, set to empty array otherwise + {"name": "keywords", "type": "string[]", "facet": True}, + # Type of the document + # Accepted values: "draft" or "rfc" + {"name": "type", "type": "string", "facet": True}, + # State(s) of the document (e.g. "Published", "Adopted by a WG", etc.) + # Use the full name, not the slug + {"name": "state", "type": "string[]", "facet": True, "optional": True}, + # Status (Standard Level Name) + # Object with properties "slug" and "name" + # e.g.: { slug: "std", "name": "Internet Standard" } + {"name": "status", "type": "object", "facet": True, "optional": True}, + # The subseries it is part of. (e.g. "BCP") + # Omit otherwise. + { + "name": "subseries.acronym", + "type": "string", + "facet": True, + "optional": True, + }, + # The subseries number it is part of. (e.g. 123) + # Omit otherwise. + { + "name": "subseries.number", + "type": "int32", + "facet": True, + "sort": True, + "optional": True, + }, + # The total of RFCs in the subseries + # Omit if not part of a subseries + { + "name": "subseries.total", + "type": "int32", + "facet": False, + "sort": False, + "optional": True, + }, + # Date of the document, in unix epoch seconds (can be negative for < 1970) + {"name": "date", "type": "int64", "facet": False}, + # Expiration date of the document, in unix epoch seconds (can be negative + # for < 1970). Omit field for RFCs + {"name": "expires", "type": "int64", "facet": False, "optional": True}, + # Publication date of the RFC, in unix epoch seconds (can be negative + # for < 1970). 
Omit field for drafts + { + "name": "publicationDate", + "type": "int64", + "facet": True, + "optional": True, + }, + # Working Group + # Object with properties "acronym", "name" and "full" + # e.g.: + # { + # "acronym": "ntp", + # "name": "Network Time Protocols", + # "full": "ntp - Network Time Protocols", + # } + {"name": "group", "type": "object", "facet": True, "optional": True}, + # Area + # Object with properties "acronym", "name" and "full" + # e.g.: + # { + # "acronym": "mpls", + # "name": "Multiprotocol Label Switching", + # "full": "mpls - Multiprotocol Label Switching", + # } + {"name": "area", "type": "object", "facet": True, "optional": True}, + # Stream + # Object with properties "slug" and "name" + # e.g.: { slug: "ietf", "name": "IETF" } + {"name": "stream", "type": "object", "facet": True, "optional": True}, + # List of authors + # Array of objects with properties "name" and "affiliation" + # e.g.: + # [ + # {"name": "John Doe", "affiliation": "ACME Inc."}, + # {"name": "Ada Lovelace", "affiliation": "Babbage Corps."}, + # ] + {"name": "authors", "type": "object[]", "facet": True, "optional": True}, + # Area Director Name (e.g. "Leonardo DaVinci") + {"name": "adName", "type": "string", "facet": True, "optional": True}, + # Whether the document should be hidden by default in search results or not. + {"name": "flags.hiddenDefault", "type": "bool", "facet": True}, + # Whether the document is obsoleted by another document or not. + {"name": "flags.obsoleted", "type": "bool", "facet": True}, + # Whether the document is updated by another document or not. + {"name": "flags.updated", "type": "bool", "facet": True}, + # List of documents that obsolete this document. + # Array of strings. Use RFC number for RFCs. (e.g. ["123", "456"]) + # Omit if none. Must be provided if "flags.obsoleted" is set to True. + { + "name": "obsoletedBy", + "type": "string[]", + "facet": False, + "optional": True, + }, + # List of documents that update this document. + # Array of strings. Use RFC number for RFCs. (e.g. ["123", "456"]) + # Omit if none. Must be provided if "flags.updated" is set to True. + {"name": "updatedBy", "type": "string[]", "facet": False, "optional": True}, + # Sanitized content of the document. + # Make sure to remove newlines, double whitespaces, symbols and tags. 
+ { + "name": "content", + "type": "string", + "facet": False, + "optional": True, + "store": False, + }, + # Ranking value to use when no explicit sorting is used during search + # Set to the RFC number for RFCs and the revision number for drafts + # This ensures newer RFCs get listed first in the default search results + # (without a query) + {"name": "ranking", "type": "int32", "facet": False}, + ], +} + + +def create_collection(): + collection_name = get_collection_name() + log(f"Creating '{collection_name}' collection") + client = get_typesense_client() + client.collections.create({"name": get_collection_name()} | DOCS_SCHEMA) + + +def delete_collection(): + collection_name = get_collection_name() + log(f"Deleting '{collection_name}' collection") + client = get_typesense_client() + try: + client.collections[collection_name].delete() + except typesense.exceptions.ObjectNotFound: + pass diff --git a/ietf/utils/serialize.py b/ietf/utils/serialize.py index 342d211cf5..77f97942cb 100644 --- a/ietf/utils/serialize.py +++ b/ietf/utils/serialize.py @@ -16,7 +16,7 @@ def object_as_shallow_dict(obj): if isinstance(f, models.ManyToManyField): v = list(v.values_list("pk", flat=True)) elif isinstance(f, models.DateTimeField): - v = v.astimezone(datetime.timezone.utc).isoformat() + v = v.astimezone(datetime.UTC).isoformat() elif isinstance(f, models.DateField): v = v.strftime('%Y-%m-%d') diff --git a/ietf/utils/storage.py b/ietf/utils/storage.py index 0aa02cab86..bad5af5178 100644 --- a/ietf/utils/storage.py +++ b/ietf/utils/storage.py @@ -1,8 +1,95 @@ +# Copyright The IETF Trust 2020-2025, All Rights Reserved +"""Django Storage classes""" +import datetime +from hashlib import sha384 +from pathlib import Path +from typing import Optional + +from django.conf import settings +from django.core.files.base import File from django.core.files.storage import FileSystemStorage +from ietf.doc.storage_utils import store_file +from .log import log + class NoLocationMigrationFileSystemStorage(FileSystemStorage): - def deconstruct(obj): # pylint: disable=no-self-argument - path, args, kwargs = FileSystemStorage.deconstruct(obj) - kwargs["location"] = None - return (path, args, kwargs) + def deconstruct(self): + path, args, kwargs = super().deconstruct() + kwargs["location"] = None # don't record location in migrations + return path, args, kwargs + + +class BlobShadowFileSystemStorage(NoLocationMigrationFileSystemStorage): + """FileSystemStorage that shadows writes to the blob store as well + + Strips directories from the filename when naming the blob. + """ + + def __init__( + self, + *, # disallow positional arguments + kind: str, + location=None, + base_url=None, + file_permissions_mode=None, + directory_permissions_mode=None, + ): + self.kind = kind + super().__init__( + location, base_url, file_permissions_mode, directory_permissions_mode + ) + + def save(self, name, content, max_length=None): + # Write content to the filesystem - this deals with chunks, etc... + saved_name = super().save(name, content, max_length) + + if settings.ENABLE_BLOBSTORAGE: + try: + # Retrieve the content and write to the blob store + blob_name = Path(saved_name).name # strips path + with self.open(saved_name, "rb") as f: + store_file(self.kind, blob_name, f, allow_overwrite=True) + except Exception as err: + log(f"Blobstore Error: Failed to shadow {saved_name} at {self.kind}:{blob_name}: {repr(err)}") + if settings.SERVER_MODE == "development": + raise + return saved_name # includes the path! 
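# Illustrative usage sketch (an assumption added for clarity, not part of this
# change): a model field could opt in to blob shadowing through this storage
# class along these lines (the field name and the "photo" kind are hypothetical):
#
#     photo = models.FileField(
#         storage=BlobShadowFileSystemStorage(kind="photo"),
#         upload_to="photo",
#         blank=True,
#     )
#
# save() above writes to the filesystem first, then copies the same bytes into
# the blob store under the file's basename, while the caller still receives the
# path-qualified name that FileSystemStorage would normally return.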
+ + def deconstruct(self): + path, args, kwargs = super().deconstruct() + kwargs["kind"] = "" # don't record "kind" in migrations + return path, args, kwargs + + +class MetadataFile(File): + """File that includes metadata""" + + def __init__(self, file, name=None, mtime: Optional[datetime.datetime]=None, content_type=""): + super().__init__(file=file, name=name) + self.mtime = mtime + self.content_type = content_type + self._custom_metadata = None + + @property + def custom_metadata(self): + if self._custom_metadata is None: + self._custom_metadata = self._compute_custom_metadata() + return self._custom_metadata + + def _compute_custom_metadata(self): + try: + self.file.seek(0) + except AttributeError: # TODO-BLOBSTORE + raise NotImplementedError("cannot handle unseekable content") + content_bytes = self.file.read() + if not isinstance( + content_bytes, bytes + ): # TODO-BLOBSTORE: This is sketch-development only -remove before committing + raise Exception(f"Expected bytes - got {type(content_bytes)}") + self.file.seek(0) + return { + "len": f"{len(content_bytes)}", + "sha384": f"{sha384(content_bytes).hexdigest()}", + "mtime": None if self.mtime is None else self.mtime.isoformat(), + } diff --git a/ietf/utils/templatetags/htmlfilters.py b/ietf/utils/templatetags/htmlfilters.py index a0f9232c57..1e399e2d72 100644 --- a/ietf/utils/templatetags/htmlfilters.py +++ b/ietf/utils/templatetags/htmlfilters.py @@ -7,6 +7,7 @@ from django.template.defaultfilters import stringfilter from ietf.utils.html import remove_tags +from ietf.utils.markdown import markdown as utils_markdown register = Library() @@ -16,3 +17,9 @@ def removetags(value, tags): """Removes a comma-separated list of [X]HTML tags from the output.""" return remove_tags(value, re.split(r"\s*,\s*", tags)) + +@register.filter(name="markdown", is_safe=True) +def markdown(string): + # One issue is that the string is enclosed in
<p> and </p> tags.
    ... Let's remove the leading/trailing ones... + return utils_markdown(string)[3:-4] + diff --git a/ietf/utils/templatetags/tests.py b/ietf/utils/templatetags/tests.py index a93bf2d94d..859319be3d 100644 --- a/ietf/utils/templatetags/tests.py +++ b/ietf/utils/templatetags/tests.py @@ -3,6 +3,7 @@ from django.template import Context, Origin, Template from django.test import override_settings +from ietf.utils.templatetags.textfilters import linkify from ietf.utils.test_utils import TestCase import debug # pyflakes: ignore @@ -39,3 +40,68 @@ def test_origin_outside_base_dir(self): output = template.render(Context()) self.assertNotIn(component, output, 'Full path components should not be revealed in html') + + +class TextfiltersTests(TestCase): + def test_linkify(self): + # Cases with autoescape = True (the default) + self.assertEqual( + linkify("plain string"), + "plain string", + ) + self.assertEqual( + linkify("https://www.ietf.org"), + 'https://www.ietf.org', + ) + self.assertEqual( + linkify('IETF'), + ( + '<a href="https://www.ietf.org">IETF</a>' + ), + ) + self.assertEqual( + linkify("somebody@example.com"), + 'somebody@example.com', + ) + self.assertEqual( + linkify("Some Body "), + ( + 'Some Body <' + 'somebody@example.com>' + ), + ) + self.assertEqual( + linkify(""), + "<script>alert('h4x0r3d');</script>", + ) + + # Cases with autoescape = False (these are dangerous and assume the caller + # has sanitized already) + self.assertEqual( + linkify("plain string", autoescape=False), + "plain string", + ) + self.assertEqual( + linkify("https://www.ietf.org", autoescape=False), + 'https://www.ietf.org', + ) + self.assertEqual( + linkify('IETF', autoescape=False), + 'IETF', + ) + self.assertEqual( + linkify("somebody@example.com", autoescape=False), + 'somebody@example.com', + ) + # bleach.Linkifier translates the < -> < and > -> > on this one + self.assertEqual( + linkify("Some Body ", autoescape=False), + ( + 'Some Body <' + 'somebody@example.com>' + ), + ) + self.assertEqual( + linkify("", autoescape=False), + "", + ) diff --git a/ietf/utils/templatetags/textfilters.py b/ietf/utils/templatetags/textfilters.py index 70b94cf673..e3bfbe0c56 100644 --- a/ietf/utils/templatetags/textfilters.py +++ b/ietf/utils/templatetags/textfilters.py @@ -7,11 +7,12 @@ from django import template from django.conf import settings from django.template.defaultfilters import stringfilter +from django.utils.html import conditional_escape from django.utils.safestring import mark_safe import debug # pyflakes:ignore -from ietf.utils.text import xslugify as _xslugify, texescape, bleach_linker +from ietf.utils.text import linkify as _linkify, xslugify as _xslugify, texescape register = template.Library() @@ -71,10 +72,13 @@ def texescape_filter(value): "A TeX escape filter" return texescape(value) -@register.filter +@register.filter(needs_autoescape=True) @stringfilter -def linkify(value): - text = mark_safe(bleach_linker.linkify(value)) +def linkify(value, autoescape=True): + if autoescape: + # Escape unless the input was already a SafeString + value = conditional_escape(value) + text = mark_safe(_linkify(value)) # _linkify is a safe operation return text @register.filter diff --git a/ietf/utils/test_data.py b/ietf/utils/test_data.py index ce5a46995d..c5d3472751 100644 --- a/ietf/utils/test_data.py +++ b/ietf/utils/test_data.py @@ -11,12 +11,13 @@ import debug # pyflakes:ignore -from ietf.doc.models import Document, DocAlias, State, DocumentAuthor, DocEvent, RelatedDocument, NewRevisionDocEvent +from 
ietf.doc.models import Document, State, DocumentAuthor, DocEvent, RelatedDocument, NewRevisionDocEvent +from ietf.doc.factories import IndividualDraftFactory, ConflictReviewFactory, StatusChangeFactory, WgDraftFactory, WgRfcFactory from ietf.group.models import Group, GroupHistory, Role, RoleHistory from ietf.iesg.models import TelechatDate from ietf.ipr.models import HolderIprDisclosure, IprDocRel, IprDisclosureStateName, IprLicenseTypeName from ietf.meeting.models import Meeting, ResourceAssociation -from ietf.name.models import StreamName, DocRelationshipName, RoomResourceName, ConstraintName +from ietf.name.models import DocRelationshipName, RoomResourceName, ConstraintName from ietf.person.models import Person, Email from ietf.group.utils import setup_default_community_list_for_group from ietf.review.models import (ReviewRequest, ReviewerSettings, ReviewResultName, ReviewTypeName, ReviewTeamSettings ) @@ -83,7 +84,7 @@ def make_immutable_base_data(): create_person(iab, "chair") create_person(iab, "member") - ise = create_group(name="Independent Submission Editor", acronym="ise", type_id="rfcedtyp") + ise = create_group(name="Independent Submission Editor", acronym="ise", type_id="ise") create_person(ise, "chair") rsoc = create_group(name="RFC Series Oversight Committee", acronym="rsoc", type_id="rfcedtyp") @@ -176,7 +177,6 @@ def make_test_data(): charter.set_state(State.objects.get(used=True, slug="approved", type="charter")) group.charter = charter group.save() - DocAlias.objects.create(name=charter.name).docs.add(charter) setup_default_community_list_for_group(group) # ames WG @@ -198,7 +198,6 @@ def make_test_data(): rev="00", ) charter.set_state(State.objects.get(used=True, slug="infrev", type="charter")) - DocAlias.objects.create(name=charter.name).docs.add(charter) group.charter = charter group.save() setup_default_community_list_for_group(group) @@ -243,7 +242,6 @@ def make_test_data(): # rev="00", # ) #charter.set_state(State.objects.get(used=True, slug="infrev", type="charter")) - #DocAlias.objects.create(name=charter.name).docs.add(charter) #group.charter = charter #group.save() @@ -287,8 +285,6 @@ def make_test_data(): expires=timezone.now(), ) old_draft.set_state(State.objects.get(used=True, type="draft", slug="expired")) - old_alias = DocAlias.objects.create(name=old_draft.name) - old_alias.docs.add(old_draft) # draft draft = Document.objects.create( @@ -312,10 +308,7 @@ def make_test_data(): draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="pub-req")) draft.set_state(State.objects.get(used=True, type="draft-stream-%s" % draft.stream_id, slug="wg-doc")) - doc_alias = DocAlias.objects.create(name=draft.name) - doc_alias.docs.add(draft) - - RelatedDocument.objects.create(source=draft, target=old_alias, relationship=DocRelationshipName.objects.get(slug='replaces')) + RelatedDocument.objects.create(source=draft, target=old_draft, relationship=DocRelationshipName.objects.get(slug='replaces')) old_draft.set_state(State.objects.get(type='draft', slug='repl')) DocumentAuthor.objects.create( @@ -361,7 +354,7 @@ def make_test_data(): IprDocRel.objects.create( disclosure=ipr, - document=doc_alias, + document=draft, revisions='00', ) @@ -390,37 +383,27 @@ def make_test_data(): ) # an independent submission before review - doc = Document.objects.create(name='draft-imaginary-independent-submission',type_id='draft',rev='00', - title="Some Independent Notes on Imagination") - doc.set_state(State.objects.get(used=True, type="draft", slug="active")) - 
DocAlias.objects.create(name=doc.name).docs.add(doc) + IndividualDraftFactory(title="Some Independent Notes on Imagination") # an irtf submission mid review - doc = Document.objects.create(name='draft-imaginary-irtf-submission', type_id='draft',rev='00', - stream=StreamName.objects.get(slug='irtf'), title="The Importance of Research Imagination") - docalias = DocAlias.objects.create(name=doc.name) - docalias.docs.add(doc) - doc.set_state(State.objects.get(type="draft", slug="active")) - crdoc = Document.objects.create(name='conflict-review-imaginary-irtf-submission', type_id='conflrev', - rev='00', notify="fsm@ietf.org", title="Conflict Review of IRTF Imagination Document") - DocAlias.objects.create(name=crdoc.name).docs.add(crdoc) - crdoc.set_state(State.objects.get(name='Needs Shepherd', type__slug='conflrev')) - crdoc.relateddocument_set.create(target=docalias,relationship_id='conflrev') + doc = IndividualDraftFactory(name="draft-imaginary-irtf-submission", stream_id="irtf", title="The Importance of Research Imagination") + ConflictReviewFactory(name="conflict-review-imaginary-irtf-submission", review_of=doc, notify="fsm@ietf.org", title="Conflict Review of IRTF Imagination Document") # A status change mid review iesg = Group.objects.get(acronym='iesg') - doc = Document.objects.create(name='status-change-imaginary-mid-review',type_id='statchg', rev='00', - notify="fsm@ietf.org", group=iesg, title="Status Change Review without Imagination") - doc.set_state(State.objects.get(slug='needshep',type__slug='statchg')) - docalias = DocAlias.objects.create(name='status-change-imaginary-mid-review') - docalias.docs.add(doc) + doc = StatusChangeFactory( + name='status-change-imaginary-mid-review', + notify="fsm@ietf.org", + group=iesg, + title="Status Change Review without Imagination", + states= [State.objects.get(type_id="statchg",slug="needshep")] + ) # Some things for a status change to affect def rfc_for_status_change_test_factory(name,rfc_num,std_level_id): - target_rfc = Document.objects.create(name=name, type_id='draft', std_level_id=std_level_id, notify="%s@ietf.org"%name) - target_rfc.set_state(State.objects.get(slug='rfc',type__slug='draft')) - DocAlias.objects.create(name=name).docs.add(target_rfc) - DocAlias.objects.create(name='rfc%d'%rfc_num).docs.add(target_rfc) + target_rfc = WgRfcFactory(rfc_number=rfc_num, std_level_id=std_level_id) + source_draft = WgDraftFactory(name=name, states=[("draft","rfc")], notify=f"{name}@ietf.org") + source_draft.relateddocument_set.create(relationship_id="became_rfc", target=target_rfc) return target_rfc rfc_for_status_change_test_factory('draft-ietf-random-thing',9999,'ps') rfc_for_status_change_test_factory('draft-ietf-random-otherthing',9998,'inf') diff --git a/ietf/utils/test_runner.py b/ietf/utils/test_runner.py index da2a8aa26a..a23416e87f 100644 --- a/ietf/utils/test_runner.py +++ b/ietf/utils/test_runner.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2009-2020, All Rights Reserved +# Copyright The IETF Trust 2009-2025, All Rights Reserved # -*- coding: utf-8 -*- # # Portion Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies). 
@@ -48,16 +48,16 @@ import subprocess import tempfile import copy +from contextlib import contextmanager + +import boto3 +import botocore.config import factory.random import urllib3 import warnings -from urllib.parse import urlencode -from fnmatch import fnmatch - -from coverage.report import Reporter -from coverage.results import Numbers -from coverage.misc import NotPython +from typing import Callable, Optional +from urllib.parse import urlencode import django from django.conf import settings @@ -70,7 +70,7 @@ from django.template.loaders.filesystem import Loader as BaseLoader from django.test.runner import DiscoverRunner from django.core.management import call_command -from django.urls import URLResolver # type: ignore +from django.urls import URLResolver, resolve, Resolver404 # type: ignore from django.template.backends.django import DjangoTemplates from django.template.backends.django import Template # type: ignore[attr-defined] from django.utils import timezone @@ -82,21 +82,47 @@ import ietf import ietf.utils.mail from ietf.utils.management.commands import pyflakes -from ietf.utils.test_smtpserver import SMTPTestServerDriver +from ietf.utils.aiosmtpd import SMTPTestServerDriver from ietf.utils.test_utils import TestCase +from mypy_boto3_s3.service_resource import Bucket + + +class UrlCoverageWarning(UserWarning): + """Warning category for URL coverage-related warnings""" + # URLs for which we don't expect patterns to match + IGNORE_URLS = ( + "/_doesnotexist/", + "/sitemap.xml.", + ) -loaded_templates = set() -visited_urls = set() -test_database_name = None -old_destroy = None -old_create = None -template_coverage_collection = None -code_coverage_collection = None -url_coverage_collection = None +class UninterestingPatternWarning(UrlCoverageWarning): + """Warning category for unexpected URL match patterns + + These are common, caused by tests that hit a URL that is not selected for + coverage checking. The warning is in place to help with a putative future + review of whether we're selecting the right patterns to check for coverage. 
+ """ + pass + + +# Configure warnings for reasonable output quantity +warnings.simplefilter("once", UrlCoverageWarning) +warnings.simplefilter("ignore", UninterestingPatternWarning) + + +loaded_templates: set[str] = set() +visited_urls: set[str] = set() +test_database_name: Optional[str] = None +old_destroy: Optional[Callable] = None +old_create: Optional[Callable] = None + +template_coverage_collection = False +url_coverage_collection = False validation_settings = {"validate_html": None, "validate_html_harder": None, "show_logging": False} + def start_vnu_server(port=8888): "Start a vnu validation server on the indicated port" vnu = subprocess.Popen( @@ -226,10 +252,12 @@ def load_and_run_fixtures(verbosity): fn() def safe_create_test_db(self, verbosity, *args, **kwargs): - global test_database_name, old_create + if old_create is None: + raise RuntimeError("old_create has not been set, cannot proceed") keepdb = kwargs.get('keepdb', False) if not keepdb: print(" Creating test database...") + global test_database_name test_database_name = old_create(self, 0, *args, **kwargs) if settings.GLOBAL_TEST_FIXTURES: @@ -239,8 +267,9 @@ def safe_create_test_db(self, verbosity, *args, **kwargs): return test_database_name def safe_destroy_test_db(*args, **kwargs): + if old_destroy is None: + raise RuntimeError("old_destroy has not been set, cannot proceed") sys.stdout.write('\n') - global test_database_name, old_destroy keepdb = kwargs.get('keepdb', False) if not keepdb: if settings.DATABASES["default"]["NAME"] != test_database_name: @@ -259,7 +288,14 @@ def pyflakes_test(self): path = os.path.join(settings.BASE_DIR) warnings = [] warnings = pyflakes.checkPaths([path], verbosity=0) - self.assertEqual([], [str(w) for w in warnings]) + + # Filter out warnings about unused global variables + filtered_warnings = [ + w for w in warnings + if not re.search(r"`global \w+` is unused: name is never assigned in scope", str(w)) + ] + + self.assertEqual([], [str(w) for w in filtered_warnings]) class MyPyTest(TestCase): @@ -347,15 +383,13 @@ class TemplateCoverageLoader(BaseLoader): is_usable = True def get_template(self, template_name, skip=None): - global template_coverage_collection, loaded_templates - if template_coverage_collection == True: + if template_coverage_collection: loaded_templates.add(str(template_name)) raise TemplateDoesNotExist(template_name) def record_urls_middleware(get_response): def record_urls(request): - global url_coverage_collection, visited_urls - if url_coverage_collection == True: + if url_coverage_collection: visited_urls.add(request.path) return get_response(request) return record_urls @@ -401,8 +435,9 @@ def do_append(res, p0, p1, item): res.append((str(item.pattern), item)) return res + _all_templates = None -def get_template_paths(apps=None): +def get_template_paths(apps=None) -> list[str]: global _all_templates if not _all_templates: # TODO: Add app templates to the full list, if we are using @@ -411,25 +446,30 @@ def get_template_paths(apps=None): templatepaths = settings.TEMPLATES[0]['DIRS'] for templatepath in templatepaths: for dirpath, dirs, files in os.walk(templatepath): - if ".svn" in dirs: - dirs.remove(".svn") - relative_path = dirpath[len(templatepath)+1:] - for file in files: - ignore = False - for pattern in settings.TEST_TEMPLATE_IGNORE: - if fnmatch(file, pattern): - ignore = True - break - if ignore: - continue - if relative_path != "": - file = os.path.join(relative_path, file) - templates.add(file) - if apps: - templates = [ t for t in templates if 
t.split(os.path.sep)[0] in apps ] - _all_templates = templates + # glob against path from PROJECT_DIR + project_path = pathlib.Path( + dirpath.removeprefix(settings.PROJECT_DIR).lstrip("/") + ) + # label entries with name relative to templatepath + relative_path = pathlib.Path( + dirpath.removeprefix(templatepath).lstrip("/") + ) + if ( + apps + and len(relative_path.parts) > 0 + and relative_path.parts[0] not in apps + ): + continue # skip uninteresting apps + for filename in files: + file_path = project_path / filename + if not any( + file_path.match(pat) for pat in settings.TEST_TEMPLATE_IGNORE + ): + templates.add(relative_path / filename) + _all_templates = [str(t) for t in templates] return _all_templates + def save_test_results(failures, test_labels): # Record the test result in a file, in order to be able to check the # results and avoid re-running tests if we've already run them with OK @@ -445,50 +485,29 @@ def save_test_results(failures, test_labels): tfile.write("%s OK\n" % (timestr, )) tfile.close() -def set_coverage_checking(flag=True): + +def set_template_coverage(flag): global template_coverage_collection - global code_coverage_collection + orig = template_coverage_collection + template_coverage_collection = flag + return orig + + +def set_url_coverage(flag): global url_coverage_collection - if settings.SERVER_MODE == 'test': - if flag: - settings.TEST_CODE_COVERAGE_CHECKER.collector.resume() - template_coverage_collection = True - code_coverage_collection = True - url_coverage_collection = True - else: - settings.TEST_CODE_COVERAGE_CHECKER.collector.pause() - template_coverage_collection = False - code_coverage_collection = False - url_coverage_collection = False - -class CoverageReporter(Reporter): - def report(self): - self.find_file_reporters(None) - - total = Numbers() - result = {"coverage": 0.0, "covered": {}, "format": 5, } - for fr in self.file_reporters: - try: - analysis = self.coverage._analyze(fr) - nums = analysis.numbers - missing_nums = sorted(analysis.missing) - with io.open(analysis.filename, encoding='utf-8') as file: - lines = file.read().splitlines() - missing_lines = [ lines[l-1] for l in missing_nums ] - result["covered"][fr.relative_filename()] = (nums.n_statements, nums.pc_covered/100.0, missing_nums, missing_lines) - total += nums - except KeyboardInterrupt: # pragma: not covered - raise - except Exception: - report_it = not self.config.ignore_errors - if report_it: - typ, msg = sys.exc_info()[:2] - if typ is NotPython and not fr.should_be_python(): - report_it = False - if report_it: - raise - result["coverage"] = total.pc_covered/100.0 - return result + orig = url_coverage_collection + url_coverage_collection = flag + return orig + + +@contextmanager +def disable_coverage(): + """Context manager/decorator that disables template/url coverage""" + orig_template = set_template_coverage(False) + orig_url = set_url_coverage(False) + yield + set_template_coverage(orig_template) + set_url_coverage(orig_url) class CoverageTest(unittest.TestCase): @@ -511,8 +530,8 @@ def report_test_result(self, test): # Assert coverage failure only if we're running the full test suite -- if we're # only running some tests, then of course the coverage is going to be low. 
if self.runner.run_full_test_suite: - # Permit 0.02% variation in results -- otherwise small code changes become a pain - fudge_factor = 0.0002 + # Permit a small variation in results -- otherwise small code changes become a pain + fudge_factor = 0.0004 self.assertLessEqual(len(test_missing), len(master_missing), msg = "New %s without test coverage since %s: %s" % (test, latest_coverage_version, list(set(test_missing) - set(master_missing)))) if not self.runner.ignore_lower_coverage: @@ -521,7 +540,6 @@ def report_test_result(self, test): ( test, test_coverage*100, latest_coverage_version, master_coverage*100, )) def template_coverage_test(self): - global loaded_templates if self.runner.check_coverage: apps = [ app.split('.')[-1] for app in self.runner.test_apps ] all = get_template_paths(apps) @@ -556,44 +574,62 @@ def ignore_pattern(regex, pattern): ) or pattern.callback == django.views.static.serve) - patterns = [(regex, re.compile(regex, re.U), obj) for regex, obj in url_patterns - if not ignore_pattern(regex, obj)] + patterns ={ + regex: obj + for regex, obj in url_patterns + if not ignore_pattern(regex, obj) + } covered = set() for url in visited_urls: - for regex, compiled, obj in patterns: - if regex not in covered and compiled.match(url[1:]): # strip leading / - covered.add(regex) - break + try: + resolved = resolve(url) # let Django resolve the URL for us + except Resolver404: + if url not in UrlCoverageWarning.IGNORE_URLS: + warnings.warn( + f"Unable to resolve visited URL {url}", UrlCoverageWarning + ) + continue + if resolved.route not in patterns: + warnings.warn( + f"WARNING: url resolved to an unexpected pattern (url='{url}', " + f"resolved to r'{resolved.route}'", + UninterestingPatternWarning, + ) + continue + covered.add(resolved.route) self.runner.coverage_data["url"] = { - "coverage": 1.0*len(covered)/len(patterns), - "covered": dict( (k, (o.lookup_str, k in covered)) for k,p,o in patterns ), + "coverage": 1.0 * len(covered) / len(patterns), + "covered": dict( + (k, (o.lookup_str, k in covered)) for k, o in patterns.items() + ), "format": 4, - } + } self.report_test_result("url") else: self.skipTest("Coverage switched off with --skip-coverage") def code_coverage_test(self): - if self.runner.check_coverage: - include = [ os.path.join(path, '*') for path in self.runner.test_paths ] - checker = self.runner.code_coverage_checker - checker.stop() + if ( + self.runner.check_coverage + and settings.TEST_CODE_COVERAGE_CHECKER is not None + ): + coverage_manager = settings.TEST_CODE_COVERAGE_CHECKER + coverage_manager.stop() # Save to the .coverage file - checker.save() + coverage_manager.save() # Apply the configured and requested omit and include data - checker.config.from_args(ignore_errors=None, omit=settings.TEST_CODE_COVERAGE_EXCLUDE_FILES, - include=include, file=None) - for pattern in settings.TEST_CODE_COVERAGE_EXCLUDE_LINES: - checker.exclude(pattern) # Maybe output an HTML report if self.runner.run_full_test_suite and self.runner.html_report: - checker.html_report(directory=settings.TEST_CODE_COVERAGE_REPORT_DIR) - # In any case, build a dictionary with per-file data for this run - reporter = CoverageReporter(checker, checker.config) - self.runner.coverage_data["code"] = reporter.report() + coverage_manager.checker.html_report( + directory=settings.TEST_CODE_COVERAGE_REPORT_DIR + ) + # Generate the output report data + self.runner.coverage_data["code"] = coverage_manager.report( + include=[str(pathlib.Path(p) / "*") for p in self.runner.test_paths] + ) 
self.report_test_result("code") else: self.skipTest("Coverage switched off with --skip-coverage") @@ -722,9 +758,25 @@ def add_arguments(cls, parser): parser.add_argument('--rerun-until-failure', action='store_true', dest='rerun', default=False, help='Run the indicated tests in a loop until a failure occurs. ' ) - - def __init__(self, ignore_lower_coverage=False, skip_coverage=False, save_version_coverage=None, html_report=None, permit_mixed_migrations=None, show_logging=None, validate_html=None, validate_html_harder=None, rerun=None, **kwargs): - # + parser.add_argument('--no-manage-blobstore', action='store_false', dest='manage_blobstore', + help='Disable creating/deleting test buckets in the blob store.' + 'When this argument is used, a set of buckets with "test-" prefixed to their ' + 'names must already exist.') + + def __init__( + self, + ignore_lower_coverage=False, + skip_coverage=False, + save_version_coverage=None, + html_report=None, + permit_mixed_migrations=None, + show_logging=None, + validate_html=None, + validate_html_harder=None, + rerun=None, + manage_blobstore=True, + **kwargs + ): # self.ignore_lower_coverage = ignore_lower_coverage self.check_coverage = not skip_coverage self.save_version_coverage = save_version_coverage @@ -733,7 +785,6 @@ def __init__(self, ignore_lower_coverage=False, skip_coverage=False, save_versio self.show_logging = show_logging self.rerun = rerun self.test_labels = None - global validation_settings validation_settings["validate_html"] = self if validate_html else None validation_settings["validate_html_harder"] = self if validate_html and validate_html_harder else None validation_settings["show_logging"] = show_logging @@ -752,11 +803,10 @@ def __init__(self, ignore_lower_coverage=False, skip_coverage=False, save_versio # contains parent classes to later subclasses, the parent classes will determine the ordering, so use the most # specific classes necessary to get the right ordering: self.reorder_by = (PyFlakesTestCase, MyPyTest,) + self.reorder_by + (StaticLiveServerTestCase, TemplateTagTest, CoverageTest,) + #self.buckets = set() + self.blobstoremanager = TestBlobstoreManager() if manage_blobstore else None def setup_test_environment(self, **kwargs): - global template_coverage_collection - global url_coverage_collection - ietf.utils.mail.test_mode = True ietf.utils.mail.SMTP_ADDR['ip4'] = '127.0.0.1' ietf.utils.mail.SMTP_ADDR['port'] = 2025 @@ -793,23 +843,12 @@ def setup_test_environment(self, **kwargs): "covered": {}, "format": 1, }, - "migration": { - "present": {}, - "format": 3, - } } settings.TEMPLATES[0]['OPTIONS']['loaders'] = ('ietf.utils.test_runner.TemplateCoverageLoader',) + settings.TEMPLATES[0]['OPTIONS']['loaders'] settings.MIDDLEWARE = ('ietf.utils.test_runner.record_urls_middleware',) + tuple(settings.MIDDLEWARE) - self.code_coverage_checker = settings.TEST_CODE_COVERAGE_CHECKER - if not self.code_coverage_checker._started: - sys.stderr.write(" ** Warning: In %s: Expected the coverage checker to have\n" - " been started already, but it wasn't. Doing so now. 
Coverage numbers\n" - " will be off, though.\n" % __name__) - self.code_coverage_checker.start() - if settings.SITE_ID != 1: print(" Changing SITE_ID to '1' during testing.") settings.SITE_ID = 1 @@ -837,7 +876,7 @@ def setup_test_environment(self, **kwargs): try: # remember the value so ietf.utils.mail.send_smtp() will use the same ietf.utils.mail.SMTP_ADDR['port'] = base + offset - self.smtpd_driver = SMTPTestServerDriver((ietf.utils.mail.SMTP_ADDR['ip4'],ietf.utils.mail.SMTP_ADDR['port']),None) + self.smtpd_driver = SMTPTestServerDriver(ietf.utils.mail.SMTP_ADDR['ip4'],ietf.utils.mail.SMTP_ADDR['port'], None) self.smtpd_driver.start() print((" Running an SMTP test server on %(ip4)s:%(port)s to catch outgoing email." % ietf.utils.mail.SMTP_ADDR)) break @@ -891,6 +930,10 @@ def setup_test_environment(self, **kwargs): "form-dup-name": "off", # Don't trip over unused disable blocks "no-unused-disable": "off", + # Ignore focusable elements in aria-hidden elements + "hidden-focusable": "off", + # Ignore missing unique identifier for page "landmarks" + "unique-landmark": "off", }, } @@ -932,6 +975,9 @@ def setup_test_environment(self, **kwargs): print(" (extra pedantically)") self.vnu = start_vnu_server() + if self.blobstoremanager is not None: + self.blobstoremanager.createTestBlobstores() + super(IetfTestRunner, self).setup_test_environment(**kwargs) def teardown_test_environment(self, **kwargs): @@ -962,6 +1008,9 @@ def teardown_test_environment(self, **kwargs): if self.vnu: self.vnu.terminate() + if self.blobstoremanager is not None: + self.blobstoremanager.destroyTestBlobstores() + super(IetfTestRunner, self).teardown_test_environment(**kwargs) def validate(self, testcase): @@ -1099,9 +1148,8 @@ def _extra_tests(self): ), ] if self.check_coverage: - global template_coverage_collection, code_coverage_collection, url_coverage_collection + global template_coverage_collection, url_coverage_collection template_coverage_collection = True - code_coverage_collection = True url_coverage_collection = True tests += [ PyFlakesTestCase(test_runner=self, methodName='pyflakes_test'), @@ -1185,34 +1233,43 @@ def run_tests(self, test_labels, extra_tests=None, **kwargs): return failures -class IetfLiveServerTestCase(StaticLiveServerTestCase): - @classmethod - def setUpClass(cls): - set_coverage_checking(False) - super(IetfLiveServerTestCase, cls).setUpClass() - - def setUp(self): - super(IetfLiveServerTestCase, self).setUp() - # LiveServerTestCase uses TransactionTestCase which seems to - # somehow interfere with the fixture loading process in - # IetfTestRunner when running multiple tests (the first test - # is fine, in the next ones the fixtures have been wiped) - - # this is no doubt solvable somehow, but until then we simply - # recreate them here - from ietf.person.models import Person - if not Person.objects.exists(): - load_and_run_fixtures(verbosity=0) - self.replaced_settings = dict() - if hasattr(settings, 'IDTRACKER_BASE_URL'): - self.replaced_settings['IDTRACKER_BASE_URL'] = settings.IDTRACKER_BASE_URL - settings.IDTRACKER_BASE_URL = self.live_server_url - @classmethod - def tearDownClass(cls): - super(IetfLiveServerTestCase, cls).tearDownClass() - set_coverage_checking(True) - - def tearDown(self): - for k, v in self.replaced_settings.items(): - setattr(settings, k, v) - super().tearDown() +class TestBlobstoreManager(): + # N.B. 
buckets and blobstore are intentional Class-level attributes + buckets: set[Bucket] = set() + + blobstore = boto3.resource("s3", + endpoint_url="http://blobstore:9000", + aws_access_key_id="minio_root", + aws_secret_access_key="minio_pass", + aws_session_token=None, + config = botocore.config.Config( + request_checksum_calculation="when_required", + response_checksum_validation="when_required", + signature_version="s3v4", + ), + #config=botocore.config.Config(signature_version=botocore.UNSIGNED), + verify=False + ) + + def createTestBlobstores(self): + for storagename in settings.ARTIFACT_STORAGE_NAMES: + bucketname = f"test-{storagename}" + try: + bucket = self.blobstore.create_bucket(Bucket=bucketname) + self.buckets.add(bucket) + except self.blobstore.meta.client.exceptions.BucketAlreadyOwnedByYou: + bucket = self.blobstore.Bucket(bucketname) + self.buckets.add(bucket) + + def destroyTestBlobstores(self): + self.emptyTestBlobstores(destroy=True) + + def emptyTestBlobstores(self, destroy=False): + # debug.show('f"Asked to empty test blobstores with destroy={destroy}"') + for bucket in self.buckets: + bucket.objects.delete() + if destroy: + bucket.delete() + if destroy: + self.buckets = set() diff --git a/ietf/utils/test_smtpserver.py b/ietf/utils/test_smtpserver.py deleted file mode 100644 index 66675aa0b1..0000000000 --- a/ietf/utils/test_smtpserver.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright The IETF Trust 2014-2020, All Rights Reserved -# -*- coding: utf-8 -*- - - -import smtpd -import threading -import asyncore - -import debug # pyflakes:ignore - -class AsyncCoreLoopThread(object): - - def wrap_loop(self, exit_condition, timeout=1.0, use_poll=False, map=None): - if map is None: - map = asyncore.socket_map - while map and not exit_condition: - asyncore.loop(timeout=1.0, use_poll=False, map=map, count=1) - - def start(self): - """Start the listening service""" - self.exit_condition = [] - kwargs={'exit_condition':self.exit_condition,'timeout':1.0} - self.thread = threading.Thread(target=self.wrap_loop, kwargs=kwargs) - self.thread.daemon = True - self.thread.daemon = True - self.thread.start() - - def stop(self): - """Stop the listening service""" - self.exit_condition.append(True) - self.thread.join() - - -class SMTPTestChannel(smtpd.SMTPChannel): - -# mail_options = ['BODY=8BITMIME', 'SMTPUTF8'] - - def smtp_RCPT(self, arg): - if not self.mailfrom: - self.push(str('503 Error: need MAIL command')) - return - arg = self._strip_command_keyword('TO:', arg) - address, __ = self._getaddr(arg) - if not address: - self.push(str('501 Syntax: RCPT TO:
    ')) - return - if "poison" in address: - self.push(str('550 Error: Not touching that')) - return - self.rcpt_options = [] - self.rcpttos.append(address) - self.push(str('250 Ok')) - -class SMTPTestServer(smtpd.SMTPServer): - - def __init__(self,localaddr,remoteaddr,inbox): - if inbox is not None: - self.inbox=inbox - else: - self.inbox = [] - smtpd.SMTPServer.__init__(self,localaddr,remoteaddr) - - def handle_accept(self): - pair = self.accept() - if pair is not None: - conn, addr = pair - #channel = SMTPTestChannel(self, conn, addr) - SMTPTestChannel(self, conn, addr) - - def process_message(self, peer, mailfrom, rcpttos, data, mail_options=None, rcpt_options=None): - self.inbox.append(data) - - -class SMTPTestServerDriver(object): - def __init__(self, localaddr, remoteaddr, inbox=None): - self.localaddr=localaddr - self.remoteaddr=remoteaddr - if inbox is not None: - self.inbox = inbox - else: - self.inbox = [] - self.thread_driver = None - - def start(self): - self.smtpserver = SMTPTestServer(self.localaddr,self.remoteaddr,self.inbox) - self.thread_driver = AsyncCoreLoopThread() - self.thread_driver.start() - - def stop(self): - if self.thread_driver: - self.thread_driver.stop() - diff --git a/ietf/utils/test_utils.py b/ietf/utils/test_utils.py index ddd274a613..5faf83d93f 100644 --- a/ietf/utils/test_utils.py +++ b/ietf/utils/test_utils.py @@ -34,10 +34,11 @@ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -import os +import tempfile import re import email import html5lib +import rest_framework.test import requests_mock import shutil import sys @@ -211,6 +212,7 @@ class TestCase(django.test.TestCase): 'INTERNET_DRAFT_ARCHIVE_DIR', 'INTERNET_DRAFT_PATH', 'BIBXML_BASE_PATH', + 'FTP_DIR', ] parser = html5lib.HTMLParser(strict=True) @@ -238,13 +240,8 @@ def normalize(x): def tempdir(self, label): slug = slugify(self.__class__.__name__.replace('.','-')) - dirname = "tmp-{label}-{slug}-dir".format(**locals()) - if 'VIRTUAL_ENV' in os.environ: - dirname = os.path.join(os.environ['VIRTUAL_ENV'], dirname) - path = os.path.abspath(dirname) - if not os.path.exists(path): - os.mkdir(path) - return path + suffix = "-{label}-{slug}-dir".format(**locals()) + return tempfile.mkdtemp(suffix=suffix) def assertNoFormPostErrors(self, response, error_css_selector=".is-invalid"): """Try to fish out form errors, if none found at least check the @@ -305,7 +302,7 @@ def setUp(self): # Replace settings paths with temporary directories. 
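# For example, each name listed in settings_temp_path_overrides (INTERNET_DRAFT_PATH,
# BIBXML_BASE_PATH, FTP_DIR, ...) is pointed at a fresh tempfile.mkdtemp() directory
# via override_settings for the duration of the test.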
self._ietf_temp_dirs = {} # trashed during tearDown, DO NOT put paths you care about in this - for setting in self.settings_temp_path_overrides: + for setting in set(self.settings_temp_path_overrides): self._ietf_temp_dirs[setting] = self.tempdir(slugify(setting)) self._ietf_saved_context = django.test.utils.override_settings(**self._ietf_temp_dirs) self._ietf_saved_context.enable() @@ -316,3 +313,11 @@ def tearDown(self): shutil.rmtree(dir) self.requests_mock.stop() super().tearDown() + + +class APITestCase(TestCase): + """Test case that uses rest_framework's APIClient + + This is equivalent to rest_framework.test.APITestCase, but picks up our + """ + client_class = rest_framework.test.APIClient diff --git a/ietf/utils/tests.py b/ietf/utils/tests.py index 499b874886..99c33f34b3 100644 --- a/ietf/utils/tests.py +++ b/ietf/utils/tests.py @@ -1,26 +1,29 @@ -# Copyright The IETF Trust 2014-2020, All Rights Reserved +# Copyright The IETF Trust 2014-2025, All Rights Reserved # -*- coding: utf-8 -*- import datetime import io import json +import lxml.etree import os.path import pytz import shutil import types -from mock import patch +from unittest.mock import call, patch from pyquery import PyQuery from typing import Dict, List # pyflakes:ignore +from email.message import Message from email.mime.image import MIMEImage from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText -from fnmatch import fnmatch from importlib import import_module from textwrap import dedent from tempfile import mkdtemp +from xml2rfc import log as xml2rfc_log +from xml2rfc.util.date import extract_date as xml2rfc_extract_date from django.apps import apps from django.contrib.auth.models import User @@ -31,27 +34,40 @@ from django.template.defaulttags import URLNode from django.template.loader import get_template, render_to_string from django.templatetags.static import StaticNode +from django.test import RequestFactory from django.urls import reverse as urlreverse import debug # pyflakes:ignore +from ietf.admin.sites import AdminSite from ietf.person.name import name_parts, unidecode_name from ietf.submit.tests import submission_file from ietf.utils.draft import PlaintextDraft, getmeta from ietf.utils.fields import SearchableField from ietf.utils.log import unreachable, assertion -from ietf.utils.mail import send_mail_preformatted, send_mail_text, send_mail_mime, outbox, get_payload_text -from ietf.utils.test_runner import get_template_paths, set_coverage_checking +from ietf.utils.mail import ( + send_mail_preformatted, + send_mail_text, + send_mail_mime, + outbox, + get_payload_text, + decode_header_value, + show_that_mail_was_sent, +) +from ietf.utils.test_runner import ( + get_template_paths, + set_template_coverage, + set_url_coverage, +) from ietf.utils.test_utils import TestCase, unicontent -from ietf.utils.text import parse_unicode from ietf.utils.timezone import timezone_not_near_midnight -from ietf.utils.xmldraft import XMLDraft +from ietf.utils.xmldraft import XMLDraft, InvalidMetadataError, capture_xml2rfc_output class SendingMail(TestCase): def test_send_mail_preformatted(self): msg = """To: to1@example.com, to2@example.com -From: from1@ietf.org, from2@ietf.org +From: from1@ietf.org Cc: cc1@example.com, cc2@example.com Bcc: bcc1@example.com, bcc2@example.com Subject: subject @@ -61,7 +77,7 @@ def test_send_mail_preformatted(self): send_mail_preformatted(None, msg, {}, {}) recv = outbox[-1] self.assertSameEmail(recv['To'], ', ') - self.assertSameEmail(recv['From'], 'from1@ietf.org, 
from2@ietf.org') + self.assertSameEmail(recv['From'], 'from1@ietf.org') self.assertSameEmail(recv['Cc'], 'cc1@example.com, cc2@example.com') self.assertSameEmail(recv['Bcc'], None) self.assertEqual(recv['Subject'], 'subject') @@ -69,14 +85,14 @@ def test_send_mail_preformatted(self): override = { 'To': 'oto1@example.net, oto2@example.net', - 'From': 'ofrom1@ietf.org, ofrom2@ietf.org', + 'From': 'ofrom1@ietf.org', 'Cc': 'occ1@example.net, occ2@example.net', 'Subject': 'osubject', } send_mail_preformatted(request=None, preformatted=msg, extra={}, override=override) recv = outbox[-1] self.assertSameEmail(recv['To'], ', ') - self.assertSameEmail(recv['From'], 'ofrom1@ietf.org, ofrom2@ietf.org') + self.assertSameEmail(recv['From'], 'ofrom1@ietf.org') self.assertSameEmail(recv['Cc'], 'occ1@example.net, occ2@example.net') self.assertSameEmail(recv['Bcc'], None) self.assertEqual(recv['Subject'], 'osubject') @@ -84,14 +100,14 @@ def test_send_mail_preformatted(self): override = { 'To': ['', 'oto2@example.net'], - 'From': ['', 'ofrom2@ietf.org'], + 'From': [''], 'Cc': ['', 'occ2@example.net'], 'Subject': 'osubject', } send_mail_preformatted(request=None, preformatted=msg, extra={}, override=override) recv = outbox[-1] self.assertSameEmail(recv['To'], ', ') - self.assertSameEmail(recv['From'], ', ofrom2@ietf.org') + self.assertSameEmail(recv['From'], '') self.assertSameEmail(recv['Cc'], ', occ2@example.net') self.assertSameEmail(recv['Bcc'], None) self.assertEqual(recv['Subject'], 'osubject') @@ -107,6 +123,135 @@ def test_send_mail_preformatted(self): recv = outbox[-1] self.assertEqual(recv['Fuzz'], 'bucket, monger') + +class MailUtilsTests(TestCase): + def test_decode_header_value(self): + self.assertEqual( + decode_header_value("cake"), + "cake", + "decodes simple string value", + ) + self.assertEqual( + decode_header_value("=?utf-8?b?8J+Ogg==?="), + "\U0001f382", + "decodes single utf-8-encoded part", + ) + self.assertEqual( + decode_header_value("=?utf-8?b?8J+Ogg==?= = =?macintosh?b?jYxrjg==?="), + "\U0001f382 = çåké", + "decodes a value with non-utf-8 encodings", + ) + + # Patch in a side_effect so we can distinguish values that came from decode_header_value. 
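# (with this patch, decode_header_value("to-value") returns "decoded-to-value", so the
# assertions below can tell values that went through the decoder apart from raw ones)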
+ @patch("ietf.utils.mail.decode_header_value", side_effect=lambda s: f"decoded-{s}") + @patch("ietf.utils.mail.messages") + def test_show_that_mail_was_sent(self, mock_messages, mock_decode_header_value): + request = RequestFactory().get("/some/path") + request.user = object() # just needs to exist + msg = Message() + msg["To"] = "to-value" + msg["Subject"] = "subject-value" + msg["Cc"] = "cc-value" + with patch("ietf.ietfauth.utils.has_role", return_value=True): + show_that_mail_was_sent(request, "mail was sent", msg, "bcc-value") + self.assertCountEqual( + mock_decode_header_value.call_args_list, + [call("to-value"), call("subject-value"), call("cc-value"), call("bcc-value")], + ) + self.assertEqual(mock_messages.info.call_args[0][0], request) + self.assertIn("mail was sent", mock_messages.info.call_args[0][1]) + self.assertIn("decoded-subject-value", mock_messages.info.call_args[0][1]) + self.assertIn("decoded-to-value", mock_messages.info.call_args[0][1]) + self.assertIn("decoded-cc-value", mock_messages.info.call_args[0][1]) + self.assertIn("decoded-bcc-value", mock_messages.info.call_args[0][1]) + mock_messages.reset_mock() + mock_decode_header_value.reset_mock() + + # no bcc + with patch("ietf.ietfauth.utils.has_role", return_value=True): + show_that_mail_was_sent(request, "mail was sent", msg, None) + self.assertCountEqual( + mock_decode_header_value.call_args_list, + [call("to-value"), call("subject-value"), call("cc-value")], + ) + self.assertEqual(mock_messages.info.call_args[0][0], request) + self.assertIn("mail was sent", mock_messages.info.call_args[0][1]) + self.assertIn("decoded-subject-value", mock_messages.info.call_args[0][1]) + self.assertIn("decoded-to-value", mock_messages.info.call_args[0][1]) + self.assertIn("decoded-cc-value", mock_messages.info.call_args[0][1]) + # Note: here and below - when using assertNotIn(), leaving off the "decoded-" prefix + # proves that neither the original value nor the decoded value appear. 
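# e.g. assertNotIn("bcc-value", ...) would also fail if "decoded-bcc-value" were present,
# since the decoded form contains the original value as a substring.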
+ self.assertNotIn("bcc-value", mock_messages.info.call_args[0][1]) + mock_messages.reset_mock() + mock_decode_header_value.reset_mock() + + # no cc + del msg["Cc"] + with patch("ietf.ietfauth.utils.has_role", return_value=True): + show_that_mail_was_sent(request, "mail was sent", msg, None) + self.assertCountEqual( + mock_decode_header_value.call_args_list, + [call("to-value"), call("subject-value")], + ) + self.assertEqual(mock_messages.info.call_args[0][0], request) + self.assertIn("mail was sent", mock_messages.info.call_args[0][1]) + self.assertIn("decoded-subject-value", mock_messages.info.call_args[0][1]) + self.assertIn("decoded-to-value", mock_messages.info.call_args[0][1]) + self.assertNotIn("cc-value", mock_messages.info.call_args[0][1]) + self.assertNotIn("bcc-value", mock_messages.info.call_args[0][1]) + mock_messages.reset_mock() + mock_decode_header_value.reset_mock() + + # no to + del msg["To"] + with patch("ietf.ietfauth.utils.has_role", return_value=True): + show_that_mail_was_sent(request, "mail was sent", msg, None) + self.assertCountEqual( + mock_decode_header_value.call_args_list, + [call("[no to]"), call("subject-value")], + ) + self.assertEqual(mock_messages.info.call_args[0][0], request) + self.assertIn("mail was sent", mock_messages.info.call_args[0][1]) + self.assertIn("decoded-subject-value", mock_messages.info.call_args[0][1]) + self.assertIn("decoded-[no to]", mock_messages.info.call_args[0][1]) + self.assertNotIn("to-value", mock_messages.info.call_args[0][1]) + self.assertNotIn("cc-value", mock_messages.info.call_args[0][1]) + self.assertNotIn("bcc-value", mock_messages.info.call_args[0][1]) + mock_messages.reset_mock() + mock_decode_header_value.reset_mock() + + # no subject + del msg["Subject"] + with patch("ietf.ietfauth.utils.has_role", return_value=True): + show_that_mail_was_sent(request, "mail was sent", msg, None) + self.assertCountEqual( + mock_decode_header_value.call_args_list, + [call("[no to]"), call("[no subject]")], + ) + self.assertEqual(mock_messages.info.call_args[0][0], request) + self.assertIn("mail was sent", mock_messages.info.call_args[0][1]) + self.assertIn("decoded-[no subject]", mock_messages.info.call_args[0][1]) + self.assertNotIn("subject-value", mock_messages.info.call_args[0][1]) + self.assertIn("decoded-[no to]", mock_messages.info.call_args[0][1]) + self.assertNotIn("to-value", mock_messages.info.call_args[0][1]) + self.assertNotIn("cc-value", mock_messages.info.call_args[0][1]) + self.assertNotIn("bcc-value", mock_messages.info.call_args[0][1]) + mock_messages.reset_mock() + mock_decode_header_value.reset_mock() + + # user does not have role + with patch("ietf.ietfauth.utils.has_role", return_value=False): + show_that_mail_was_sent(request, "mail was sent", msg, None) + self.assertFalse(mock_messages.called) + + # no user + request.user = None + with patch("ietf.ietfauth.utils.has_role", return_value=True) as mock_has_role: + show_that_mail_was_sent(request, "mail was sent", msg, None) + self.assertFalse(mock_messages.called) + self.assertFalse(mock_has_role.called) + + class TestSMTPServer(TestCase): def test_address_rejected(self): @@ -169,15 +314,16 @@ def qualified(name): return list(callbacks) -class TemplateChecksTestCase(TestCase): +class TemplateChecksTestCase(TestCase): # pragma: no cover paths = [] # type: List[str] templates = {} # type: Dict[str, Template] def setUp(self): super().setUp() - set_coverage_checking(False) - self.paths = list(get_template_paths()) + set_template_coverage(False) + 
set_url_coverage(False) + self.paths = get_template_paths() # already filtered ignores self.paths.sort() for path in self.paths: try: @@ -186,17 +332,14 @@ def setUp(self): pass def tearDown(self): - set_coverage_checking(True) + set_template_coverage(True) + set_url_coverage(True) super().tearDown() def test_parse_templates(self): errors = [] for path in self.paths: - for pattern in settings.TEST_TEMPLATE_IGNORE: - if fnmatch(path, pattern): - continue - if not path in self.templates: - + if path not in self.templates: try: get_template(path) except Exception as e: @@ -324,7 +467,7 @@ def test_all_model_admins_exist(self): User.objects.create_superuser('admin', 'admin@example.org', 'admin+password') self.client.login(username='admin', password='admin+password') rtop = self.client.get("/admin/") - self.assertContains(rtop, 'Django administration') + self.assertContains(rtop, AdminSite.site_header()) for name in self.apps: app_name = self.apps[name] self.assertContains(rtop, name) @@ -403,7 +546,7 @@ def test_get_refs_v2(self): def test_parse_creation_date(self): # override date_today to avoid skew when test runs around midnight today = datetime.date.today() - with patch("ietf.utils.xmldraft.date_today", return_value=today): + with capture_xml2rfc_output(), patch("ietf.utils.xmldraft.date_today", return_value=today): # Note: using a dict as a stand-in for XML elements, which rely on the get() method self.assertEqual( XMLDraft.parse_creation_date({"year": "2022", "month": "11", "day": "24"}), @@ -449,6 +592,212 @@ def test_parse_creation_date(self): ), datetime.date(today.year, 1 if today.month != 1 else 2, 15), ) + # Some exeception-inducing conditions + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError if a year-only date is not current", + ): + XMLDraft.parse_creation_date( + { + "year": str(today.year - 1), + "month": "", + "day": "", + } + ) + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError for a non-numeric year" + ): + XMLDraft.parse_creation_date( + { + "year": "two thousand twenty-five", + "month": "2", + "day": "28", + } + ) + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError for an invalid month" + ): + XMLDraft.parse_creation_date( + { + "year": "2024", + "month": "13", + "day": "28", + } + ) + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError for a misspelled month" + ): + XMLDraft.parse_creation_date( + { + "year": "2024", + "month": "Oktobur", + "day": "28", + } + ) + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError for an invalid day" + ): + XMLDraft.parse_creation_date( + { + "year": "2024", + "month": "feb", + "day": "31", + } + ) + with self.assertRaises( + InvalidMetadataError, + msg="raise an InvalidMetadataError for a non-numeric day" + ): + XMLDraft.parse_creation_date( + { + "year": "2024", + "month": "feb", + "day": "twenty-four", + } + ) + + + def test_parse_docname(self): + with self.assertRaises(ValueError) as cm: + XMLDraft.parse_docname(lxml.etree.Element("xml")) # no docName + self.assertIn("Missing docName attribute", str(cm.exception)) + + # There to be more invalid docNames, but we use XMLDraft in places where we don't + # actually care about the validation, so for now just test what has long been the + # implementation. 
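# The assertions below illustrate the convention: a trailing two-digit revision is
# split off (e.g. "draft-foo-bar-baz-01" -> ("draft-foo-bar-baz", "01")), and names
# without one come back with rev None.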
+ with self.assertRaises(ValueError) as cm: + XMLDraft.parse_docname(lxml.etree.Element("xml", docName="")) # not a valid docName + self.assertIn("Unable to parse docName", str(cm.exception)) + + self.assertEqual( + XMLDraft.parse_docname(lxml.etree.Element("xml", docName="draft-foo-bar-baz-01")), + ("draft-foo-bar-baz", "01"), + ) + + self.assertEqual( + XMLDraft.parse_docname(lxml.etree.Element("xml", docName="draft-foo-bar-baz")), + ("draft-foo-bar-baz", None), + ) + + self.assertEqual( + XMLDraft.parse_docname(lxml.etree.Element("xml", docName="draft-foo-bar-baz-")), + ("draft-foo-bar-baz-", None), + ) + + # This is awful, but is how we've been running for some time. The missing rev will trigger + # validation errors for submissions, so we're at least somewhat guarded against this + # property. + self.assertEqual( + XMLDraft.parse_docname(lxml.etree.Element("xml", docName="-01")), + ("-01", None), + ) + + def test_render_author_name(self): + self.assertEqual( + XMLDraft.render_author_name(lxml.etree.Element("author", fullname="Joanna Q. Public")), + "Joanna Q. Public", + ) + self.assertEqual( + XMLDraft.render_author_name(lxml.etree.Element( + "author", + fullname="Joanna Q. Public", + asciiFullname="Not the Same at All", + )), + "Joanna Q. Public", + ) + self.assertEqual( + XMLDraft.render_author_name(lxml.etree.Element( + "author", + fullname=chr(340)+"ich", + asciiFullname="Rich UTF-8", + )), + chr(340)+"ich (Rich UTF-8)", + ) + self.assertEqual( + XMLDraft.render_author_name(lxml.etree.Element( + "author", + fullname="Joanna Q. Public", + initials="J. Q.", + surname="Public-Private", + )), + "Joanna Q. Public", + ) + self.assertEqual( + XMLDraft.render_author_name(lxml.etree.Element( + "author", + initials="J. Q.", + surname="Public", + )), + "J. Q. Public", + ) + self.assertEqual( + XMLDraft.render_author_name(lxml.etree.Element( + "author", + surname="Public", + )), + "Public", + ) + self.assertEqual( + XMLDraft.render_author_name(lxml.etree.Element( + "author", + initials="J. Q.", + )), + "J. Q.", + ) + + @patch("ietf.utils.xmldraft.XMLDraft.__init__", return_value=None) + def test_get_title(self, mock_init): + xmldraft = XMLDraft("fake") + self.assertTrue(mock_init.called) + # Stub XML that does not have a front/title element + xmldraft.xmlroot = lxml.etree.XML( + "" # no title + ) + self.assertEqual(xmldraft.get_title(), "") + + # Stub XML that has a front/title element + xmldraft.xmlroot = lxml.etree.XML( + "This Is the Title" + ) + self.assertEqual(xmldraft.get_title(), "This Is the Title") + + + def test_capture_xml2rfc_output(self): + """capture_xml2rfc_output reroutes and captures xml2rfc logs""" + orig_write_out = xml2rfc_log.write_out + orig_write_err = xml2rfc_log.write_err + with capture_xml2rfc_output() as outer_log_streams: # ensure no output + # such meta! very Inception! 
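# Nesting the capture contexts lets the test check both that the inner capture does not
# leak into the outer streams and that xml2rfc's log writers are restored on exit.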
+ with capture_xml2rfc_output() as inner_log_streams: + # arbitrary xml2rfc method that triggers a log, nothing special otherwise + xml2rfc_extract_date({"year": "fish"}, datetime.date(2025,3,1)) + self.assertNotEqual(inner_log_streams, outer_log_streams) + self.assertEqual(xml2rfc_log.write_out, outer_log_streams["stdout"], "out stream should be restored") + self.assertEqual(xml2rfc_log.write_err, outer_log_streams["stderr"], "err stream should be restored") + self.assertEqual(xml2rfc_log.write_out, orig_write_out, "original out stream should be restored") + self.assertEqual(xml2rfc_log.write_err, orig_write_err, "original err stream should be restored") + + # don't happen to get any output on stdout and not paranoid enough to force some, just test stderr + self.assertGreater(len(inner_log_streams["stderr"].getvalue()), 0, "want output on inner streams") + self.assertEqual(len(outer_log_streams["stdout"].getvalue()), 0, "no output on outer streams") + self.assertEqual(len(outer_log_streams["stderr"].getvalue()), 0, "no output on outer streams") + + def test_capture_xml2rfc_output_exception_handling(self): + """capture_xml2rfc_output restores streams after an exception""" + orig_write_out = xml2rfc_log.write_out + orig_write_err = xml2rfc_log.write_err + with capture_xml2rfc_output() as outer_log_streams: # ensure no output + with self.assertRaises(RuntimeError), capture_xml2rfc_output() as inner_log_streams: + raise RuntimeError("nooo") + self.assertNotEqual(inner_log_streams, outer_log_streams) + self.assertEqual(xml2rfc_log.write_out, outer_log_streams["stdout"], "out stream should be restored") + self.assertEqual(xml2rfc_log.write_err, outer_log_streams["stderr"], "err stream should be restored") + self.assertEqual(xml2rfc_log.write_out, orig_write_out, "original out stream should be restored") + self.assertEqual(xml2rfc_log.write_err, orig_write_err, "original err stream should be restored") class NameTests(TestCase): @@ -514,24 +863,6 @@ def test_assertion(self): assertion('False') settings.SERVER_MODE = 'test' -class TestRFC2047Strings(TestCase): - def test_parse_unicode(self): - names = ( - ('=?utf-8?b?4Yuz4YuK4Ym1IOGJoOGJgOGIiA==?=', 'ዳዊት በቀለ'), - ('=?utf-8?b?5Li9IOmDnA==?=', '丽 郜'), - ('=?utf-8?b?4KSV4KSu4KWN4KSs4KWL4KScIOCkoeCkvuCksA==?=', 'कम्बोज डार'), - ('=?utf-8?b?zpfPgc6szrrOu861zrnOsSDOm865z4zOvc+Ezrc=?=', 'Ηράκλεια Λιόντη'), - ('=?utf-8?b?15nXqdeo15DXnCDXqNeV15bXoNek15zXkw==?=', 'ישראל רוזנפלד'), - ('=?utf-8?b?5Li95Y2OIOeahw==?=', '丽华 皇'), - ('=?utf-8?b?77ul77qu766V77qzIO+tlu+7ru+vvu+6ju+7pw==?=', 'ﻥﺮﮕﺳ ﭖﻮﯾﺎﻧ'), - ('=?utf-8?b?77uh77uu77qz77uu76++IO+6su+7tO+7p++6jSDvurDvu6Pvuo7vu6jvr74=?=', 'ﻡﻮﺳﻮﯾ ﺲﻴﻧﺍ ﺰﻣﺎﻨﯾ'), - ('=?utf-8?b?ScOxaWdvIFNhbsOnIEliw6HDsWV6IGRlIGxhIFBlw7Fh?=', 'Iñigo Sanç Ibáñez de la Peña'), - ('Mart van Oostendorp', 'Mart van Oostendorp'), - ('', ''), - ) - for encoded_str, unicode in names: - self.assertEqual(unicode, parse_unicode(encoded_str)) - class TestAndroidSiteManifest(TestCase): def test_manifest(self): r = self.client.get(urlreverse('site.webmanifest')) @@ -598,3 +929,12 @@ class TestForm(Form): self.assertTrue(changed_form.has_changed()) unchanged_form = TestForm(initial={'test_field': [1]}, data={'test_field': [1]}) self.assertFalse(unchanged_form.has_changed()) + + +class HealthTests(TestCase): + def test_health(self): + self.assertEqual( + self.client.get("/health/").status_code, + 200, + ) + diff --git a/ietf/utils/tests_coverage.py b/ietf/utils/tests_coverage.py new file mode 100644 index 0000000000..68795994a7 --- /dev/null +++ 
b/ietf/utils/tests_coverage.py @@ -0,0 +1,56 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +"""Tests of the coverage.py module""" + +from unittest import mock + +from django.test import override_settings + +from .coverage import CoverageManager +from .test_utils import TestCase + + +class CoverageManagerTests(TestCase): + @override_settings( + BASE_DIR="/path/to/project/ietf", + TEST_CODE_COVERAGE_EXCLUDE_FILES=["a.py"], + TEST_CODE_COVERAGE_EXCLUDE_LINES=["some-regex"], + ) + @mock.patch("ietf.utils.coverage.Coverage") + def test_coverage_manager(self, mock_coverage): + """CoverageManager managed coverage correctly in non-production mode + + Presumes we're not running tests in production mode. + """ + cm = CoverageManager() + self.assertFalse(cm.started) + + cm.start() + self.assertTrue(cm.started) + self.assertEqual(cm.checker, mock_coverage.return_value) + self.assertTrue(mock_coverage.called) + coverage_kwargs = mock_coverage.call_args.kwargs + self.assertEqual(coverage_kwargs["source"], ["/path/to/project/ietf"]) + self.assertEqual(coverage_kwargs["omit"], ["a.py"]) + self.assertTrue(isinstance(cm.checker.exclude, mock.Mock)) + assert isinstance(cm.checker.exclude, mock.Mock) # for type checker + self.assertEqual(cm.checker.exclude.call_count, 1) + cm.checker.exclude.assert_called_with("some-regex") + + @mock.patch("ietf.utils.coverage.Coverage") + def test_coverage_manager_is_defanged_in_production(self, mock_coverage): + """CoverageManager is a no-op in production mode""" + # Be careful faking settings.SERVER_MODE, but there's really no other way to + # test this. + with override_settings(SERVER_MODE="production"): + cm = CoverageManager() + cm.start() + + # Check that nothing actually happened + self.assertFalse(mock_coverage.called) + self.assertIsNone(cm.checker) + self.assertFalse(cm.started) + + # Check that other methods are guarded appropriately + cm.stop() + cm.save() + self.assertIsNone(cm.report()) diff --git a/ietf/utils/tests_meetecho.py b/ietf/utils/tests_meetecho.py index db3d36f405..c076a3df74 100644 --- a/ietf/utils/tests_meetecho.py +++ b/ietf/utils/tests_meetecho.py @@ -1,19 +1,21 @@ # Copyright The IETF Trust 2021, All Rights Reserved # -*- coding: utf-8 -*- import datetime -import pytz import requests import requests_mock -from unittest.mock import patch +from unittest.mock import call, patch from urllib.parse import urljoin +from zoneinfo import ZoneInfo from django.conf import settings from django.test import override_settings from django.utils import timezone +from ietf.doc.factories import DocumentFactory +from ietf.meeting.factories import SessionFactory, SessionPresentationFactory from ietf.utils.tests import TestCase -from .meetecho import Conference, ConferenceManager, MeetechoAPI, MeetechoAPIError +from .meetecho import Conference, ConferenceManager, MeetechoAPI, MeetechoAPIError, SlidesManager API_BASE = 'https://meetecho-api.example.com' CLIENT_ID = 'datatracker' @@ -22,6 +24,7 @@ 'api_base': API_BASE, 'client_id': CLIENT_ID, 'client_secret': CLIENT_SECRET, + 'slides_notify_time': -1, # always send notification } @@ -31,6 +34,7 @@ class APITests(TestCase): schedule_meeting_url = urljoin(API_BASE, 'meeting/interim/createRoom') fetch_meetings_url = urljoin(API_BASE, 'meeting/interim/fetchRooms') delete_meetings_url = urljoin(API_BASE, 'meeting/interim/deleteRoom') + slide_deck_url = urljoin(API_BASE, "materials") def setUp(self): super().setUp() @@ -78,7 +82,7 @@ def test_schedule_meeting(self): 'rooms': { 
'3d55bce0-535e-4ba8-bb8e-734911cf3c32': { 'room': { - 'id': 18, + 'id': 18, # should match room_id in api.schedule_meeting() below 'start_time': '2021-09-14 10:00:00', 'duration': 130, 'description': 'interim-2021-wgname-01', @@ -93,7 +97,8 @@ def test_schedule_meeting(self): api = MeetechoAPI(API_BASE, CLIENT_ID, CLIENT_SECRET) api_response = api.schedule_meeting( wg_token='my-token', - start_time=pytz.utc.localize(datetime.datetime(2021, 9, 14, 10, 0, 0)), + room_id=18, + start_time=datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.UTC), duration=datetime.timedelta(minutes=130), description='interim-2021-wgname-01', extrainfo='message for staff', @@ -112,6 +117,7 @@ def test_schedule_meeting(self): self.assertEqual( request.json(), { + 'room_id': 18, 'duration': 130, 'start_time': '2021-09-14 10:00:00', 'extrainfo': 'message for staff', @@ -121,11 +127,11 @@ def test_schedule_meeting(self): ) # same time in different time zones for start_time in [ - pytz.utc.localize(datetime.datetime(2021, 9, 14, 10, 0, 0)), - pytz.timezone('america/halifax').localize(datetime.datetime(2021, 9, 14, 7, 0, 0)), - pytz.timezone('europe/kiev').localize(datetime.datetime(2021, 9, 14, 13, 0, 0)), - pytz.timezone('pacific/easter').localize(datetime.datetime(2021, 9, 14, 5, 0, 0)), - pytz.timezone('africa/porto-novo').localize(datetime.datetime(2021, 9, 14, 11, 0, 0)), + datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.UTC), + datetime.datetime(2021, 9, 14, 7, 0, 0, tzinfo=ZoneInfo('America/Halifax')), + datetime.datetime(2021, 9, 14, 13, 0, 0, tzinfo=ZoneInfo('Europe/Kiev')), + datetime.datetime(2021, 9, 14, 5, 0, 0, tzinfo=ZoneInfo('Pacific/Easter')), + datetime.datetime(2021, 9, 14, 11, 0, 0, tzinfo=ZoneInfo('Africa/Porto-Novo')), ]: self.assertEqual( api_response, @@ -143,7 +149,7 @@ def test_schedule_meeting(self): }, } }, - f'Incorrect time conversion for {start_time.tzinfo.zone}', + f'Incorrect time conversion for {start_time.tzinfo}', ) def test_fetch_meetings(self): @@ -192,7 +198,7 @@ def test_fetch_meetings(self): '3d55bce0-535e-4ba8-bb8e-734911cf3c32': { 'room': { 'id': 18, - 'start_time': pytz.utc.localize(datetime.datetime(2021, 9, 14, 10, 0, 0)), + 'start_time': datetime.datetime(2021, 9, 14, 10, 0, 0, tzinfo=datetime.UTC), 'duration': datetime.timedelta(minutes=130), 'description': 'interim-2021-wgname-01', }, @@ -202,7 +208,7 @@ def test_fetch_meetings(self): 'e68e96d4-d38f-475b-9073-ecab46ca96a5': { 'room': { 'id': 23, - 'start_time': pytz.utc.localize(datetime.datetime(2021, 9, 15, 14, 30, 0)), + 'start_time': datetime.datetime(2021, 9, 15, 14, 30, 0, tzinfo=datetime.UTC), 'duration': datetime.timedelta(minutes=30), 'description': 'interim-2021-wgname-02', }, @@ -228,6 +234,136 @@ def test_delete_meeting(self): self.assertIsNone(request.body, 'Delete meeting request has no body') self.assertCountEqual(api_response, data_to_fetch) + def test_add_slide_deck(self): + self.requests_mock.post(self.slide_deck_url, status_code=202) + + api = MeetechoAPI(API_BASE, CLIENT_ID, CLIENT_SECRET) + api_response = api.add_slide_deck( + wg_token="my-token", + session="1234", + deck={ + "title": "A Slide Deck", + "id": 17, + "url": "https://example.com/decks/17", + "rev": "00", + "order": 0, + } + ) + self.assertIsNone(api_response) # no return value from this call + + self.assertTrue(self.requests_mock.called) + request = self.requests_mock.last_request + self.assertIn("Authorization", request.headers) + self.assertEqual( + request.headers["Content-Type"], + "application/json", + "Incorrect 
request content-type", + ) + self.assertEqual(request.headers["Authorization"], "bearer my-token", + "Incorrect request authorization header") + self.assertEqual( + request.json(), + { + "session": "1234", + "title": "A Slide Deck", + "id": 17, + "url": "https://example.com/decks/17", + "rev": "00", + "order": 0, + }, + "Incorrect request content" + ) + + def test_delete_slide_deck(self): + self.requests_mock.delete(self.slide_deck_url, status_code=202) + + api = MeetechoAPI(API_BASE, CLIENT_ID, CLIENT_SECRET) + api_response = api.delete_slide_deck( + wg_token="my-token", + session="1234", + id=17, + ) + self.assertIsNone(api_response) # no return value from this call + + self.assertTrue(self.requests_mock.called) + request = self.requests_mock.last_request + self.assertIn("Authorization", request.headers) + self.assertEqual( + request.headers["Content-Type"], + "application/json", + "Incorrect request content-type", + ) + self.assertEqual(request.headers["Authorization"], "bearer my-token", + "Incorrect request authorization header") + self.assertEqual( + request.json(), + { + "session": "1234", + "id": 17, + }, + "Incorrect request content" + ) + + def test_update_slide_decks(self): + self.requests_mock.put(self.slide_deck_url, status_code=202) + + api = MeetechoAPI(API_BASE, CLIENT_ID, CLIENT_SECRET) + api_response = api.update_slide_decks( + wg_token="my-token", + session="1234", + decks=[ + { + "title": "A Slide Deck", + "id": 17, + "url": "https://example.com/decks/17", + "rev": "00", + "order": 0, + }, + { + "title": "Another Slide Deck", + "id": 23, + "url": "https://example.com/decks/23", + "rev": "03", + "order": 1, + } + ] + ) + self.assertIsNone(api_response) # no return value from this call + + self.assertTrue(self.requests_mock.called) + request = self.requests_mock.last_request + self.assertIn("Authorization", request.headers) + self.assertEqual( + request.headers["Content-Type"], + "application/json", + "Incorrect request content-type", + ) + self.assertEqual(request.headers["Authorization"], "bearer my-token", + "Incorrect request authorization header") + self.assertEqual( + request.json(), + { + "session": "1234", + "decks": [ + { + "title": "A Slide Deck", + "id": 17, + "url": "https://example.com/decks/17", + "rev": "00", + "order": 0, + }, + { + "title": "Another Slide Deck", + "id": 23, + "url": "https://example.com/decks/23", + "rev": "03", + "order": 1, + }, + ], + }, + "Incorrect request content" + ) + def test_request_helper_failed_requests(self): self.requests_mock.register_uri(requests_mock.ANY, urljoin(API_BASE, 'unauthorized/url/endpoint'), status_code=401) self.requests_mock.register_uri(requests_mock.ANY, urljoin(API_BASE, 'forbidden/url/endpoint'), status_code=403) @@ -250,7 +386,7 @@ def test_request_helper_exception(self): def test_time_serialization(self): """Time de/serialization should be consistent""" - time = timezone.now().astimezone(pytz.utc).replace(microsecond=0) # cut off to 0 microseconds + time = timezone.now().astimezone(datetime.UTC).replace(microsecond=0) # cut off to 0 microseconds api = MeetechoAPI(API_BASE, CLIENT_ID, CLIENT_SECRET) self.assertEqual(api._deserialize_time(api._serialize_time(time)), time) @@ -264,7 +400,7 @@ def test_conference_from_api_dict(self): 'session-1-uuid': { 'room': { 'id': 1, - 'start_time': pytz.utc.localize(datetime.datetime(2022,2,4,1,2,3)), + 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC), 'duration': datetime.timedelta(minutes=45), 'description': 'some-description', }, @@ -274,7 
+410,7 @@ def test_conference_from_api_dict(self): 'session-2-uuid': { 'room': { 'id': 2, - 'start_time': pytz.utc.localize(datetime.datetime(2022,2,5,4,5,6)), + 'start_time': datetime.datetime(2022,2,5,4,5,6, tzinfo=datetime.UTC), 'duration': datetime.timedelta(minutes=90), 'description': 'another-description', }, @@ -291,7 +427,7 @@ def test_conference_from_api_dict(self): id=1, public_id='session-1-uuid', description='some-description', - start_time=pytz.utc.localize(datetime.datetime(2022, 2, 4, 1, 2, 3)), + start_time=datetime.datetime(2022, 2, 4, 1, 2, 3, tzinfo=datetime.UTC), duration=datetime.timedelta(minutes=45), url='https://example.com/some/url', deletion_token='delete-me', @@ -301,7 +437,7 @@ def test_conference_from_api_dict(self): id=2, public_id='session-2-uuid', description='another-description', - start_time=pytz.utc.localize(datetime.datetime(2022, 2, 5, 4, 5, 6)), + start_time=datetime.datetime(2022, 2, 5, 4, 5, 6, tzinfo=datetime.UTC), duration=datetime.timedelta(minutes=90), url='https://example.com/another/url', deletion_token='delete-me-too', @@ -317,7 +453,7 @@ def test_fetch(self, mock_fetch, _): 'session-1-uuid': { 'room': { 'id': 1, - 'start_time': pytz.utc.localize(datetime.datetime(2022,2,4,1,2,3)), + 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC), 'duration': datetime.timedelta(minutes=45), 'description': 'some-description', }, @@ -336,7 +472,7 @@ def test_fetch(self, mock_fetch, _): id=1, public_id='session-1-uuid', description='some-description', - start_time=pytz.utc.localize(datetime.datetime(2022,2,4,1,2,3)), + start_time=datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC), duration=datetime.timedelta(minutes=45), url='https://example.com/some/url', deletion_token='delete-me', @@ -351,8 +487,8 @@ def test_create(self, mock_schedule, _): 'rooms': { 'session-1-uuid': { 'room': { - 'id': 1, - 'start_time': pytz.utc.localize(datetime.datetime(2022,2,4,1,2,3)), + 'id': 1, # value should match session_id param to cm.create() below + 'start_time': datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC), 'duration': datetime.timedelta(minutes=45), 'description': 'some-description', }, @@ -362,7 +498,7 @@ def test_create(self, mock_schedule, _): }, } cm = ConferenceManager(settings.MEETECHO_API_CONFIG) - result = cm.create('group', 'desc', 'starttime', 'dur', 'extra') + result = cm.create('group', '1', 'desc', 'starttime', 'dur', 'extra') self.assertEqual( result, [Conference( @@ -370,7 +506,7 @@ def test_create(self, mock_schedule, _): id=1, public_id='session-1-uuid', description='some-description', - start_time=pytz.utc.localize(datetime.datetime(2022,2,4,1,2,3)), + start_time=datetime.datetime(2022,2,4,1,2,3, tzinfo=datetime.UTC), duration=datetime.timedelta(minutes=45), url='https://example.com/some/url', deletion_token='delete-me', @@ -381,6 +517,7 @@ def test_create(self, mock_schedule, _): kwargs, { 'wg_token': 'atoken', + 'room_id': 1, 'description': 'desc', 'start_time': 'starttime', 'duration': 'dur', @@ -394,10 +531,176 @@ def test_delete_conference(self, mock_delete): args, kwargs = mock_delete.call_args self.assertEqual(args, ('delete-this',)) - @patch('ietf.utils.meetecho.MeetechoAPI.delete_meeting') def test_delete_by_url(self, mock_delete): cm = ConferenceManager(settings.MEETECHO_API_CONFIG) cm.delete_conference(Conference(None, None, None, None, None, None, 'the-url', 'delete-this')) args, kwargs = mock_delete.call_args self.assertEqual(args, ('delete-this',)) + + +@patch.object(SlidesManager, 'wg_token', 
return_value='atoken') +@override_settings(MEETECHO_API_CONFIG=API_CONFIG) +class SlidesManagerTests(TestCase): + @patch("ietf.utils.meetecho.MeetechoAPI.add_slide_deck") + def test_add(self, mock_add, mock_wg_token): + sm = SlidesManager(settings.MEETECHO_API_CONFIG) + session = SessionFactory() + slides_doc = DocumentFactory(type_id="slides") + retval = sm.add(session, slides_doc, 13) + self.assertIs(retval, True) + self.assertTrue(mock_wg_token.called) + self.assertTrue(mock_add.called) + self.assertEqual( + mock_add.call_args, + call( + wg_token="atoken", + session=str(session.pk), + deck={ + "id": slides_doc.pk, + "title": slides_doc.title, + "url": slides_doc.get_href(session.meeting), + "rev": slides_doc.rev, + "order": 13, + }, + ), + ) + + # Test return value when no update is sent. Really ought to do a more + # careful test of the _should_send_update() method. + sm = SlidesManager( + settings.MEETECHO_API_CONFIG | {"slides_notify_time": None} + ) + retval = sm.add(session, slides_doc, 14) + self.assertIs(retval, False) + + @patch("ietf.utils.meetecho.MeetechoAPI.update_slide_decks") + @patch("ietf.utils.meetecho.MeetechoAPI.delete_slide_deck") + def test_delete(self, mock_delete, mock_update, mock_wg_token): + sm = SlidesManager(settings.MEETECHO_API_CONFIG) + # Test scenario: we had a session with two slide decks and we already deleted the SessionPresentation + # for one and are now updating Meetecho + slides = SessionPresentationFactory(document__type_id="slides", order=1) # still attached to the session + session = slides.session + slides_doc = slides.document + removed_slides_doc = DocumentFactory(type_id="slides") + + with self.assertRaises(MeetechoAPIError): + sm.delete(session, slides_doc) # can't remove slides still attached to the session + self.assertFalse(any([mock_wg_token.called, mock_delete.called, mock_update.called])) + + retval = sm.delete(session, removed_slides_doc) + self.assertIs(retval, True) + self.assertTrue(mock_wg_token.called) + self.assertTrue(mock_delete.called) + self.assertEqual( + mock_delete.call_args, + call(wg_token="atoken", session=str(session.pk), id=removed_slides_doc.pk), + ) + self.assertTrue(mock_update.called) + self.assertEqual( + mock_update.call_args, + call( + wg_token="atoken", + session=str(session.pk), + decks=[ + { + "id": slides_doc.pk, + "title": slides_doc.title, + "url": slides_doc.get_href(session.meeting), + "rev": slides_doc.rev, + "order": 1, + }, + ] + ) + ) + mock_delete.reset_mock() + mock_update.reset_mock() + + # Delete the other session and check that we don't make the update call + slides.delete() + retval = sm.delete(session, slides_doc) + self.assertIs(retval, True) + self.assertTrue(mock_delete.called) + self.assertFalse(mock_update.called) + + # Test return value when no update is sent. Really ought to do a more + # careful test of the _should_send_update() method. 
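# (slides_notify_time is -1 in API_CONFIG above, i.e. always notify; overriding it to
# None below presumably suppresses the update, so the call is expected to return False)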
+ sm = SlidesManager( + settings.MEETECHO_API_CONFIG | {"slides_notify_time": None} + ) + retval = sm.delete(session, slides_doc) + self.assertIs(retval, False) + + @patch("ietf.utils.meetecho.MeetechoAPI.delete_slide_deck") + @patch("ietf.utils.meetecho.MeetechoAPI.add_slide_deck") + def test_revise(self, mock_add, mock_delete, mock_wg_token): + sm = SlidesManager(settings.MEETECHO_API_CONFIG) + slides = SessionPresentationFactory(document__type_id="slides", order=23) + slides_doc = slides.document + retval = sm.revise(slides.session, slides_doc) + self.assertIs(retval, True) + self.assertTrue(mock_wg_token.called) + self.assertTrue(mock_delete.called) + self.assertEqual( + mock_delete.call_args, + call(wg_token="atoken", session=str(slides.session.pk), id=slides_doc.pk), + ) + self.assertTrue(mock_add.called) + self.assertEqual( + mock_add.call_args, + call( + wg_token="atoken", + session=str(slides.session.pk), + deck={ + "id": slides_doc.pk, + "title": slides_doc.title, + "url": slides_doc.get_href(slides.session.meeting), + "rev": slides_doc.rev, + "order": 23, + }, + ), + ) + + # Test return value when no update is sent. Really ought to do a more + # careful test of the _should_send_update() method. + sm = SlidesManager( + settings.MEETECHO_API_CONFIG | {"slides_notify_time": None} + ) + retval = sm.revise(slides.session, slides_doc) + self.assertIs(retval, False) + + + @patch("ietf.utils.meetecho.MeetechoAPI.update_slide_decks") + def test_send_update(self, mock_send_update, mock_wg_token): + sm = SlidesManager(settings.MEETECHO_API_CONFIG) + slides = SessionPresentationFactory(document__type_id="slides") + SessionPresentationFactory(session=slides.session, document__type_id="agenda") + retval = sm.send_update(slides.session) + self.assertIs(retval, True) + self.assertTrue(mock_wg_token.called) + self.assertTrue(mock_send_update.called) + self.assertEqual( + mock_send_update.call_args, + call( + wg_token="atoken", + session=str(slides.session_id), + decks=[ + { + "id": slides.document_id, + "title": slides.document.title, + "url": slides.document.get_href(slides.session.meeting), + "rev": slides.document.rev, + "order": 0, + } + ] + ) + ) + + # Test return value when no update is sent. Really ought to do a more + # careful test of the _should_send_update() method. + sm = SlidesManager( + settings.MEETECHO_API_CONFIG | {"slides_notify_time": None} + ) + retval = sm.send_update(slides.session) + self.assertIs(retval, False) diff --git a/ietf/utils/tests_searchindex.py b/ietf/utils/tests_searchindex.py new file mode 100644 index 0000000000..e9fbf52020 --- /dev/null +++ b/ietf/utils/tests_searchindex.py @@ -0,0 +1,213 @@ +# Copyright The IETF Trust 2026, All Rights Reserved +from unittest import mock + +import typesense.exceptions +from django.conf import settings +from django.test.utils import override_settings + +from . 
import searchindex +from .test_utils import TestCase +from ..blobdb.models import Blob +from ..doc.factories import ( + WgDraftFactory, + WgRfcFactory, + PublishedRfcDocEventFactory, + BcpFactory, + StdFactory, +) +from ..doc.models import Document +from ..doc.storage_utils import store_str +from ..person.factories import PersonFactory + + +class SearchindexTests(TestCase): + def test_enabled(self): + with override_settings(): + try: + del settings.SEARCHINDEX_CONFIG + except AttributeError: + pass + self.assertFalse(searchindex.enabled()) + with override_settings( + SEARCHINDEX_CONFIG={"TYPESENSE_API_KEY": "this-is-not-a-key"} + ): + self.assertFalse(searchindex.enabled()) + with override_settings( + SEARCHINDEX_CONFIG={"TYPESENSE_API_URL": "http://example.com"} + ): + self.assertTrue(searchindex.enabled()) + + def test_sanitize_text(self): + dirty_text = """ + + This is text. It + is <---- full of \tprobl.....ems! Fix it. + """ + sanitized = "This is text It is full of problems Fix it." + self.assertEqual(searchindex._sanitize_text(dirty_text), sanitized) + + @override_settings( + SEARCHINDEX_CONFIG={ + "TYPESENSE_API_URL": "http://ts.example.com", + "TYPESENSE_API_KEY": "test-api-key", + "TYPESENSE_COLLECTION_NAME": "frogs", + } + ) + def test_typesense_doc_from_rfc(self): + not_rfc = WgDraftFactory() + assert isinstance(not_rfc, Document) + with self.assertRaises(AssertionError): + searchindex.typesense_doc_from_rfc(not_rfc) + + invalid_rfc = WgRfcFactory(name="rfc1000000", rfc_number=None) + assert isinstance(invalid_rfc, Document) + with self.assertRaises(AssertionError): + searchindex.typesense_doc_from_rfc(invalid_rfc) + + rfc = PublishedRfcDocEventFactory().doc + assert isinstance(rfc, Document) + result = searchindex.typesense_doc_from_rfc(rfc) + # Check a few values, not exhaustive + self.assertEqual(result["id"], f"doc-{rfc.pk}") + self.assertEqual(result["rfcNumber"], rfc.rfc_number) + self.assertEqual(result["abstract"], searchindex._sanitize_text(rfc.abstract)) + self.assertEqual(result["pages"], rfc.pages) + self.assertNotIn("adName", result) + self.assertNotIn("content", result) # no blob + self.assertNotIn("subseries", result) + + # repeat, this time with contents, an AD, and subseries docs + store_str( + kind="rfc", + name=f"txt/{rfc.name}.txt", + content="The contents of this RFC", + doc_name=rfc.name, + doc_rev=rfc.rev, # expected to be None + ) + rfc.ad = PersonFactory(name="Alfred D. Rector") + # Put it in two Subseries docs to be sure this does not break things + # (the typesense schema does not support this for real at the moment) + BcpFactory(contains=[rfc], name="bcp1234") + StdFactory(contains=[rfc], name="std1234") + result = searchindex.typesense_doc_from_rfc(rfc) + # Check a few values, not exhaustive + self.assertEqual( + result["content"], + searchindex._sanitize_text("The contents of this RFC"), + ) + self.assertEqual(result["adName"], "Alfred D. Rector") + self.assertIn("subseries", result) + ss_dict = result["subseries"] + # We should get one of the two subseries docs, but neither is more correct + # than the other... 
+ self.assertTrue( + any( + ss_dict == {"acronym": ss_type, "number": 1234, "total": 1} + for ss_type in ["bcp", "std"] + ) + ) + + # Finally, delete the contents blob and make sure things don't blow up + Blob.objects.get(bucket="rfc", name=f"txt/{rfc.name}.txt").delete() + result = searchindex.typesense_doc_from_rfc(rfc) + self.assertNotIn("content", result) + + @override_settings( + SEARCHINDEX_CONFIG={ + "TYPESENSE_API_URL": "http://ts.example.com", + "TYPESENSE_API_KEY": "test-api-key", + "TYPESENSE_COLLECTION_NAME": "frogs", + } + ) + @mock.patch("ietf.utils.searchindex.typesense_doc_from_rfc") + @mock.patch("ietf.utils.searchindex.typesense.Client") + def test_update_or_create_rfc_entry( + self, mock_ts_client_constructor, mock_tdoc_from_rfc + ): + fake_tdoc = object() + mock_tdoc_from_rfc.return_value = fake_tdoc + rfc = WgRfcFactory() + assert isinstance(rfc, Document) + searchindex.update_or_create_rfc_entry(rfc) + self.assertTrue(mock_ts_client_constructor.called) + # walk the tree down to the method we expected to be called... + mock_upsert = mock_ts_client_constructor.return_value.collections[ + "frogs" # matches value in override_settings above + ].documents.upsert + self.assertTrue(mock_upsert.called) + self.assertEqual(mock_upsert.call_args, mock.call(fake_tdoc)) + + @override_settings( + SEARCHINDEX_CONFIG={ + "TYPESENSE_API_URL": "http://ts.example.com", + "TYPESENSE_API_KEY": "test-api-key", + "TYPESENSE_COLLECTION_NAME": "frogs", + } + ) + @mock.patch("ietf.utils.searchindex.typesense_doc_from_rfc") + @mock.patch("ietf.utils.searchindex.typesense.Client") + def test_update_or_create_rfc_entries( + self, mock_ts_client_constructor, mock_tdoc_from_rfc + ): + fake_tdoc = object() + mock_tdoc_from_rfc.return_value = fake_tdoc + rfc = WgRfcFactory() + assert isinstance(rfc, Document) + searchindex.update_or_create_rfc_entries([rfc] * 50) # list of docs... + self.assertEqual(mock_ts_client_constructor.call_count, 1) + # walk the tree down to the method we expected to be called... + mock_import_ = mock_ts_client_constructor.return_value.collections[ + "frogs" # matches value in override_settings above + ].documents.import_ + self.assertEqual(mock_import_.call_count, 1) + self.assertEqual( + mock_import_.call_args, mock.call([fake_tdoc] * 50, {"action": "upsert"}) + ) + + mock_import_.reset_mock() + searchindex.update_or_create_rfc_entries([rfc] * 50, batchsize=20) + self.assertEqual(mock_ts_client_constructor.call_count, 2) # one more + # walk the tree down to the method we expected to be called... 
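# With 50 documents and batchsize=20, the upsert is expected to be split into three
# import_ calls of 20, 20, and 10 documents, as asserted below.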
+ mock_import_ = mock_ts_client_constructor.return_value.collections[ + "frogs" # matches value in override_settings above + ].documents.import_ + self.assertEqual(mock_import_.call_count, 3) + self.assertEqual( + mock_import_.call_args_list, + [ + mock.call([fake_tdoc] * 20, {"action": "upsert"}), + mock.call([fake_tdoc] * 20, {"action": "upsert"}), + mock.call([fake_tdoc] * 10, {"action": "upsert"}), + ], + ) + + @override_settings( + SEARCHINDEX_CONFIG={ + "TYPESENSE_API_URL": "http://ts.example.com", + "TYPESENSE_API_KEY": "test-api-key", + "TYPESENSE_COLLECTION_NAME": "frogs", + } + ) + @mock.patch("ietf.utils.searchindex.typesense.Client") + def test_create_collection(self, mock_ts_client_constructor): + searchindex.create_collection() + self.assertEqual(mock_ts_client_constructor.call_count, 1) + mock_collections = mock_ts_client_constructor.return_value.collections + self.assertTrue(mock_collections.create.called) + self.assertEqual(mock_collections.create.call_args[0][0]["name"], "frogs") + + @override_settings( + SEARCHINDEX_CONFIG={ + "TYPESENSE_API_URL": "http://ts.example.com", + "TYPESENSE_API_KEY": "test-api-key", + "TYPESENSE_COLLECTION_NAME": "frogs", + } + ) + @mock.patch("ietf.utils.searchindex.typesense.Client") + def test_delete_collection(self, mock_ts_client_constructor): + searchindex.delete_collection() + self.assertEqual(mock_ts_client_constructor.call_count, 1) + mock_collections = mock_ts_client_constructor.return_value.collections + self.assertTrue(mock_collections["frogs"].delete.called) + + mock_collections["frogs"].side_effect = typesense.exceptions.ObjectNotFound + searchindex.delete_collection() # should ignore the exception diff --git a/ietf/utils/tests_text.py b/ietf/utils/tests_text.py new file mode 100644 index 0000000000..51aa2eff13 --- /dev/null +++ b/ietf/utils/tests_text.py @@ -0,0 +1,71 @@ +# Copyright The IETF Trust 2021-2026, All Rights Reserved +from ietf.utils.test_utils import TestCase +from ietf.utils.text import parse_unicode, decode_document_content + + +class TestDecoders(TestCase): + def test_parse_unicode(self): + names = ( + ("=?utf-8?b?4Yuz4YuK4Ym1IOGJoOGJgOGIiA==?=", "ዳዊት በቀለ"), + ("=?utf-8?b?5Li9IOmDnA==?=", "丽 郜"), + ("=?utf-8?b?4KSV4KSu4KWN4KSs4KWL4KScIOCkoeCkvuCksA==?=", "कम्बोज डार"), + ("=?utf-8?b?zpfPgc6szrrOu861zrnOsSDOm865z4zOvc+Ezrc=?=", "Ηράκλεια Λιόντη"), + ("=?utf-8?b?15nXqdeo15DXnCDXqNeV15bXoNek15zXkw==?=", "ישראל רוזנפלד"), + ("=?utf-8?b?5Li95Y2OIOeahw==?=", "丽华 皇"), + ("=?utf-8?b?77ul77qu766V77qzIO+tlu+7ru+vvu+6ju+7pw==?=", "ﻥﺮﮕﺳ ﭖﻮﯾﺎﻧ"), + ( + "=?utf-8?b?77uh77uu77qz77uu76++IO+6su+7tO+7p++6jSDvurDvu6Pvuo7vu6jvr74=?=", + "ﻡﻮﺳﻮﯾ ﺲﻴﻧﺍ ﺰﻣﺎﻨﯾ", + ), + ( + "=?utf-8?b?ScOxaWdvIFNhbsOnIEliw6HDsWV6IGRlIGxhIFBlw7Fh?=", + "Iñigo Sanç Ibáñez de la Peña", + ), + ("Mart van Oostendorp", "Mart van Oostendorp"), + ("", ""), + ) + for encoded_str, unicode in names: + self.assertEqual(unicode, parse_unicode(encoded_str)) + + def test_decode_document_content(self): + utf8_bytes = "𒀭𒊩𒌆𒄈𒋢".encode("utf-8") # ends with 4-byte character + latin1_bytes = "àéîøü".encode("latin-1") + other_bytes = "àéîøü".encode("macintosh") # different from its latin-1 encoding + assert other_bytes.decode("macintosh") != other_bytes.decode("latin-1"),\ + "test broken: other_bytes must decode differently as latin-1" + + # simplest case + self.assertEqual( + decode_document_content(utf8_bytes), + utf8_bytes.decode(), + ) + # losing 1-4 bytes from the end leave the last character incomplete; the + # decoder should decode all but that last character + 
self.assertEqual( + decode_document_content(utf8_bytes[:-1]), + utf8_bytes.decode()[:-1], + ) + self.assertEqual( + decode_document_content(utf8_bytes[:-2]), + utf8_bytes.decode()[:-1], + ) + self.assertEqual( + decode_document_content(utf8_bytes[:-3]), + utf8_bytes.decode()[:-1], + ) + self.assertEqual( + decode_document_content(utf8_bytes[:-4]), + utf8_bytes.decode()[:-1], + ) + + # latin-1 is also simple + self.assertEqual( + decode_document_content(latin1_bytes), + latin1_bytes.decode("latin-1"), + ) + + # other character sets are just treated as latin1 (bug? feature? you decide) + self.assertEqual( + decode_document_content(other_bytes), + other_bytes.decode("latin-1"), + ) diff --git a/ietf/utils/text.py b/ietf/utils/text.py index 48f5538cba..2763056e1a 100644 --- a/ietf/utils/text.py +++ b/ietf/utils/text.py @@ -1,17 +1,15 @@ # Copyright The IETF Trust 2016-2020, All Rights Reserved # -*- coding: utf-8 -*- - -import bleach # type: ignore -import copy +import bleach import email import re import textwrap import tlds import unicodedata -from django.core.validators import URLValidator from django.core.exceptions import ValidationError +from django.core.validators import URLValidator from django.utils.functional import keep_lazy from django.utils.safestring import mark_safe @@ -19,57 +17,58 @@ from .texescape import init as texescape_init, tex_escape_map -tlds_sorted = sorted(tlds.tld_set, key=len, reverse=True) -protocols = set(bleach.sanitizer.ALLOWED_PROTOCOLS) -protocols.add("ftp") # we still have some ftp links -protocols.add("xmpp") # we still have some xmpp links - -tags = set(bleach.sanitizer.ALLOWED_TAGS).union( - { - # fmt: off - 'a', 'abbr', 'acronym', 'address', 'b', 'big', - 'blockquote', 'body', 'br', 'caption', 'center', 'cite', 'code', 'col', - 'colgroup', 'dd', 'del', 'dfn', 'dir', 'div', 'dl', 'dt', 'em', 'font', - 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'head', 'hr', 'html', 'i', 'ins', 'kbd', - 'li', 'ol', 'p', 'pre', 'q', 's', 'samp', 'small', 'span', 'strike', 'style', - 'strong', 'sub', 'sup', 'table', 'title', 'tbody', 'td', 'tfoot', 'th', 'thead', - 'tr', 'tt', 'u', 'ul', 'var' - # fmt: on - } -) +# Sort in reverse so substrings are considered later - e.g., so ".co" comes after ".com". +tlds_sorted = sorted(tlds.tld_set, reverse=True) -attributes = copy.copy(bleach.sanitizer.ALLOWED_ATTRIBUTES) -attributes["*"] = ["id"] -attributes["ol"] = ["start"] +# Protocols we're interested in auto-linking. See also ietf.utils.html.acceptable_protocols, +# which is protocols we allow people to include explicitly in sanitized html. +linkable_protocols = ["http", "https", "mailto", "ftp", "xmpp"] -bleach_cleaner = bleach.sanitizer.Cleaner( - tags=tags, attributes=attributes, protocols=protocols, strip=True -) -validate_url = URLValidator() +_validate_url = URLValidator() def check_url_validity(attrs, new=False): + """Callback for bleach linkify + + :param attrs: dict of attributes of the tag + :param new: boolean - True if the link is new; False if was found in text + :return: new dict of attributes for the link, or None to block link creation + + Attributes are namespaced, so normally look like `(None, "SomeAttribute")`. + This includes as the keys in the `attrs` argument, so `attrs[(None, "href")]` + would be the value of the href attribute. 
+ """ if (None, "href") not in attrs: # rfc2html creates a tags without href return attrs url = attrs[(None, "href")] try: if url.startswith("http"): - validate_url(url) + _validate_url(url) except ValidationError: return None return attrs -bleach_linker = bleach.Linker( +_bleach_linker = bleach.Linker( callbacks=[check_url_validity], - url_re=bleach.linkifier.build_url_re(tlds=tlds_sorted, protocols=protocols), + url_re=bleach.linkifier.build_url_re(tlds=tlds_sorted, protocols=linkable_protocols), email_re=bleach.linkifier.build_email_re(tlds=tlds_sorted), # type: ignore parse_email=True, ) +def linkify(text): + """Convert URL-ish substrings into HTML links + + This does no sanitization whatsoever. Caller must sanitize the input or output as + contextually appropriate. Do not call `mark_safe()` on the output if the input is + user-provided unless it has been sanitized or escaped. + """ + return _bleach_linker.linkify(text) + + @keep_lazy(str) def xslugify(value): """ @@ -264,3 +263,21 @@ def parse_unicode(text): else: text = decoded_string return text + + +def decode_document_content(content: bytes) -> str: + """Decode document contents as utf-8 or latin1 + + Method was developed in DocumentInfo.text() where it gave acceptable results + for existing documents / RFCs. + """ + try: + return content.decode("utf-8") + except UnicodeDecodeError: + pass + for back in range(1, 4): + try: + return content[:-back].decode("utf-8") + except UnicodeDecodeError: + pass + return content.decode("latin-1") # everything is legal in latin-1 diff --git a/ietf/utils/timezone.py b/ietf/utils/timezone.py index a396b5e82d..e08dfa02f2 100644 --- a/ietf/utils/timezone.py +++ b/ietf/utils/timezone.py @@ -26,7 +26,7 @@ def _tzinfo(tz: Union[str, datetime.tzinfo, None]): Accepts a tzinfo or string containing a timezone name. Defaults to UTC if tz is None. 
""" if tz is None: - return datetime.timezone.utc + return datetime.UTC elif isinstance(tz, datetime.tzinfo): return tz else: diff --git a/ietf/utils/unicodenormalize.py b/ietf/utils/unicodenormalize.py new file mode 100644 index 0000000000..8644dbdb79 --- /dev/null +++ b/ietf/utils/unicodenormalize.py @@ -0,0 +1,9 @@ +# Copyright The IETF Trust 2025, All Rights Reserved +import unicodedata + +def normalize_for_sorting(text): + """Normalize text for proper accent-aware sorting.""" + # Normalize the text to NFD (decomposed form) + decomposed = unicodedata.normalize('NFD', text) + # Filter out combining diacritical marks + return ''.join(char for char in decomposed if not unicodedata.combining(char)) diff --git a/ietf/utils/validators.py b/ietf/utils/validators.py index 9642a2877c..a99de72724 100644 --- a/ietf/utils/validators.py +++ b/ietf/utils/validators.py @@ -4,6 +4,8 @@ import os import re +from email.utils import parseaddr + from pyquery import PyQuery from urllib.parse import urlparse, urlsplit, urlunsplit @@ -11,7 +13,13 @@ from django.apps import apps from django.conf import settings from django.core.exceptions import ObjectDoesNotExist, ValidationError -from django.core.validators import RegexValidator, URLValidator, EmailValidator, BaseValidator +from django.core.validators import ( + RegexValidator, + URLValidator, + BaseValidator, + validate_email, + ProhibitNullCharactersValidator, +) from django.template.defaultfilters import filesizeformat from django.utils.deconstruct import deconstructible from django.utils.ipv6 import is_valid_ipv6_address @@ -25,8 +33,9 @@ # Note that this is an instantiation of the regex validator, _not_ the # regex-string validator defined right below validate_no_control_chars = RegexValidator( - regex="^[^\x00-\x1f]*$", - message="Please enter a string without control characters." ) + regex="^[^\x01-\x1f]*$", + message="Please enter a string without control characters.", +) @deconstructible @@ -60,6 +69,7 @@ def __ne__(self, other): validate_regular_expression_string = RegexStringValidator() + def validate_file_size(file, missing_ok=False): try: size = file.size @@ -69,8 +79,14 @@ def validate_file_size(file, missing_ok=False): else: raise - if size > settings.SECR_MAX_UPLOAD_SIZE: - raise ValidationError('Please keep filesize under %s. Requested upload size was %s' % (filesizeformat(settings.SECR_MAX_UPLOAD_SIZE), filesizeformat(file.size))) + if size > settings.DATATRACKER_MAX_UPLOAD_SIZE: + raise ValidationError( + "Please keep filesize under {}. Requested upload size was {}".format( + filesizeformat(settings.DATATRACKER_MAX_UPLOAD_SIZE), + filesizeformat(file.size) + ) + ) + def validate_mime_type(file, valid, missing_ok=False): try: @@ -129,8 +145,17 @@ def validate_no_html_frame(file): # instantiations of sub-validiators used by the external_resource validator validate_url = URLValidator() -validate_http_url = URLValidator(schemes=['http','https']) -validate_email = EmailValidator() +validate_http_url = URLValidator(schemes=["http", "https"]) +validate_no_nulls = ProhibitNullCharactersValidator() + + +def validate_mailbox_address(s): + """Validate an RFC 5322 'mailbox' (e.g., "Some Person" )""" + # parseaddr() returns ("", "") on err; validate_email() will reject that for us + name, addr = parseaddr(s) + validate_no_nulls(name) # could be stricter... 
+ validate_email(addr) + def validate_ipv6_address(value): if not is_valid_ipv6_address(value): diff --git a/ietf/utils/xmldraft.py b/ietf/utils/xmldraft.py index 5a0abb6132..325b8499a9 100644 --- a/ietf/utils/xmldraft.py +++ b/ietf/utils/xmldraft.py @@ -1,4 +1,4 @@ -# Copyright The IETF Trust 2022, All Rights Reserved +# Copyright The IETF Trust 2022-2025, All Rights Reserved # -*- coding: utf-8 -*- import datetime import io @@ -7,7 +7,7 @@ import debug # pyflakes: ignore -from contextlib import ExitStack +from contextlib import contextmanager from lxml.etree import XMLSyntaxError from xml2rfc.util.date import augment_date, extract_date from ietf.utils.timezone import date_today @@ -15,6 +15,21 @@ from .draft import Draft +@contextmanager +def capture_xml2rfc_output(): + orig_write_out = xml2rfc.log.write_out + orig_write_err = xml2rfc.log.write_err + parser_out = io.StringIO() + parser_err = io.StringIO() + xml2rfc.log.write_out = parser_out + xml2rfc.log.write_err = parser_err + try: + yield {"stdout": parser_out, "stderr": parser_err} + finally: + xml2rfc.log.write_out = orig_write_out + xml2rfc.log.write_err = orig_write_err + + class XMLDraft(Draft): """Draft from XML source @@ -29,7 +44,7 @@ def __init__(self, xml_file): # cast xml_file to str so, e.g., this will work with a Path self.xmltree, self.xml_version = self.parse_xml(str(xml_file)) self.xmlroot = self.xmltree.getroot() - self.filename, self.revision = self._parse_docname() + self.filename, self.revision = self.parse_docname(self.xmlroot) @staticmethod def parse_xml(filename): @@ -38,27 +53,18 @@ def parse_xml(filename): Converts to xml2rfc v3 schema, then returns the root of the v3 tree and the original xml version. """ - orig_write_out = xml2rfc.log.write_out - orig_write_err = xml2rfc.log.write_err - parser_out = io.StringIO() - parser_err = io.StringIO() - - with ExitStack() as stack: - @stack.callback - def cleanup(): # called when context exited, even if there's an exception - xml2rfc.log.write_out = orig_write_out - xml2rfc.log.write_err = orig_write_err - - xml2rfc.log.write_out = parser_out - xml2rfc.log.write_err = parser_err + with capture_xml2rfc_output() as parser_logs: parser = xml2rfc.XmlRfcParser(filename, quiet=True) try: tree = parser.parse() except XMLSyntaxError: raise InvalidXMLError() except Exception as e: - raise XMLParseError(parser_out.getvalue(), parser_err.getvalue()) from e + raise XMLParseError( + parser_logs["stdout"].getvalue(), + parser_logs["stderr"].getvalue(), + ) from e xml_version = tree.getroot().get('version', '2') if xml_version == '2': @@ -96,6 +102,17 @@ def _document_name(self, ref): number = int(maybe_number) return f"{label}{number}" + target = ref.get("target") + if isinstance(target, str): + target = target.lower() + if target.startswith("https://datatracker.ietf.org/doc/"): + # len("https://datatracker.ietf.org/doc/")==33 + m = re.match(r"^(draft-[a-z0-9-]*[a-z0-9])([/-]\d{2})?/?$",target[33:]) + if m: + name = m.group(1) + return name + + # if we couldn't find a match so far, try the seriesInfo series_query = " or ".join(f"@name='{x.upper()}'" for x in series) for info in ref.xpath( @@ -125,8 +142,11 @@ def _reference_section_name(self, section_elt): section_name = section_elt.get('title') # fall back to title if we have it return section_name - def _parse_docname(self): - docname = self.xmlroot.attrib.get('docName') + @staticmethod + def parse_docname(xmlroot): + docname = xmlroot.attrib.get('docName') + if docname is None: + raise ValueError("Missing docName attribute in 
the XML root element") revmatch = re.match( r'^(?P<filename>.+?)(?:-(?P<rev>[0-9][0-9]))?$', docname, @@ -138,16 +158,38 @@ return revmatch.group('filename'), revmatch.group('rev') def get_title(self): - return self.xmlroot.findtext('front/title').strip() + title_text = self.xmlroot.findtext('front/title') + return "" if title_text is None else title_text.strip() @staticmethod def parse_creation_date(date_elt): if date_elt is None: return None + today = date_today() - # ths mimics handling of date elements in the xml2rfc text/html writers - year, month, day = extract_date(date_elt, today) - year, month, day = augment_date(year, month, day, today) + + # Outright reject non-numeric year / day (xml2rfc's extract_date does not do this) + # (n.b., "year" can be non-numeric in a <reference> section per RFC 7991) + year = date_elt.get("year") + day = date_elt.get("day") + non_numeric_year = year and not year.isdigit() + non_numeric_day = day and not day.isdigit() + if non_numeric_day or non_numeric_year: + raise InvalidMetadataError( + "Unable to parse the <date> element in the <front> section: " + "year and day must be numeric values if specified." + ) + + try: + # this mimics handling of date elements in the xml2rfc text/html writers + year, month, day = extract_date(date_elt, today) + year, month, day = augment_date(year, month, day, today) + except Exception as err: + # Give a generic error if anything goes wrong so far... + raise InvalidMetadataError( + "Unable to parse the <date> element in the <front> section." + ) from err + if not day: # Must choose a day for a datetime.date. Per RFC 7991 sect 2.17, we use # today's date if it is consistent with the rest of the date. Otherwise, @@ -156,7 +198,19 @@ def parse_creation_date(date_elt): day = today.day else: day = 15 - return datetime.date(year, month, day) + + try: + creation_date = datetime.date(year, month, day) + except Exception: + # If everything went well, we should have had a valid datetime, but we didn't. + # The parsing _worked_ but not in a way that we can go forward with. + raise InvalidMetadataError( + "The <date> element in the <front> section specified an incomplete date " + "that was not consistent with today's date. If you specify only a year, " + "it must be the four-digit current year. To use today's date, omit the " + "date tag or use <date/>." + ) + return creation_date def get_creation_date(self): return self.parse_creation_date(self.xmlroot.find("front/date")) @@ -176,6 +230,35 @@ def get_creation_date(self): # abstract = self.xmlroot.findtext('front/abstract') # return abstract.strip() if abstract else '' + @staticmethod + def render_author_name(author_elt): + """Get a displayable name for an author, if possible + + Based on TextWriter.render_author_name() from xml2rfc. If fullname is present, uses that. + If not, uses either initials + surname or just surname. Finally, returns None because this + author is evidently an organization, not a person. + + Does not involve ascii* attributes because rfc7991 requires fullname if any of those are + present. + """ + # Use fullname attribute, if present + fullname = author_elt.attrib.get("fullname", "").strip() + if fullname: + # If any 8bit chars in the fullname, try to append the author's + # name in ascii.
+ if any([x >= 0x80 for x in fullname.encode('utf8')]): + asciifullname = author_elt.attrib.get("asciiFullname", "").strip() + if asciifullname: + fullname = fullname + ' (' + asciifullname + ')' + return fullname + surname = author_elt.attrib.get("surname", "").strip() + initials = author_elt.attrib.get("initials", "").strip() + if surname or initials: + # This allows the possibility that only initials are used, which is a bit nonsensical + # but seems to be technically allowed by RFC 7991. + return f"{initials} {surname}".strip() + return None + def get_author_list(self): """Get detailed author list @@ -194,7 +277,7 @@ def get_author_list(self): for author in self.xmlroot.findall('front/author'): info = { - 'name': author.attrib.get('fullname'), + 'name': self.render_author_name(author), 'email': author.findtext('address/email'), 'affiliation': author.findtext('organization'), } @@ -243,3 +326,7 @@ def parser_msgs(self): class InvalidXMLError(Exception): """File is not valid XML""" pass + + +class InvalidMetadataError(Exception): + """XML is well-formed but has invalid metadata""" diff --git a/ietf/wsgi.py b/ietf/wsgi.py index c43334874b..bd17da5ba0 100644 --- a/ietf/wsgi.py +++ b/ietf/wsgi.py @@ -1,23 +1,17 @@ -# Copyright The IETF Trust 2013-2021, All Rights Reserved +# Copyright The IETF Trust 2013-2024, All Rights Reserved # -*- coding: utf-8 -*- - import os import sys -import syslog path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -syslog.openlog(str("datatracker"), syslog.LOG_PID, syslog.LOG_USER) - if not path in sys.path: sys.path.insert(0, path) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ietf.settings") -syslog.syslog("Starting datatracker wsgi instance") - from django.core.wsgi import get_wsgi_application -application = get_wsgi_application() +application = get_wsgi_application() diff --git a/jsconfig.json b/jsconfig.json index 0898bb2e2a..da44ff2fb2 100644 --- a/jsconfig.json +++ b/jsconfig.json @@ -15,7 +15,7 @@ "vueCompilerOptions": { "target": 3, "plugins": [ - "@volar/vue-language-plugin-pug" + "@vue/language-plugin-pug" ] } } diff --git a/k8s/README.md b/k8s/README.md new file mode 100644 index 0000000000..3966101ab8 --- /dev/null +++ b/k8s/README.md @@ -0,0 +1,5 @@ +# Kustomize deployment + +## Run locally + +The `secrets.yaml` file is provided as a reference only and must be referenced manually in the `kustomization.yaml` file. 
\ No newline at end of file diff --git a/k8s/auth.yaml b/k8s/auth.yaml new file mode 100644 index 0000000000..2bdb064447 --- /dev/null +++ b/k8s/auth.yaml @@ -0,0 +1,148 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: auth +spec: + replicas: 1 + revisionHistoryLimit: 2 + selector: + matchLabels: + app: auth + strategy: + type: Recreate + template: + metadata: + labels: + app: auth + spec: + securityContext: + runAsNonRoot: true + containers: + # ----------------------------------------------------- + # Auth Container + # ----------------------------------------------------- + - name: auth + image: "ghcr.io/ietf-tools/datatracker:$APP_IMAGE_TAG" + imagePullPolicy: Always + volumeMounts: + - name: dt-vol + mountPath: /a + - name: dt-tmp + mountPath: /tmp + - name: dt-home + mountPath: /home/datatracker + - name: dt-xml2rfc-cache + mountPath: /var/cache/xml2rfc + - name: dt-cfg + mountPath: /workspace/ietf/settings_local.py + subPath: settings_local.py + env: + - name: "CONTAINER_ROLE" + value: "datatracker" + # ensures the pod gets recreated on every deploy: + - name: "DEPLOY_UID" + value: "$DEPLOY_UID" + envFrom: + - secretRef: + name: dt-secrets-env + startupProbe: + httpGet: + port: 8000 + path: /health/ + initialDelaySeconds: 10 + periodSeconds: 5 + failureThreshold: 30 + timeoutSeconds: 3 + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + runAsUser: 1000 + runAsGroup: 1000 + # ----------------------------------------------------- + # Nginx Container + # ----------------------------------------------------- + - name: nginx + image: "ghcr.io/nginxinc/nginx-unprivileged:1.27" + imagePullPolicy: IfNotPresent + ports: + - containerPort: 8080 + name: http + protocol: TCP + livenessProbe: + httpGet: + port: 8080 + path: /health/nginx + securityContext: + readOnlyRootFilesystem: true + volumeMounts: + - name: nginx-tmp + mountPath: /tmp + - name: dt-cfg + mountPath: /etc/nginx/conf.d/00logging.conf + subPath: nginx-logging.conf + - name: dt-cfg + mountPath: /etc/nginx/conf.d/default.conf + subPath: nginx-auth.conf + # ----------------------------------------------------- + # ScoutAPM Container + # ----------------------------------------------------- + - name: scoutapm + image: "scoutapp/scoutapm:version-1.4.0" + imagePullPolicy: IfNotPresent + # Replace command with one that will shut down on a TERM signal + # The ./core-agent start command line is from the scoutapm docker image + command: + - "sh" + - "-c" + - >- + trap './core-agent shutdown --tcp 0.0.0.0:6590' TERM; + ./core-agent start --daemonize false --log-level debug --tcp 0.0.0.0:6590 & + wait $! 
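+          # ("wait $!" keeps the shell waiting on the backgrounded agent, so the TERM
+          # trap above can run when Kubernetes stops the pod.)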
+ livenessProbe: + exec: + command: + - "sh" + - "-c" + - "./core-agent probe --tcp 0.0.0.0:6590 | grep -q 'Agent found'" + securityContext: + readOnlyRootFilesystem: true + runAsUser: 65534 # "nobody" user by default + runAsGroup: 65534 # "nogroup" group by default + volumes: + # To be overriden with the actual shared volume + - name: dt-vol + - name: dt-tmp + emptyDir: + sizeLimit: "2Gi" + - name: dt-xml2rfc-cache + emptyDir: + sizeLimit: "2Gi" + - name: dt-home + emptyDir: + sizeLimit: "2Gi" + - name: dt-cfg + configMap: + name: files-cfgmap + - name: nginx-tmp + emptyDir: + sizeLimit: "500Mi" + dnsPolicy: ClusterFirst + restartPolicy: Always + terminationGracePeriodSeconds: 60 +--- +apiVersion: v1 +kind: Service +metadata: + name: auth +spec: + type: ClusterIP + ports: + - port: 80 + targetPort: http + protocol: TCP + name: http + selector: + app: auth diff --git a/k8s/beat.yaml b/k8s/beat.yaml new file mode 100644 index 0000000000..b4291c7e31 --- /dev/null +++ b/k8s/beat.yaml @@ -0,0 +1,62 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: beat + labels: + deleteBeforeUpgrade: yes +spec: + replicas: 1 + revisionHistoryLimit: 2 + selector: + matchLabels: + app: beat + strategy: + type: Recreate + template: + metadata: + labels: + app: beat + spec: + securityContext: + runAsNonRoot: true + containers: + # ----------------------------------------------------- + # Beat Container + # ----------------------------------------------------- + - name: beat + image: "ghcr.io/ietf-tools/datatracker:$APP_IMAGE_TAG" + imagePullPolicy: Always + volumeMounts: + - name: dt-vol + mountPath: /a + - name: dt-tmp + mountPath: /tmp + - name: dt-cfg + mountPath: /workspace/ietf/settings_local.py + subPath: settings_local.py + env: + - name: "CONTAINER_ROLE" + value: "beat" + envFrom: + - secretRef: + name: dt-secrets-env + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + runAsUser: 1000 + runAsGroup: 1000 + volumes: + # To be overridden with the actual shared volume + - name: dt-vol + - name: dt-tmp + emptyDir: + sizeLimit: "2Gi" + - name: dt-cfg + configMap: + name: files-cfgmap + dnsPolicy: ClusterFirst + restartPolicy: Always + terminationGracePeriodSeconds: 10 diff --git a/k8s/celery.yaml b/k8s/celery.yaml new file mode 100644 index 0000000000..2f4c0fd439 --- /dev/null +++ b/k8s/celery.yaml @@ -0,0 +1,97 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: celery + labels: + deleteBeforeUpgrade: yes +spec: + replicas: 1 + revisionHistoryLimit: 2 + selector: + matchLabels: + app: celery + strategy: + type: Recreate + template: + metadata: + labels: + app: celery + spec: + securityContext: + runAsNonRoot: true + containers: + # ----------------------------------------------------- + # Celery Container + # ----------------------------------------------------- + - name: celery + image: "ghcr.io/ietf-tools/datatracker:$APP_IMAGE_TAG" + imagePullPolicy: Always + volumeMounts: + - name: dt-vol + mountPath: /a + - name: dt-tmp + mountPath: /tmp + - name: dt-home + mountPath: /home/datatracker + - name: dt-xml2rfc-cache + mountPath: /var/cache/xml2rfc + - name: dt-cfg + mountPath: /workspace/ietf/settings_local.py + subPath: settings_local.py + env: + - name: "CONTAINER_ROLE" + value: "celery" + envFrom: + - secretRef: + name: dt-secrets-env + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + runAsUser: 1000 + runAsGroup: 1000 + # 
----------------------------------------------------- + # ScoutAPM Container + # ----------------------------------------------------- + - name: scoutapm + image: "scoutapp/scoutapm:version-1.4.0" + imagePullPolicy: IfNotPresent + # Replace command with one that will shut down on a TERM signal + # The ./core-agent start command line is from the scoutapm docker image + command: + - "sh" + - "-c" + - >- + trap './core-agent shutdown --tcp 0.0.0.0:6590' TERM; + ./core-agent start --daemonize false --log-level debug --tcp 0.0.0.0:6590 & + wait $! + livenessProbe: + exec: + command: + - "sh" + - "-c" + - "./core-agent probe --tcp 0.0.0.0:6590 | grep -q 'Agent found'" + securityContext: + readOnlyRootFilesystem: true + runAsUser: 65534 # "nobody" user by default + runAsGroup: 65534 # "nogroup" group by default + volumes: + # To be overridden with the actual shared volume + - name: dt-vol + - name: dt-tmp + emptyDir: + sizeLimit: "2Gi" + - name: dt-xml2rfc-cache + emptyDir: + sizeLimit: "2Gi" + - name: dt-home + emptyDir: + sizeLimit: "2Gi" + - name: dt-cfg + configMap: + name: files-cfgmap + dnsPolicy: ClusterFirst + restartPolicy: Always + terminationGracePeriodSeconds: 600 diff --git a/k8s/datatracker.yaml b/k8s/datatracker.yaml new file mode 100644 index 0000000000..50a2c69687 --- /dev/null +++ b/k8s/datatracker.yaml @@ -0,0 +1,179 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: datatracker +spec: + replicas: 1 + revisionHistoryLimit: 2 + selector: + matchLabels: + app: datatracker + strategy: + type: Recreate + template: + metadata: + labels: + app: datatracker + spec: + securityContext: + runAsNonRoot: true + containers: + # ----------------------------------------------------- + # Datatracker Container + # ----------------------------------------------------- + - name: datatracker + image: "ghcr.io/ietf-tools/datatracker:$APP_IMAGE_TAG" + imagePullPolicy: Always + volumeMounts: + - name: dt-vol + mountPath: /a + - name: dt-tmp + mountPath: /tmp + - name: dt-home + mountPath: /home/datatracker + - name: dt-xml2rfc-cache + mountPath: /var/cache/xml2rfc + - name: dt-cfg + mountPath: /workspace/ietf/settings_local.py + subPath: settings_local.py + env: + - name: "CONTAINER_ROLE" + value: "datatracker" + # ensures the pod gets recreated on every deploy: + - name: "DEPLOY_UID" + value: "$DEPLOY_UID" + envFrom: + - secretRef: + name: dt-secrets-env + startupProbe: + httpGet: + port: 8000 + path: /health/ + initialDelaySeconds: 10 + periodSeconds: 5 + failureThreshold: 30 + timeoutSeconds: 3 + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + runAsUser: 1000 + runAsGroup: 1000 + # ----------------------------------------------------- + # Nginx Container + # ----------------------------------------------------- + - name: nginx + image: "ghcr.io/nginxinc/nginx-unprivileged:1.27" + imagePullPolicy: IfNotPresent + ports: + - containerPort: 8080 + name: http + protocol: TCP + livenessProbe: + httpGet: + port: 8080 + path: /health/nginx + securityContext: + readOnlyRootFilesystem: true + volumeMounts: + - name: nginx-tmp + mountPath: /tmp + - name: dt-cfg + mountPath: /etc/nginx/conf.d/00logging.conf + subPath: nginx-logging.conf + - name: dt-cfg + # Replaces the original default.conf + mountPath: /etc/nginx/conf.d/default.conf + subPath: nginx-datatracker.conf + # ----------------------------------------------------- + # ScoutAPM Container + # ----------------------------------------------------- + - name: scoutapm + image: 
"scoutapp/scoutapm:version-1.4.0" + imagePullPolicy: IfNotPresent + # Replace command with one that will shut down on a TERM signal + # The ./core-agent start command line is from the scoutapm docker image + command: + - "sh" + - "-c" + - >- + trap './core-agent shutdown --tcp 0.0.0.0:6590' TERM; + ./core-agent start --daemonize false --log-level debug --tcp 0.0.0.0:6590 & + wait $! + livenessProbe: + exec: + command: + - "sh" + - "-c" + - "./core-agent probe --tcp 0.0.0.0:6590 | grep -q 'Agent found'" + securityContext: + readOnlyRootFilesystem: true + runAsUser: 65534 # "nobody" user by default + runAsGroup: 65534 # "nogroup" group by default + initContainers: + - name: migration + image: "ghcr.io/ietf-tools/datatracker:$APP_IMAGE_TAG" + imagePullPolicy: Always + env: + - name: "CONTAINER_ROLE" + value: "migrations" + envFrom: + - secretRef: + name: dt-secrets-env + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + runAsUser: 1000 + runAsGroup: 1000 + volumeMounts: + - name: dt-vol + mountPath: /a + - name: dt-tmp + mountPath: /tmp + - name: dt-home + mountPath: /home/datatracker + - name: dt-xml2rfc-cache + mountPath: /var/cache/xml2rfc + - name: dt-cfg + mountPath: /workspace/ietf/settings_local.py + subPath: settings_local.py + volumes: + # To be overriden with the actual shared volume + - name: dt-vol + - name: dt-tmp + emptyDir: + sizeLimit: "2Gi" + - name: dt-xml2rfc-cache + emptyDir: + sizeLimit: "2Gi" + - name: dt-home + emptyDir: + sizeLimit: "2Gi" + - name: dt-cfg + configMap: + name: files-cfgmap + - name: nginx-tmp + emptyDir: + sizeLimit: "500Mi" + dnsPolicy: ClusterFirst + restartPolicy: Always + terminationGracePeriodSeconds: 60 +--- +apiVersion: v1 +kind: Service +metadata: + name: datatracker +spec: + type: ClusterIP + ports: + - port: 80 + targetPort: http + protocol: TCP + name: http + selector: + app: datatracker diff --git a/k8s/kustomization.yaml b/k8s/kustomization.yaml new file mode 100644 index 0000000000..769cb03517 --- /dev/null +++ b/k8s/kustomization.yaml @@ -0,0 +1,17 @@ +namespace: datatracker +namePrefix: dt- +configMapGenerator: + - name: files-cfgmap + files: + - nginx-logging.conf + - nginx-auth.conf + - nginx-datatracker.conf + - settings_local.py +resources: + - auth.yaml + - beat.yaml + - celery.yaml + - datatracker.yaml + - memcached.yaml + - rabbitmq.yaml + - replicator.yaml diff --git a/k8s/memcached.yaml b/k8s/memcached.yaml new file mode 100644 index 0000000000..5a4c9f0aed --- /dev/null +++ b/k8s/memcached.yaml @@ -0,0 +1,80 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: memcached +spec: + replicas: 1 + revisionHistoryLimit: 2 + selector: + matchLabels: + app: memcached + template: + metadata: + labels: + app: memcached + spec: + securityContext: + runAsNonRoot: true + containers: + # ----------------------------------------------------- + # Memcached + # ----------------------------------------------------- + - image: "memcached:1.6-alpine" + imagePullPolicy: IfNotPresent + args: ["-m", "1024"] + name: memcached + ports: + - name: memcached + containerPort: 11211 + protocol: TCP + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + # memcached image sets up uid/gid 11211 + runAsUser: 11211 + runAsGroup: 11211 + # ----------------------------------------------------- + # Memcached Exporter for Prometheus + # ----------------------------------------------------- + - image: 
"quay.io/prometheus/memcached-exporter:v0.14.3" + imagePullPolicy: IfNotPresent + name: memcached-exporter + ports: + - name: metrics + containerPort: 9150 + protocol: TCP + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + runAsUser: 65534 # nobody + runAsGroup: 65534 # nobody + dnsPolicy: ClusterFirst + restartPolicy: Always + terminationGracePeriodSeconds: 30 +--- +apiVersion: v1 +kind: Service +metadata: + name: memcached + annotations: + k8s.grafana.com/scrape: "true" # this is not a bool + k8s.grafana.com/metrics.portName: "metrics" +spec: + type: ClusterIP + ports: + - port: 11211 + targetPort: memcached + protocol: TCP + name: memcached + - port: 9150 + targetPort: metrics + protocol: TCP + name: metrics + selector: + app: memcached diff --git a/k8s/nginx-auth.conf b/k8s/nginx-auth.conf new file mode 100644 index 0000000000..95aa838064 --- /dev/null +++ b/k8s/nginx-auth.conf @@ -0,0 +1,42 @@ +server { + listen 8080 default_server; + server_name _; + + # Replace default "main" formatter with the ietfjson formatter from nginx-logging.conf + access_log /var/log/nginx/access.log ietfjson; + + # Note that regex location matches take priority over non-regex "prefix" matches. Use regexes so that + # our deny all rule does not squelch the other locations. + location ~ ^/health/nginx$ { + access_log off; + return 200; + } + + location ~ ^/robots.txt$ { + add_header Content-Type text/plain; + return 200 "User-agent: *\nDisallow: /\n"; + } + + location ~ ^/accounts/create.* { + return 302 https://datatracker.ietf.org/accounts/create; + } + + # n.b. (?!...) is a negative lookahead group + location ~ ^(/(?!(api/openid/|accounts/login/|accounts/logout/|accounts/reset/|person/.*/photo|group/groupmenu.json)).*) { + return 302 https://datatracker.ietf.org$${keepempty}request_uri; + } + + location / { + add_header Content-Security-Policy "default-src 'self' 'unsafe-inline' data: https://datatracker.ietf.org/ https://www.ietf.org/ http://ietf.org/ https://analytics.ietf.org https://static.ietf.org; frame-ancestors 'self' ietf.org *.ietf.org meetecho.com *.meetecho.com gather.town *.gather.town"; + proxy_set_header Host $${keepempty}host; + proxy_set_header Connection close; + proxy_set_header X-Request-Start "t=$${keepempty}msec"; + proxy_set_header X-Forwarded-For $${keepempty}proxy_add_x_forwarded_for; + proxy_hide_header X-Datatracker-Is-Authenticated; # hide this from the outside world + proxy_pass http://localhost:8000; + # Set timeouts longer than Cloudflare proxy limits + proxy_connect_timeout 60; # nginx default (Cf = 15) + proxy_read_timeout 120; # nginx default = 60 (Cf = 100) + proxy_send_timeout 60; # nginx default = 60 (Cf = 30) + } +} diff --git a/k8s/nginx-datatracker.conf b/k8s/nginx-datatracker.conf new file mode 100644 index 0000000000..882d7563c2 --- /dev/null +++ b/k8s/nginx-datatracker.conf @@ -0,0 +1,32 @@ +server { + listen 8080 default_server; + server_name _; + + # Replace default "main" formatter with the ietfjson formatter from nginx-logging.conf + access_log /var/log/nginx/access.log ietfjson; + + location /health/nginx { + access_log off; + return 200; + } + + location /robots.txt { + add_header Content-Type text/plain; + return 200 "User-agent: *\nDisallow: /doc/pdf/\n"; + } + + location / { + add_header Content-Security-Policy "default-src 'self' 'unsafe-inline' data: https://datatracker.ietf.org/ https://www.ietf.org/ http://ietf.org/ https://analytics.ietf.org https://static.ietf.org; frame-ancestors 
'self' ietf.org *.ietf.org meetecho.com *.meetecho.com"; + proxy_set_header Host $${keepempty}host; + proxy_set_header Connection close; + proxy_set_header X-Request-Start "t=$${keepempty}msec"; + proxy_set_header X-Forwarded-For $${keepempty}proxy_add_x_forwarded_for; + proxy_hide_header X-Datatracker-Is-Authenticated; # hide this from the outside world + proxy_pass http://localhost:8000; + # Set timeouts longer than Cloudflare proxy limits + proxy_connect_timeout 60; # nginx default (Cf = 15) + proxy_read_timeout 120; # nginx default = 60 (Cf = 100) + proxy_send_timeout 60; # nginx default = 60 (Cf = 30) + client_max_body_size 0; # disable size check + } +} diff --git a/k8s/nginx-logging.conf b/k8s/nginx-logging.conf new file mode 100644 index 0000000000..673d7a29ab --- /dev/null +++ b/k8s/nginx-logging.conf @@ -0,0 +1,22 @@ +# Define JSON log format - must be loaded before config that references it. +# Note that each line is fully enclosed in single quotes. Commas in arrays are +# intentionally inside the single quotes. +log_format ietfjson escape=json + '{' + '"time":"$${keepempty}time_iso8601",' + '"remote_ip":"$${keepempty}remote_addr",' + '"request":"$${keepempty}request",' + '"host":"$${keepempty}host",' + '"path":"$${keepempty}request_uri",' + '"method":"$${keepempty}request_method",' + '"status":"$${keepempty}status",' + '"len_bytes":"$${keepempty}body_bytes_sent",' + '"duration_s":"$${keepempty}request_time",' + '"referer":"$${keepempty}http_referer",' + '"user_agent":"$${keepempty}http_user_agent",' + '"x_forwarded_for":"$${keepempty}http_x_forwarded_for",' + '"x_forwarded_proto":"$${keepempty}http_x_forwarded_proto",' + '"cf_connecting_ip":"$${keepempty}http_cf_connecting_ip",' + '"cf_ray":"$${keepempty}http_cf_ray",' + '"asn":"$${keepempty}http_x_ip_src_asnum"' + '}'; diff --git a/k8s/rabbitmq.yaml b/k8s/rabbitmq.yaml new file mode 100644 index 0000000000..346b54c93e --- /dev/null +++ b/k8s/rabbitmq.yaml @@ -0,0 +1,178 @@ +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: rabbitmq +spec: + replicas: 1 + revisionHistoryLimit: 2 + selector: + matchLabels: + app: rabbitmq + template: + metadata: + labels: + app: rabbitmq + spec: + securityContext: + runAsNonRoot: true + containers: + # ----------------------------------------------------- + # RabbitMQ Container + # ----------------------------------------------------- + - image: "ghcr.io/ietf-tools/datatracker-mq:3.13-alpine" + imagePullPolicy: Always + name: rabbitmq + ports: + - name: amqp + containerPort: 5672 + protocol: TCP + volumeMounts: + - name: rabbitmq-data + mountPath: /var/lib/rabbitmq + subPath: "rabbitmq" + - name: rabbitmq-tmp + mountPath: /tmp + - name: rabbitmq-config + mountPath: "/etc/rabbitmq" + env: + - name: CELERY_PASSWORD + valueFrom: + secretKeyRef: + name: dt-secrets-env + key: CELERY_PASSWORD + livenessProbe: + exec: + command: ["rabbitmq-diagnostics", "-q", "ping", "-t", "30"] + periodSeconds: 30 + timeoutSeconds: 35 # slightly longer than ping "-t" option + startupProbe: + initialDelaySeconds: 15 + periodSeconds: 5 + timeoutSeconds: 35 # slightly longer than ping "-t" option + successThreshold: 1 + failureThreshold: 60 + exec: + command: ["rabbitmq-diagnostics", "-q", "ping", "-t", "30"] + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + # rabbitmq image sets up uid/gid 100/101 + runAsUser: 100 + runAsGroup: 101 + initContainers: + # ----------------------------------------------------- + # Init RabbitMQ data + # 
----------------------------------------------------- + - name: init-rabbitmq + image: busybox:stable + command: + - "sh" + - "-c" + - "mkdir -p -m700 /mnt/rabbitmq && chown 100:101 /mnt/rabbitmq" + securityContext: + runAsNonRoot: false + runAsUser: 0 + readOnlyRootFilesystem: true + volumeMounts: + - name: "rabbitmq-data" + mountPath: "/mnt" + volumes: + - name: rabbitmq-tmp + emptyDir: + sizeLimit: "50Mi" + - name: rabbitmq-config + configMap: + name: "rabbitmq-configmap" + dnsPolicy: ClusterFirst + restartPolicy: Always + terminationGracePeriodSeconds: 30 + volumeClaimTemplates: + - metadata: + name: rabbitmq-data + spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 8Gi + # storageClassName: "" +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: rabbitmq-configmap +data: + definitions.json: |- + { + "permissions": [ + { + "configure": ".*", + "read": ".*", + "user": "datatracker", + "vhost": "dt", + "write": ".*" + } + ], + "users": [ + { + "hashing_algorithm": "rabbit_password_hashing_sha256", + "limits": {}, + "name": "datatracker", + "password_hash": "HJxcItcpXtBN+R/CH7dUelfKBOvdUs3AWo82SBw2yLMSguzb", + "tags": [] + } + ], + "vhosts": [ + { + "limits": [], + "metadata": { + "description": "", + "tags": [] + }, + "name": "dt" + } + ] + } + rabbitmq.conf: |- + # prevent guest from logging in over tcp + loopback_users.guest = true + + # load saved definitions + load_definitions = /etc/rabbitmq/definitions.json + + # Ensure that enough disk is available to flush to disk. To do this, need to limit the + # memory available to the container to something reasonable. See + # https://www.rabbitmq.com/production-checklist.html#monitoring-and-resource-usage + # for recommendations. + + # 1-1.5 times the memory available to the container is adequate for disk limit + disk_free_limit.absolute = 6000MB + + # This should be ~40% of the memory available to the container. Use an + # absolute number because relative will be proprtional to the full machine + # memory. 
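+    # (Together with the 6000MB disk limit above, the 1600MB watermark below appears
+    # to be sized for a container given roughly 4GB of memory: ~1.5x and ~40% respectively.)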
+ vm_memory_high_watermark.absolute = 1600MB + + # Logging + log.file = false + log.console = true + log.console.level = info + log.console.formatter = json +--- +apiVersion: v1 +kind: Service +metadata: + name: rabbitmq +spec: + type: ClusterIP + clusterIP: None # headless service + ports: + - port: 5672 + targetPort: amqp + protocol: TCP + name: amqp + selector: + app: rabbitmq diff --git a/k8s/replicator.yaml b/k8s/replicator.yaml new file mode 100644 index 0000000000..a28d9e8a16 --- /dev/null +++ b/k8s/replicator.yaml @@ -0,0 +1,72 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: replicator + labels: + deleteBeforeUpgrade: yes +spec: + replicas: 1 + revisionHistoryLimit: 2 + selector: + matchLabels: + app: replicator + strategy: + type: Recreate + template: + metadata: + labels: + app: replicator + spec: + securityContext: + runAsNonRoot: true + containers: + # ----------------------------------------------------- + # Celery Container + # ----------------------------------------------------- + - name: celery + image: "ghcr.io/ietf-tools/datatracker:$APP_IMAGE_TAG" + imagePullPolicy: Always + volumeMounts: + - name: dt-vol + mountPath: /a + - name: dt-tmp + mountPath: /tmp + - name: dt-home + mountPath: /home/datatracker + - name: dt-xml2rfc-cache + mountPath: /var/cache/xml2rfc + - name: dt-cfg + mountPath: /workspace/ietf/settings_local.py + subPath: settings_local.py + env: + - name: "CONTAINER_ROLE" + value: "replicator" + envFrom: + - secretRef: + name: dt-secrets-env + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + runAsUser: 1000 + runAsGroup: 1000 + volumes: + # To be overridden with the actual shared volume + - name: dt-vol + - name: dt-tmp + emptyDir: + sizeLimit: "2Gi" + - name: dt-xml2rfc-cache + emptyDir: + sizeLimit: "2Gi" + - name: dt-home + emptyDir: + sizeLimit: "2Gi" + - name: dt-cfg + configMap: + name: files-cfgmap + dnsPolicy: ClusterFirst + restartPolicy: Always + terminationGracePeriodSeconds: 600 diff --git a/k8s/secrets.yaml b/k8s/secrets.yaml new file mode 100644 index 0000000000..ba90af9c2a --- /dev/null +++ b/k8s/secrets.yaml @@ -0,0 +1,83 @@ +apiVersion: v1 +kind: Secret +metadata: + name: secrets-env +type: Opaque +stringData: + DATATRACKER_SERVER_MODE: "development" # development for staging, production for production + DATATRACKER_ADMINS: |- + Robert Sparks + Ryan Cross + Kesara Rathnayake + Jennifer Richards + Nicolas Giard + DATATRACKER_ALLOWED_HOSTS: ".ietf.org" # newline-separated list also allowed + # DATATRACKER_DATATRACKER_DEBUG: "false" + + # DB access details - needs to be filled in + # DATATRACKER_DB_HOST: "db" + # DATATRACKER_DB_PORT: "5432" + # DATATRACKER_DB_NAME: "datatracker" + # DATATRACKER_DB_USER: "django" # secret + # DATATRACKER_DB_PASS: "RkTkDPFnKpko" # secret + # DATATRACKER_DB_CONN_MAX_AGE: "0" # connection per request if not set, no limit if set to "None" + # DATATRACKER_DB_CONN_HEALTH_CHECKS: "false" + + DATATRACKER_DJANGO_SECRET_KEY: "PDwXboUq!=hPjnrtG2=ge#N$Dwy+wn@uivrugwpic8mxyPfHk" # secret + + # Set this to point testing / staging at the production statics server until we + # sort that out + # DATATRACKER_STATIC_URL: "https://static.ietf.org/dt/12.10.0/" + + # DATATRACKER_EMAIL_DEBUG: "true" + + # Outgoing email details + # DATATRACKER_EMAIL_HOST: "localhost" # defaults to localhost + # DATATRACKER_EMAIL_PORT: "2025" # defaults to 2025 + + # The value here is the default from settings.py (i.e., not actually secret) + 
DATATRACKER_NOMCOM_APP_SECRET_B64: "m9pzMezVoFNJfsvU9XSZxGnXnwup6P5ZgCQeEnROOoQ=" # secret + + DATATRACKER_IANA_SYNC_PASSWORD: "this-is-the-iana-sync-password" # secret + DATATRACKER_RFC_EDITOR_SYNC_PASSWORD: "this-is-the-rfc-editor-sync-password" # secret + DATATRACKER_YOUTUBE_API_KEY: "this-is-the-youtube-api-key" # secret + DATATRACKER_GITHUB_BACKUP_API_KEY: "this-is-the-github-backup-api-key" # secret + + # API key configuration + DATATRACKER_API_KEY_TYPE: "ES265" + # secret - value here is the default from settings.py (i.e., not actually secret) + DATATRACKER_API_PUBLIC_KEY_PEM_B64: |- + Ci0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tCk1Ga3dFd1lIS29aSXpqMENBUVlJS + 29aSXpqMERBUWNEUWdBRXFWb2pzYW9mREpTY3VNSk4rdHNodW15Tk01TUUKZ2Fyel + ZQcWtWb3ZtRjZ5RTdJSi9kdjRGY1YrUUtDdEovck9TOGUzNlk4WkFFVll1dWtoZXM + weVoxdz09Ci0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQo= + # secret - value here is the default from settings.py (i.e., not actually secret) + DATATRACKER_API_PRIVATE_KEY_PEM_B64: |- + Ci0tLS0tQkVHSU4gUFJJVkFURSBLRVktLS0tLQpNSUdIQWdFQU1CTUdCeXFHU000O + UFnRUdDQ3FHU000OUF3RUhCRzB3YXdJQkFRUWdvSTZMSmtvcEtxOFhySGk5ClFxR1 + F2RTRBODNURllqcUx6KzhnVUxZZWNzcWhSQU5DQUFTcFdpT3hxaDhNbEp5NHdrMzY + yeUc2Ykkwemt3U0IKcXZOVStxUldpK1lYcklUc2duOTIvZ1Z4WDVBb0swbitzNUx4 + N2ZwanhrQVJWaTY2U0Y2elRKblgKLS0tLS1FTkQgUFJJVkFURSBLRVktLS0tLQo= + + #DATATRACKER_REGISTRATION_API_KEY: "some-key" # secret" + + # DATATRACKER_MEETECHO_API_BASE: "https://meetings.conf.meetecho.com/api/v1/" + DATATRACKER_MEETECHO_CLIENT_ID: "this-is-the-meetecho-client-id" # secret + DATATRACKER_MEETECHO_CLIENT_SECRET: "this-is-the-meetecho-client-secret" # secret + + # DATATRACKER_MATOMO_SITE_ID: "7" # must be present to enable Matomo + # DATATRACKER_MATOMO_DOMAIN_PATH: "analytics.ietf.org" + + CELERY_PASSWORD: "this-is-a-secret" # secret + + # Only one of these may be set + # DATATRACKER_APP_API_TOKENS_JSON_B64: "e30K" # secret + # DATATRACKER_APP_API_TOKENS_JSON: "{}" # secret + + # use this to override default - one entry per line + # DATATRACKER_CSRF_TRUSTED_ORIGINS: |- + # https://datatracker.staging.ietf.org + + # Scout configuration + DATATRACKER_SCOUT_KEY: "this-is-the-scout-key" + DATATRACKER_SCOUT_NAME: "StagingDatatracker" \ No newline at end of file diff --git a/k8s/settings_local.py b/k8s/settings_local.py new file mode 100644 index 0000000000..19d0a1c2f5 --- /dev/null +++ b/k8s/settings_local.py @@ -0,0 +1,522 @@ +# Copyright The IETF Trust 2007-2026, All Rights Reserved +# -*- coding: utf-8 -*- + +from base64 import b64decode +from email.utils import parseaddr +import json + +from ietf import __release_hash__ +from ietf.settings import * # pyflakes:ignore +from ietf.settings import ( + STORAGES, + ARTIFACT_STORAGE_NAMES, + BLOBSTORAGE_CONNECT_TIMEOUT, + BLOBSTORAGE_READ_TIMEOUT, + BLOBSTORAGE_MAX_ATTEMPTS, +) +import botocore.config + + +def _multiline_to_list(s): + """Helper to split at newlines and convert to list""" + return [item.strip() for item in s.split("\n")] + + +# Default to "development". Production _must_ set DATATRACKER_SERVER_MODE="production" in the env! 
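+# (In the k8s manifests this arrives via the dt-secrets-env secret; the sample
+# k8s/secrets.yaml sets "development", which is intended for staging.)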
+SERVER_MODE = os.environ.get("DATATRACKER_SERVER_MODE", "development") + +# Secrets +_SECRET_KEY = os.environ.get("DATATRACKER_DJANGO_SECRET_KEY", None) +if _SECRET_KEY is not None: + SECRET_KEY = _SECRET_KEY +else: + raise RuntimeError("DATATRACKER_DJANGO_SECRET_KEY must be set") + +_NOMCOM_APP_SECRET_B64 = os.environ.get("DATATRACKER_NOMCOM_APP_SECRET_B64", None) +if _NOMCOM_APP_SECRET_B64 is not None: + NOMCOM_APP_SECRET = b64decode(_NOMCOM_APP_SECRET_B64) +else: + raise RuntimeError("DATATRACKER_NOMCOM_APP_SECRET_B64 must be set") + +_IANA_SYNC_PASSWORD = os.environ.get("DATATRACKER_IANA_SYNC_PASSWORD", None) +if _IANA_SYNC_PASSWORD is not None: + IANA_SYNC_PASSWORD = _IANA_SYNC_PASSWORD +else: + raise RuntimeError("DATATRACKER_IANA_SYNC_PASSWORD must be set") + +_RFC_EDITOR_SYNC_PASSWORD = os.environ.get("DATATRACKER_RFC_EDITOR_SYNC_PASSWORD", None) +if _RFC_EDITOR_SYNC_PASSWORD is not None: + RFC_EDITOR_SYNC_PASSWORD = os.environ.get("DATATRACKER_RFC_EDITOR_SYNC_PASSWORD") +else: + raise RuntimeError("DATATRACKER_RFC_EDITOR_SYNC_PASSWORD must be set") + +_YOUTUBE_API_KEY = os.environ.get("DATATRACKER_YOUTUBE_API_KEY", None) +if _YOUTUBE_API_KEY is not None: + YOUTUBE_API_KEY = _YOUTUBE_API_KEY +else: + raise RuntimeError("DATATRACKER_YOUTUBE_API_KEY must be set") + +_GITHUB_BACKUP_API_KEY = os.environ.get("DATATRACKER_GITHUB_BACKUP_API_KEY", None) +if _GITHUB_BACKUP_API_KEY is not None: + GITHUB_BACKUP_API_KEY = _GITHUB_BACKUP_API_KEY +else: + raise RuntimeError("DATATRACKER_GITHUB_BACKUP_API_KEY must be set") + +_API_KEY_TYPE = os.environ.get("DATATRACKER_API_KEY_TYPE", None) +if _API_KEY_TYPE is not None: + API_KEY_TYPE = _API_KEY_TYPE +else: + raise RuntimeError("DATATRACKER_API_KEY_TYPE must be set") + +_API_PUBLIC_KEY_PEM_B64 = os.environ.get("DATATRACKER_API_PUBLIC_KEY_PEM_B64", None) +if _API_PUBLIC_KEY_PEM_B64 is not None: + API_PUBLIC_KEY_PEM = b64decode(_API_PUBLIC_KEY_PEM_B64) +else: + raise RuntimeError("DATATRACKER_API_PUBLIC_KEY_PEM_B64 must be set") + +_API_PRIVATE_KEY_PEM_B64 = os.environ.get("DATATRACKER_API_PRIVATE_KEY_PEM_B64", None) +if _API_PRIVATE_KEY_PEM_B64 is not None: + API_PRIVATE_KEY_PEM = b64decode(_API_PRIVATE_KEY_PEM_B64) +else: + raise RuntimeError("DATATRACKER_API_PRIVATE_KEY_PEM_B64 must be set") + +_RED_PRECOMPUTER_TRIGGER_RETRY_DELAY = os.environ.get( + "DATATRACKER_RED_PRECOMPUTER_TRIGGER_RETRY_DELAY", None +) +if _RED_PRECOMPUTER_TRIGGER_RETRY_DELAY is not None: + RED_PRECOMPUTER_TRIGGER_RETRY_DELAY = _RED_PRECOMPUTER_TRIGGER_RETRY_DELAY +_RED_PRECOMPUTER_TRIGGER_MAX_RETRIES = os.environ.get( + "DATATRACKER_RED_PRECOMPUTER_TRIGGER_MAX_RETRIES", None +) +if _RED_PRECOMPUTER_TRIGGER_MAX_RETRIES is not None: + RED_PRECOMPUTER_TRIGGER_MAX_RETRIES = _RED_PRECOMPUTER_TRIGGER_MAX_RETRIES +_TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL = os.environ.get( + "DATATRACKER_TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL", None +) +if _TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL is not None: + TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL = _TRIGGER_RED_PRECOMPUTE_MULTIPLE_URL + +# Set DEBUG if DATATRACKER_DEBUG env var is the word "true" +DEBUG = os.environ.get("DATATRACKER_DEBUG", "false").lower() == "true" + +# DATATRACKER_ALLOWED_HOSTS env var is a newline-separated list of allowed hosts +_allowed_hosts_str = os.environ.get("DATATRACKER_ALLOWED_HOSTS", None) +if _allowed_hosts_str is not None: + ALLOWED_HOSTS = _multiline_to_list(_allowed_hosts_str) + +DATABASES = { + "default": { + "HOST": os.environ.get("DATATRACKER_DB_HOST", "db"), + "PORT": os.environ.get("DATATRACKER_DB_PORT", "5432"), + 
"NAME": os.environ.get("DATATRACKER_DB_NAME", "datatracker"), + "ENGINE": "django.db.backends.postgresql", + "USER": os.environ.get("DATATRACKER_DB_USER", "django"), + "PASSWORD": os.environ.get("DATATRACKER_DB_PASS", ""), + "OPTIONS": json.loads(os.environ.get("DATATRACKER_DB_OPTS_JSON", "{}")), + }, + "blobdb": { + "HOST": os.environ.get("BLOBDB_DB_HOST", "blobdb"), + "PORT": os.environ.get("BLOBDB_DB_PORT", "5432"), + "NAME": os.environ.get("BLOBDB_DB_NAME", "blob"), + "ENGINE": "django.db.backends.postgresql", + "USER": os.environ.get("BLOBDB_DB_USER", "django"), + "PASSWORD": os.environ.get("BLOBDB_DB_PASS", ""), + "OPTIONS": json.loads(os.environ.get("BLOBDB_DB_OPTS_JSON", "{}")), + }, +} + +DATABASE_ROUTERS = ["ietf.blobdb.routers.BlobdbStorageRouter"] +BLOBDB_DATABASE = "blobdb" + +# Configure persistent connections. A setting of 0 is Django's default. +_conn_max_age = os.environ.get("DATATRACKER_DB_CONN_MAX_AGE", "0") +for dbname in ["default", "blobdb"]: + # A string "none" means unlimited age. + DATABASES[dbname]["CONN_MAX_AGE"] = ( + None if _conn_max_age.lower() == "none" else int(_conn_max_age) + ) +# Enable connection health checks if DATATRACKER_DB_CONN_HEALTH_CHECK is the string "true" +_conn_health_checks = bool( + os.environ.get("DATATRACKER_DB_CONN_HEALTH_CHECKS", "false").lower() == "true" +) +for dbname in ["default", "blobdb"]: + DATABASES[dbname]["CONN_HEALTH_CHECKS"] = _conn_health_checks + +# DATATRACKER_ADMINS is a newline-delimited list of addresses parseable by email.utils.parseaddr +_admins_str = os.environ.get("DATATRACKER_ADMINS", None) +if _admins_str is not None: + ADMINS = [parseaddr(admin) for admin in _multiline_to_list(_admins_str)] +else: + raise RuntimeError("DATATRACKER_ADMINS must be set") + +USING_DEBUG_EMAIL_SERVER = ( + os.environ.get("DATATRACKER_EMAIL_DEBUG", "false").lower() == "true" +) +EMAIL_HOST = os.environ.get("DATATRACKER_EMAIL_HOST", "localhost") +EMAIL_PORT = int(os.environ.get("DATATRACKER_EMAIL_PORT", "2025")) + +_broker_url = os.environ.get("DATATRACKER_BROKER_URL", None) +_celery_password = os.environ.get("CELERY_PASSWORD", None) +if _broker_url is not None: + CELERY_BROKER_URL = _broker_url +elif _celery_password is not None: + CELERY_BROKER_URL = "amqp://datatracker:{password}@{host}/{queue}".format( + host=os.environ.get("RABBITMQ_HOSTNAME", "dt-rabbitmq"), + password=_celery_password, + queue=os.environ.get("RABBITMQ_QUEUE", "dt"), + ) +else: + raise RuntimeError("DATATRACKER_BROKER_URL or CELERY_PASSWORD must be set") + +# mailarchive API key +_mailing_list_archive_api_key = os.environ.get( + "DATATRACKER_MAILING_LIST_ARCHIVE_API_KEY", None +) +if _mailing_list_archive_api_key is None: + raise RuntimeError("DATATRACKER_MAILING_LIST_ARCHIVE_API_KEY must be set") +MAILING_LIST_ARCHIVE_API_KEY = _mailing_list_archive_api_key + +IANA_SYNC_USERNAME = "ietfsync" +IANA_SYNC_CHANGES_URL = "https://datatracker.iana.org:4443/data-tracker/changes" +IANA_SYNC_PROTOCOLS_URL = "http://www.iana.org/protocols/" + +RFC_EDITOR_NOTIFICATION_URL = "http://www.rfc-editor.org/parser/parser.php" + +_registration_api_key = os.environ.get("DATATRACKER_REGISTRATION_API_KEY", None) +if _registration_api_key is None: + raise RuntimeError("DATATRACKER_REGISTRATION_API_KEY must be set") +STATS_REGISTRATION_ATTENDEES_JSON_URL = f"https://registration.ietf.org/{{number}}/attendees/?apikey={_registration_api_key}" + +# Registration Participants API config - key must be set, but the URL can be left +# to the default in settings.py 
+_registration_participants_api_key = os.environ.get( + "DATATRACKER_REGISTRATION_PARTICIPANTS_API_KEY", None +) +if _registration_participants_api_key is None: + raise RuntimeError("DATATRACKER_REGISTRATION_PARTICIPANTS_API_KEY must be set") +REGISTRATION_PARTICIPANTS_API_KEY = _registration_participants_api_key + +_registration_participants_api_url = os.environ.get( + "DATATRACKER_REGISTRATION_PARTICIPANTS_API_URL", None +) +if _registration_participants_api_url is not None: + REGISTRATION_PARTICIPANTS_API_URL = _registration_participants_api_url + +# FIRST_CUTOFF_DAYS = 12 +# SECOND_CUTOFF_DAYS = 12 +# SUBMISSION_CUTOFF_DAYS = 26 +# SUBMISSION_CORRECTION_DAYS = 57 +MEETING_MATERIALS_SUBMISSION_CUTOFF_DAYS = 26 +MEETING_MATERIALS_SUBMISSION_CORRECTION_DAYS = 54 + +# disable htpasswd by setting to a do-nothing command +HTPASSWD_COMMAND = "/bin/true" + +_MEETECHO_CLIENT_ID = os.environ.get("DATATRACKER_MEETECHO_CLIENT_ID", None) +_MEETECHO_CLIENT_SECRET = os.environ.get("DATATRACKER_MEETECHO_CLIENT_SECRET", None) +if _MEETECHO_CLIENT_ID is not None and _MEETECHO_CLIENT_SECRET is not None: + MEETECHO_API_CONFIG = { + "api_base": os.environ.get( + "DATATRACKER_MEETECHO_API_BASE", + "https://meetings.conf.meetecho.com/api/v1/", + ), + "client_id": _MEETECHO_CLIENT_ID, + "client_secret": _MEETECHO_CLIENT_SECRET, + "request_timeout": 3.01, # python-requests doc recommend slightly > a multiple of 3 seconds + } +else: + raise RuntimeError( + "DATATRACKER_MEETECHO_CLIENT_ID and DATATRACKER_MEETECHO_CLIENT_SECRET must be set" + ) + +# For APP_API_TOKENS, accept either base64-encoded JSON or raw JSON, but not both. +# To decode / pretty-print the encoded form, run: +# base64 -d | jq . +# paste the encoded secret into stdin. Copy/paste that into an editor you trust not +# to leave a copy lying around. When done editing, copy/paste the final JSON through +# jq -c | base64 +# and copy/paste the output into the secret store. 
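# Aside: the shell pipeline described above, sketched as the equivalent Python
# round-trip; the endpoint name and token below are placeholders, not real
# configuration.
import json
from base64 import b64decode, b64encode

_example_tokens = {"ietf.api.views.example_endpoint": ["not-a-real-token"]}
# Encode for the secret store (equivalent to: jq -c | base64).
_encoded = b64encode(json.dumps(_example_tokens).encode()).decode()
# Decode the way the settings code below does: b64decode followed by json.loads.
assert json.loads(b64decode(_encoded)) == _example_tokens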
+if "DATATRACKER_APP_API_TOKENS_JSON_B64" in os.environ: + if "DATATRACKER_APP_API_TOKENS_JSON" in os.environ: + raise RuntimeError( + "Only one of DATATRACKER_APP_API_TOKENS_JSON and DATATRACKER_APP_API_TOKENS_JSON_B64 may be set" + ) + _APP_API_TOKENS_JSON = b64decode( + os.environ.get("DATATRACKER_APP_API_TOKENS_JSON_B64") + ) +else: + _APP_API_TOKENS_JSON = os.environ.get("DATATRACKER_APP_API_TOKENS_JSON", None) + +if _APP_API_TOKENS_JSON is not None: + APP_API_TOKENS = json.loads(_APP_API_TOKENS_JSON) +else: + APP_API_TOKENS = {} + +EMAIL_COPY_TO = "" + +# Until we teach the datatracker to look beyond cloudflare for this check +IDSUBMIT_MAX_DAILY_SAME_SUBMITTER = 5000 + +# Leave DATATRACKER_MATOMO_SITE_ID unset to disable Matomo reporting +if "DATATRACKER_MATOMO_SITE_ID" in os.environ: + MATOMO_DOMAIN_PATH = os.environ.get( + "DATATRACKER_MATOMO_DOMAIN_PATH", "analytics.ietf.org" + ) + MATOMO_SITE_ID = os.environ.get("DATATRACKER_MATOMO_SITE_ID") + MATOMO_DISABLE_COOKIES = True + +# Leave DATATRACKER_SCOUT_KEY unset to disable Scout APM agent +_SCOUT_KEY = os.environ.get("DATATRACKER_SCOUT_KEY", None) +if _SCOUT_KEY is not None: + if SERVER_MODE == "production": + PROD_PRE_APPS = [ + "scout_apm.django", + ] + else: + DEV_PRE_APPS = [ + "scout_apm.django", + ] + SCOUT_MONITOR = True + SCOUT_KEY = _SCOUT_KEY + SCOUT_NAME = os.environ.get("DATATRACKER_SCOUT_NAME", "Datatracker") + SCOUT_ERRORS_ENABLED = True + SCOUT_SHUTDOWN_MESSAGE_ENABLED = False + SCOUT_CORE_AGENT_SOCKET_PATH = "tcp://{host}:{port}".format( + host=os.environ.get("DATATRACKER_SCOUT_CORE_AGENT_HOST", "localhost"), + port=os.environ.get("DATATRACKER_SCOUT_CORE_AGENT_PORT", "6590"), + ) + SCOUT_CORE_AGENT_DOWNLOAD = False + SCOUT_CORE_AGENT_LAUNCH = False + SCOUT_REVISION_SHA = __release_hash__[:7] + +STATIC_URL = os.environ.get("DATATRACKER_STATIC_URL", None) +if STATIC_URL is None: + from ietf import __version__ + + STATIC_URL = f"https://static.ietf.org/dt/{__version__}/" + +# Set these to the same as "production" in settings.py, whether production mode or not +MEDIA_ROOT = "/a/www/www6s/lib/dt/media/" +MEDIA_URL = "https://www.ietf.org/lib/dt/media/" +PHOTOS_DIRNAME = "photo" +PHOTOS_DIR = MEDIA_ROOT + PHOTOS_DIRNAME + +# Normally only set for debug, but needed until we have a real FS +DJANGO_VITE["default"]["manifest_path"] = os.path.join( + BASE_DIR, "static/dist-neue/manifest.json" +) + +# Binaries that are different in the docker image +DE_GFM_BINARY = "/usr/local/bin/de-gfm" +IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits" + +# Duplicating production cache from settings.py and using it whether we're in production mode or not +MEMCACHED_HOST = os.environ.get("DT_MEMCACHED_SERVICE_HOST", "127.0.0.1") +MEMCACHED_PORT = os.environ.get("DT_MEMCACHED_SERVICE_PORT", "11211") +from ietf import __version__ + +CACHES = { + "default": { + "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", + "VERSION": __version__, + "KEY_PREFIX": "ietf:dt", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( + f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" + ), + }, + "agenda": { + "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", + # No release-specific VERSION setting. 
+ "KEY_PREFIX": "ietf:dt:agenda", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( + f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" + ), + }, + "proceedings": { + "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", + # No release-specific VERSION setting. + "KEY_PREFIX": "ietf:dt:proceedings", + # Key function is default except with sha384-encoded key + "KEY_FUNCTION": lambda key, key_prefix, version: ( + f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}" + ), + }, + "sessions": { + "BACKEND": "ietf.utils.cache.LenientMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", + # No release-specific VERSION setting. + "KEY_PREFIX": "ietf:dt", + }, + "htmlized": { + "BACKEND": "django.core.cache.backends.filebased.FileBasedCache", + "LOCATION": "/a/cache/datatracker/htmlized", + "OPTIONS": { + "MAX_ENTRIES": 100000, # 100,000 + }, + }, + "pdfized": { + "BACKEND": "django.core.cache.backends.filebased.FileBasedCache", + "LOCATION": "/a/cache/datatracker/pdfized", + "OPTIONS": { + "MAX_ENTRIES": 100000, # 100,000 + }, + }, + "slowpages": { + "BACKEND": "django.core.cache.backends.filebased.FileBasedCache", + "LOCATION": "/a/cache/datatracker/slowpages", + "OPTIONS": { + "MAX_ENTRIES": 5000, + }, + }, + "celery-results": { + "BACKEND": "django.core.cache.backends.memcached.PyMemcacheCache", + "LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}", + "KEY_PREFIX": "ietf:celery", + }, +} + +_csrf_trusted_origins_str = os.environ.get("DATATRACKER_CSRF_TRUSTED_ORIGINS") +if _csrf_trusted_origins_str is not None: + CSRF_TRUSTED_ORIGINS = _multiline_to_list(_csrf_trusted_origins_str) + +# Console logs as JSON instead of plain when running in k8s +LOGGING["handlers"]["console"]["formatter"] = "json" + +# Configure storages for the replica blob store +_blob_store_endpoint_url = os.environ.get("DATATRACKER_BLOB_STORE_ENDPOINT_URL") +_blob_store_access_key = os.environ.get("DATATRACKER_BLOB_STORE_ACCESS_KEY") +_blob_store_secret_key = os.environ.get("DATATRACKER_BLOB_STORE_SECRET_KEY") +if None in (_blob_store_endpoint_url, _blob_store_access_key, _blob_store_secret_key): + raise RuntimeError( + "All of DATATRACKER_BLOB_STORE_ENDPOINT_URL, DATATRACKER_BLOB_STORE_ACCESS_KEY, " + "and DATATRACKER_BLOB_STORE_SECRET_KEY must be set" + ) +_blob_store_bucket_prefix = os.environ.get("DATATRACKER_BLOB_STORE_BUCKET_PREFIX", "") +_blob_store_bucket_suffix = os.environ.get("DATATRACKER_BLOB_STORE_BUCKET_SUFFIX", "") +_blob_store_enable_profiling = ( + os.environ.get("DATATRACKER_BLOB_STORE_ENABLE_PROFILING", "false").lower() == "true" +) +_blob_store_max_attempts = int( + os.environ.get("DATATRACKER_BLOB_STORE_MAX_ATTEMPTS", BLOBSTORAGE_MAX_ATTEMPTS) +) +_blob_store_connect_timeout = float( + os.environ.get( + "DATATRACKER_BLOB_STORE_CONNECT_TIMEOUT", BLOBSTORAGE_CONNECT_TIMEOUT + ) +) +_blob_store_read_timeout = float( + os.environ.get("DATATRACKER_BLOB_STORE_READ_TIMEOUT", BLOBSTORAGE_READ_TIMEOUT) +) + +for storagename in ARTIFACT_STORAGE_NAMES: + if storagename in ["staging"]: + continue + replica_storagename = f"r2-{storagename}" + adjusted_bucket_name = ( + _blob_store_bucket_prefix + storagename + _blob_store_bucket_suffix + ).strip() + STORAGES[replica_storagename] = { + "BACKEND": "ietf.doc.storage.MetadataS3Storage", + "OPTIONS": dict( + endpoint_url=_blob_store_endpoint_url, + access_key=_blob_store_access_key, + 
secret_key=_blob_store_secret_key, + security_token=None, + client_config=botocore.config.Config( + request_checksum_calculation="when_required", + response_checksum_validation="when_required", + signature_version="s3v4", + connect_timeout=_blob_store_connect_timeout, + read_timeout=_blob_store_read_timeout, + retries={"total_max_attempts": _blob_store_max_attempts}, + ), + verify=False, + bucket_name=adjusted_bucket_name, + ietf_log_blob_timing=_blob_store_enable_profiling, + ), + } + +# Configure storage for the red bucket - assume it uses the same credentials as +# other blobs +_red_bucket_name = os.environ.get("DATATRACKER_BLOB_STORE_RED_BUCKET_NAME", "").strip() +if _red_bucket_name == "": + raise RuntimeError("DATATRACKER_BLOB_STORE_RED_BUCKET_NAME must be set") + +STORAGES["red_bucket"] = { + "BACKEND": "storages.backends.s3.S3Storage", + "OPTIONS": dict( + endpoint_url=_blob_store_endpoint_url, + access_key=_blob_store_access_key, + secret_key=_blob_store_secret_key, + security_token=None, + client_config=botocore.config.Config( + request_checksum_calculation="when_required", + response_checksum_validation="when_required", + signature_version="s3v4", + connect_timeout=_blob_store_connect_timeout, + read_timeout=_blob_store_read_timeout, + retries={"total_max_attempts": _blob_store_max_attempts}, + ), + verify=False, + bucket_name=_red_bucket_name, + ), +} +RFCINDEX_DELETE_THEN_WRITE = False # S3Storage allows file_overwrite by default +RFCINDEX_OUTPUT_PATH = os.environ.get("DATATRACKER_RFCINDEX_OUTPUT_PATH", "other/") +RFCINDEX_INPUT_PATH = os.environ.get("DATATRACKER_RFCINDEX_INPUT_PATH", "") + +# Configure the blobdb app for artifact storage +_blobdb_replication_enabled = ( + os.environ.get("DATATRACKER_BLOBDB_REPLICATION_ENABLED", "true").lower() == "true" +) +_blobdb_replication_verbose_logging = ( + os.environ.get("DATATRACKER_BLOBDB_REPLICATION_VERBOSE_LOGGING", "false").lower() + == "true" +) + +BLOBDB_REPLICATION = { + "ENABLED": _blobdb_replication_enabled, + "DEST_STORAGE_PATTERN": "r2-{bucket}", + "INCLUDE_BUCKETS": ARTIFACT_STORAGE_NAMES, + "EXCLUDE_BUCKETS": ["staging"], + "VERBOSE_LOGGING": _blobdb_replication_verbose_logging, +} + +# Optionally disable password strength enforcement at login (on by default) +PASSWORD_POLICY_ENFORCE_AT_LOGIN = ( + os.environ.get("DATATRACKER_ENFORCE_PW_POLICY", "true").lower() != "false" +) + +# Typesense search indexing +SEARCHINDEX_CONFIG = { + "TYPESENSE_API_URL": os.environ.get("DATATRACKER_TYPESENSE_API_URL", ""), + "TYPESENSE_API_KEY": os.environ.get("DATATRACKER_TYPESENSE_API_KEY", ""), + "TASK_RETRY_DELAY": os.environ.get("DATATRACKER_SEARCHINDEX_TASK_RETRY_DELAY", 10), + "TASK_MAX_RETRIES": os.environ.get( + "DATATRACKER_SEARCHINDEX_TASK_MAX_RETRIES", "12" + ), +} + +# Errata system api configuration +ERRATA_METADATA_NOTIFICATION_API_KEY = os.environ.get( + "DATATRACKER_ERRATA_METADATA_NOTIFICATION_API_KEY", None +) +if ERRATA_METADATA_NOTIFICATION_API_KEY is not None: + ERRATA_METADATA_NOTIFICATION_URL = os.environ.get( + "DATATRACKER_ERRATA_METADATA_NOTIFICATION_URL", None + ) + if ERRATA_METADATA_NOTIFICATION_URL is None: + raise RuntimeError( + "DATATRACKER_ERRATA_METADATA_NOTIFICATION_URL must be set if " + "DATATRACKER_ERRATA_METADATA_NOTIFICATION_API_KEY is provided" + ) + +# name (with path) of errata.json in the red bucket +ERRATA_JSON_BLOB_NAME = os.environ.get( + "DATATRACKER_ERRATA_JSON_BLOB_NAME", "other/errata.json" +) diff --git a/media/.gitignore b/media/.gitignore deleted file mode 100644 index 
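# Aside: a rough sketch (not from this change) of how the replica storage
# aliases and bucket names configured above line up with the
# DEST_STORAGE_PATTERN used by BLOBDB_REPLICATION. The storage names, prefix,
# and suffix below are placeholder values.
artifact_storage_names = ["draft", "staging", "proceedings"]  # stand-in for ARTIFACT_STORAGE_NAMES
prefix, suffix = "dt-", "-prod"  # stand-ins for the BUCKET_PREFIX/SUFFIX env settings
dest_storage_pattern = "r2-{bucket}"

for name in artifact_storage_names:
    if name == "staging":
        # mirrors EXCLUDE_BUCKETS: staging gets neither a replica storage nor replication
        continue
    storage_alias = dest_storage_pattern.format(bucket=name)  # e.g. "r2-draft"
    bucket_name = (prefix + name + suffix).strip()            # e.g. "dt-draft-prod"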
dfa8ca37ce..0000000000 --- a/media/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/floor diff --git a/media/photo/nopictureavailable.jpg b/media/photo/nopictureavailable.jpg deleted file mode 100644 index 0895f9f57c..0000000000 Binary files a/media/photo/nopictureavailable.jpg and /dev/null differ diff --git a/media/photo/profile-default.jpg b/media/photo/profile-default.jpg deleted file mode 100644 index d6b03e1004..0000000000 Binary files a/media/photo/profile-default.jpg and /dev/null differ diff --git a/mypy.ini b/mypy.ini index 19df7ec9b0..4acaf98c95 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2,6 +2,9 @@ ignore_missing_imports = True +# allow PEP 695 type aliases (flag needed until mypy >= 1.13) +enable_incomplete_feature = NewGenericSyntax + plugins = mypy_django_plugin.main diff --git a/package.json b/package.json index 74baf2e1b4..29ead19d23 100644 --- a/package.json +++ b/package.json @@ -7,36 +7,40 @@ "legacy:build": "parcel build" }, "dependencies": { - "@fullcalendar/bootstrap5": "6.1.9", - "@fullcalendar/core": "6.1.9", - "@fullcalendar/daygrid": "6.1.9", - "@fullcalendar/icalendar": "6.1.9", - "@fullcalendar/interaction": "6.1.9", - "@fullcalendar/list": "6.1.9", - "@fullcalendar/luxon3": "6.1.9", - "@fullcalendar/timegrid": "6.1.9", - "@fullcalendar/vue3": "6.1.9", + "@fullcalendar/bootstrap5": "6.1.11", + "@fullcalendar/core": "6.1.11", + "@fullcalendar/daygrid": "6.1.11", + "@fullcalendar/icalendar": "6.1.11", + "@fullcalendar/interaction": "6.1.11", + "@fullcalendar/list": "6.1.11", + "@fullcalendar/luxon3": "6.1.11", + "@fullcalendar/timegrid": "6.1.11", + "@fullcalendar/vue3": "6.1.11", + "@kurkle/color": "0.3.1", "@popperjs/core": "2.11.8", "@twuni/emojify": "1.0.2", - "bootstrap": "5.3.2", - "bootstrap-icons": "1.11.1", + "bootstrap": "5.3.3", + "bootstrap-icons": "1.11.3", "browser-fs-access": "0.35.0", - "caniuse-lite": "1.0.30001538", - "d3": "7.8.5", + "caniuse-lite": "1.0.30001603", + "chart.js": "^4.5.1", + "chartjs-plugin-autocolors": "0.3.1", + "chartjs-plugin-zoom": "2.2.0", + "d3": "7.9.0", "file-saver": "2.0.5", - "highcharts": "11.1.0", + "highcharts": "11.4.0", "ical.js": "1.5.0", "jquery": "3.7.1", "js-cookie": "3.0.5", "list.js": "2.3.1", "lodash": "4.17.21", "lodash-es": "4.17.21", - "luxon": "3.4.3", - "moment": "2.29.4", - "moment-timezone": "0.5.43", + "luxon": "3.4.4", + "moment": "2.30.1", + "moment-timezone": "0.5.45", "ms": "2.1.3", "murmurhash-js": "1.0.0", - "naive-ui": "2.35.0", + "naive-ui": "2.38.1", "pinia": "2.1.7", "pinia-plugin-persist": "1.0.0", "select2": "4.1.0-rc.0", @@ -44,35 +48,36 @@ "send": "0.18.0", "shepherd.js": "11.2.0", "slugify": "1.6.6", - "sortablejs": "1.15.0", + "sortablejs": "1.15.2", "vanillajs-datepicker": "1.3.4", - "vue": "3.3.4", - "vue-router": "4.2.5", + "vue": "3.4.21", + "vue-router": "4.3.0", "zxcvbn": "4.4.2" }, "devDependencies": { - "@parcel/optimizer-data-url": "2.10.0", - "@parcel/transformer-inline-string": "2.10.0", - "@parcel/transformer-sass": "2.10.0", - "@rollup/pluginutils": "5.0.5", - "@vitejs/plugin-vue": "4.4.0", + "@parcel/optimizer-data-url": "2.12.0", + "@parcel/transformer-inline-string": "2.12.0", + "@parcel/transformer-sass": "2.12.0", + "@rollup/pluginutils": "5.1.0", + "@vitejs/plugin-vue": "4.6.2", + "@vue/language-plugin-pug": "2.0.7", "browserlist": "latest", - "c8": "8.0.1", - "eslint": "8.51.0", + "c8": "9.1.0", + "eslint": "8.57.0", "eslint-config-standard": "17.1.0", "eslint-plugin-cypress": "2.15.1", - "eslint-plugin-import": "2.28.1", - "eslint-plugin-n": "16.2.0", + 
"eslint-plugin-import": "2.29.1", + "eslint-plugin-n": "16.6.2", "eslint-plugin-node": "11.1.0", "eslint-plugin-promise": "6.1.1", - "eslint-plugin-vue": "9.17.0", - "html-validate": "8.5.0", + "eslint-plugin-vue": "9.24.0", + "html-validate": "8.18.1", "jquery-migrate": "3.4.1", - "parcel": "2.10.0", + "parcel": "2.12.0", "pug": "3.0.2", - "sass": "1.69.4", + "sass": "1.72.0", "seedrandom": "3.0.5", - "vite": "4.4.11" + "vite": "4.5.3" }, "targets": { "ietf": { @@ -110,11 +115,15 @@ "ietf/static/images/irtf-logo-card.png", "ietf/static/images/irtf-logo-white.svg", "ietf/static/images/irtf-logo.svg", + "ietf/static/js/add_session_recordings.js", + "ietf/static/js/attendees-chart.js", "ietf/static/js/agenda_filter.js", "ietf/static/js/agenda_materials.js", + "ietf/static/js/announcement.js", "ietf/static/js/complete-review.js", "ietf/static/js/create_timeslot.js", "ietf/static/js/create_timeslot.js", + "ietf/static/js/custom_striped.js", "ietf/static/js/d3.js", "ietf/static/js/datepicker.js", "ietf/static/js/doc-search.js", @@ -131,6 +140,7 @@ "ietf/static/js/highcharts.js", "ietf/static/js/highstock.js", "ietf/static/js/ietf.js", + "ietf/static/js/investigate.js", "ietf/static/js/ipr-edit.js", "ietf/static/js/ipr-search.js", "ietf/static/js/js-cookie.js", @@ -140,10 +150,16 @@ "ietf/static/js/manage-community-list.js", "ietf/static/js/manage-review-requests.js", "ietf/static/js/meeting-interim-request.js", + "ietf/static/js/meeting_stats.js", + "ietf/static/js/meeting_timeline.js", "ietf/static/js/moment.js", + "ietf/static/js/navbar-doc-search.js", "ietf/static/js/password_strength.js", "ietf/static/js/select2.js", + "ietf/static/js/session_details.js", "ietf/static/js/session_details_form.js", + "ietf/static/js/session_form.js", + "ietf/static/js/session_request.js", "ietf/static/js/sortable.js", "ietf/static/js/stats.js", "ietf/static/js/status-change-edit-relations.js", @@ -152,6 +168,7 @@ "ietf/static/js/timezone.js", "ietf/static/js/upcoming.js", "ietf/static/js/upload-material.js", + "ietf/static/js/upload-session-agenda.js", "ietf/static/js/upload_bofreq.js", "ietf/static/js/upload_statement.js", "ietf/static/js/zxcvbn.js" @@ -203,8 +220,6 @@ "ietf/secr/static/images/tooltag-arrowright.webp", "ietf/secr/static/images/tooltag-arrowright_over.webp", "ietf/secr/static/js/dynamic_inlines.js", - "ietf/secr/static/js/session_form.js", - "ietf/secr/static/js/sessions.js", "ietf/secr/static/js/utils.js" ] } diff --git a/patch/add-django-cprofile-filter.patch b/patch/add-django-cprofile-filter.patch index 128d5a9f09..bf684a0b33 100644 --- a/patch/add-django-cprofile-filter.patch +++ b/patch/add-django-cprofile-filter.patch @@ -1,15 +1,9 @@ ---- django_cprofile_middleware/middleware.py.old 2018-04-04 06:32:29.282187502 -0700 -+++ django_cprofile_middleware/middleware.py 2018-04-06 10:11:18.936855634 -0700 -@@ -1,4 +1,5 @@ - import pstats -+import re - - try: - import cProfile as profile -@@ -14,6 +15,15 @@ - from django.utils.deprecation import MiddlewareMixin - - +--- django_cprofile_middleware/middleware.py.old 2024-06-27 21:03:56.975128007 +0000 ++++ django_cprofile_middleware/middleware.py 2024-06-27 23:45:59.421683008 +0000 +@@ -19,6 +19,16 @@ + from django_cprofile_middleware.utils import MiddlewareMixin + + +class Stats(pstats.Stats): + def filter_stats(self, regex): + oldstats = self.stats @@ -18,17 +12,64 @@ + for func, (cc, nc, tt, ct, callers) in oldstats.iteritems(): + if filter.search(pstats.func_std_string(func)): + newstats[func] = (cc, nc, tt, ct, callers) ++ + class 
ProfilerMiddleware(MiddlewareMixin): """ Simple profile middleware to profile django views. To run it, add ?prof to -@@ -62,8 +72,13 @@ +@@ -38,9 +48,11 @@ + ?download => Download profile file suitable for visualization. For example + in snakeviz or RunSnakeRun + +- This is adapted from an example found here: +- http://www.slideshare.net/zeeg/django-con-high-performance-django-presentation. ++ Patched with https://github.com/omarish/django-cprofile-middleware/pull/23 ++ for operation with Django 4.2.5+ + """ ++ PROFILER_REQUEST_ATTR_NAME = '_django_cprofile_middleware_profiler' ++ + def can(self, request): + requires_staff = getattr( + settings, "DJANGO_CPROFILE_MIDDLEWARE_REQUIRE_STAFF", True) +@@ -52,10 +64,11 @@ + + def process_view(self, request, callback, callback_args, callback_kwargs): + if self.can(request): +- self.profiler = profile.Profile() ++ profiler = profile.Profile() ++ setattr(request, self.PROFILER_REQUEST_ATTR_NAME, profiler) + args = (request,) + callback_args + try: +- return self.profiler.runcall( ++ return profiler.runcall( + callback, *args, **callback_kwargs) + except Exception: + # we want the process_exception middleware to fire +@@ -63,12 +76,13 @@ + return + + def process_response(self, request, response): +- if self.can(request): +- self.profiler.create_stats() ++ if hasattr(request, self.PROFILER_REQUEST_ATTR_NAME): ++ profiler = getattr(request, self.PROFILER_REQUEST_ATTR_NAME) ++ profiler.create_stats() + if 'download' in request.GET: + import marshal + +- output = marshal.dumps(self.profiler.stats) ++ output = marshal.dumps(profiler.stats) + response = HttpResponse( + output, content_type='application/octet-stream') + response['Content-Disposition'] = 'attachment;' \ +@@ -76,9 +90,14 @@ response['Content-Length'] = len(output) else: io = StringIO() - stats = pstats.Stats(self.profiler, stream=io) ++ stats = Stats(profiler, stream=io) + - stats.strip_dirs().sort_stats(request.GET.get('sort', 'time')) -+ stats = Stats(self.profiler, stream=io) + if request.GET.get('stripdirs', False): + stats = stats.strip_dirs() + filter = request.GET.get('filter', None) @@ -36,5 +77,5 @@ + stats.filter_stats(filter) + stats.sort_stats(request.GET.get('psort') or 'time') stats.print_stats(int(request.GET.get('count', 100))) + response = HttpResponse('
<pre>%s</pre>
    ' % io.getvalue()) - return response diff --git a/patch/django-cookie-delete-with-all-settings.patch b/patch/django-cookie-delete-with-all-settings.patch index fb8bbbe4fe..4ceaf8fceb 100644 --- a/patch/django-cookie-delete-with-all-settings.patch +++ b/patch/django-cookie-delete-with-all-settings.patch @@ -9,9 +9,9 @@ samesite=settings.SESSION_COOKIE_SAMESITE, ) ---- django/http/response.py.orig 2020-08-13 11:16:04.060627793 +0200 -+++ django/http/response.py 2020-08-13 11:54:03.482476973 +0200 -@@ -282,20 +282,28 @@ +--- django/http/response.py.orig 2025-12-02 22:12:05.197283001 +0000 ++++ django/http/response.py 2025-12-02 22:26:01.396576013 +0000 +@@ -286,20 +286,28 @@ value = signing.get_cookie_signer(salt=key + salt).sign(value) return self.set_cookie(key, value, **kwargs) diff --git a/patch/tastypie-django22-fielderror-response.patch b/patch/tastypie-django22-fielderror-response.patch index ffb152d319..3b4418fc66 100644 --- a/patch/tastypie-django22-fielderror-response.patch +++ b/patch/tastypie-django22-fielderror-response.patch @@ -1,5 +1,5 @@ ---- tastypie/resources.py.orig 2020-08-24 13:14:25.463166100 +0200 -+++ tastypie/resources.py 2020-08-24 13:15:55.133759224 +0200 +--- tastypie/resources.py.orig 2025-07-29 19:00:01.526948002 +0000 ++++ tastypie/resources.py 2025-07-29 19:07:15.324127008 +0000 @@ -12,7 +12,7 @@ ObjectDoesNotExist, MultipleObjectsReturned, ValidationError, FieldDoesNotExist ) @@ -9,13 +9,13 @@ from django.db.models.fields.related import ForeignKey from django.urls.conf import re_path from tastypie.utils.timezone import make_naive_utc -@@ -2198,6 +2198,8 @@ +@@ -2216,6 +2216,8 @@ return self.authorized_read_list(objects, bundle) except ValueError: raise BadRequest("Invalid resource lookup data provided (mismatched type).") + except FieldError as e: + raise BadRequest("Invalid resource lookup: %s." 
% e) - + def obj_get(self, bundle, **kwargs): """ --- tastypie/paginator.py.orig 2020-08-25 15:24:46.391588425 +0200 diff --git a/playwright/.gitignore b/playwright/.gitignore index 75e854d8dc..f38d036a79 100644 --- a/playwright/.gitignore +++ b/playwright/.gitignore @@ -2,3 +2,4 @@ node_modules/ /test-results/ /playwright-report/ /playwright/.cache/ +auth.json \ No newline at end of file diff --git a/playwright/helpers/common.js b/playwright/helpers/common.js index 5ba39ba022..c4dd7e2640 100644 --- a/playwright/helpers/common.js +++ b/playwright/helpers/common.js @@ -13,5 +13,29 @@ module.exports = { return rect.top < bottom && rect.top > 0 - rect.height }) + }, + /** + * Override page DateTime with a new value + * + * @param {Object} page Page object + * @param {Object} dateTimeOverride New DateTime object + */ + overridePageDateTime: async (page, dateTimeOverride) => { + await page.addInitScript(`{ + // Extend Date constructor to default to fixed time + Date = class extends Date { + constructor(...args) { + if (args.length === 0) { + super(${dateTimeOverride.toMillis()}); + } else { + super(...args); + } + } + } + // Override Date.now() to start from fixed time + const __DateNowOffset = ${dateTimeOverride.toMillis()} - Date.now(); + const __DateNow = Date.now; + Date.now = () => __DateNow() + __DateNowOffset; + }`) } } diff --git a/playwright/helpers/meeting.js b/playwright/helpers/meeting.js index f07228b473..634ca2e8c6 100644 --- a/playwright/helpers/meeting.js +++ b/playwright/helpers/meeting.js @@ -395,7 +395,7 @@ module.exports = { name: 'Hackathon Kickoff', startDateTime: day1.set({ hour: 10, minute: 30 }), duration: '30m', - ...findAreaGroup('hackathon-kickoff', categories[2]), + ...findAreaGroup('hackathon', categories[2]), showAgenda: true, hasAgenda: true, hasRecordings: true, @@ -609,6 +609,9 @@ module.exports = { startDateTime: curDay.set({ hour: 17, minute: 30 }), duration: '2h', type: 'plenary', + showAgenda: true, + hasAgenda: true, + hasRecordings: true, ...findAreaGroup('ietf-plenary', categories[2]) }, floors)) } @@ -630,7 +633,7 @@ module.exports = { }, categories, isCurrentMeeting: dateMode !== 'past', - useNotes: true, + usesNotes: true, schedule, floors } diff --git a/playwright/package-lock.json b/playwright/package-lock.json index 2d968a47dd..abe2518ef2 100644 --- a/playwright/package-lock.json +++ b/playwright/package-lock.json @@ -6,23 +6,23 @@ "packages": { "": { "dependencies": { - "@faker-js/faker": "8.2.0", + "@faker-js/faker": "8.4.1", "lodash": "4.17.21", "lodash-es": "4.17.21", - "luxon": "3.4.3", + "luxon": "3.4.4", "ms": "2.1.3", "seedrandom": "3.0.5", "slugify": "1.6.6" }, "devDependencies": { - "@playwright/test": "1.39.0", - "eslint": "8.51.0", + "@playwright/test": "1.42.1", + "eslint": "8.57.0", "eslint-config-standard": "17.1.0", - "eslint-plugin-import": "2.28.1", - "eslint-plugin-n": "16.2.0", + "eslint-plugin-import": "2.29.1", + "eslint-plugin-n": "16.6.2", "eslint-plugin-node": "11.1.0", "eslint-plugin-promise": "6.1.1", - "npm-check-updates": "16.14.6" + "npm-check-updates": "16.14.18" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -69,9 +69,9 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.2.tgz", - "integrity": "sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": 
"sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", "dev": true, "dependencies": { "ajv": "^6.12.4", @@ -92,18 +92,18 @@ } }, "node_modules/@eslint/js": { - "version": "8.51.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.51.0.tgz", - "integrity": "sha512-HxjQ8Qn+4SI3/AFv6sOrDB+g6PpUTDwSJiQqOrnneEk8L71161srI9gjzzZvYVbzHiVg/BvcH95+cK/zfIt4pg==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", + "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, "node_modules/@faker-js/faker": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@faker-js/faker/-/faker-8.2.0.tgz", - "integrity": "sha512-VacmzZqVxdWdf9y64lDOMZNDMM/FQdtM9IsaOPKOm2suYwEatb8VkdHqOzXcDnZbk7YDE2BmsJmy/2Hmkn563g==", + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/@faker-js/faker/-/faker-8.4.1.tgz", + "integrity": "sha512-XQ3cU+Q8Uqmrbf2e0cIC/QN43sTBSC8KF12u29Mb47tWrt2hAgBXSgpZMj4Ao8Uk0iJcU99QsOCaIL8934obCg==", "funding": [ { "type": "opencollective", @@ -116,13 +116,13 @@ } }, "node_modules/@humanwhocodes/config-array": { - "version": "0.11.11", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.11.tgz", - "integrity": "sha512-N2brEuAadi0CcdeMXUkhbZB84eskAc8MEX1By6qEchoVywSgXPIjou4rYsl0V3Hj0ZnuGycGCjdNgockbzeWNA==", + "version": "0.11.14", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", + "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", "dev": true, "dependencies": { - "@humanwhocodes/object-schema": "^1.2.1", - "debug": "^4.1.1", + "@humanwhocodes/object-schema": "^2.0.2", + "debug": "^4.3.1", "minimatch": "^3.0.5" }, "engines": { @@ -143,9 +143,9 @@ } }, "node_modules/@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.2.tgz", + "integrity": "sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw==", "dev": true }, "node_modules/@isaacs/cliui": { @@ -399,12 +399,12 @@ } }, "node_modules/@playwright/test": { - "version": "1.39.0", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.39.0.tgz", - "integrity": "sha512-3u1iFqgzl7zr004bGPYiN/5EZpRUSFddQBra8Rqll5N0/vfpqlP9I9EXqAoGacuAbX6c9Ulg/Cjqglp5VkK6UQ==", + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.42.1.tgz", + "integrity": "sha512-Gq9rmS54mjBL/7/MvBaNOBwbfnh7beHvS6oS4srqXFcQHpQCV1+c8JXWE8VLPyRDhgS3H8x8A7hztqI9VnwrAQ==", "dev": true, "dependencies": { - "playwright": "1.39.0" + "playwright": "1.42.1" }, "bin": { "playwright": "cli.js" @@ -563,6 +563,18 @@ "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", "dev": true }, + "node_modules/@types/semver-utils": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@types/semver-utils/-/semver-utils-1.1.3.tgz", + "integrity": "sha512-T+YwkslhsM+CeuhYUxyAjWm7mJ5am/K10UX40RuA6k6Lc7eGtq8iY2xOzy7Vq0GOqhl/xZl5l2FwURZMTPTUww==", + 
"dev": true + }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "dev": true + }, "node_modules/abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", @@ -570,9 +582,9 @@ "dev": true }, "node_modules/acorn": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", - "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", + "version": "8.11.3", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", + "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==", "dev": true, "bin": { "acorn": "bin/acorn" @@ -717,15 +729,15 @@ } }, "node_modules/array-includes": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.6.tgz", - "integrity": "sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==", + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.7.tgz", + "integrity": "sha512-dlcsNBIiWhPkHdOEEKnehA+RNUWDc4UqFtnIXU4uuYDPtA4LDkr7qip2p0VvFAEXNDr0yWZ9PJyIRiGjRLQzwQ==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "get-intrinsic": "^1.2.1", "is-string": "^1.0.7" }, "engines": { @@ -745,16 +757,16 @@ } }, "node_modules/array.prototype.findlastindex": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.2.tgz", - "integrity": "sha512-tb5thFFlUcp7NdNF6/MpDk/1r/4awWG1FIz3YqDf+/zJSTezBb+/5WViH41obXULHVpDzoiCLpJ/ZO9YbJMsdw==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz", + "integrity": "sha512-LzLoiOMAxvy+Gd3BAq3B7VeIgPdo+Q8hthvKtXybMvRV0jrXfJM/t8mw7nNlpEcVlVUnCnM2KSX4XU5HmpodOA==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.1.3" + "get-intrinsic": "^1.2.1" }, "engines": { "node": ">= 0.4" @@ -764,14 +776,14 @@ } }, "node_modules/array.prototype.flat": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz", - "integrity": "sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz", + "integrity": "sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", "es-shim-unscopables": "^1.0.0" }, "engines": { @@ -782,14 +794,14 @@ } }, "node_modules/array.prototype.flatmap": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz", - 
"integrity": "sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz", + "integrity": "sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", "es-shim-unscopables": "^1.0.0" }, "engines": { @@ -799,6 +811,27 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.2.tgz", + "integrity": "sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw==", + "dev": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.0", + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "get-intrinsic": "^1.2.1", + "is-array-buffer": "^3.0.2", + "is-shared-array-buffer": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/available-typed-arrays": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", @@ -941,6 +974,18 @@ "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "dev": true }, + "node_modules/builtin-modules": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", + "integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==", + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/builtins": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz", @@ -1056,13 +1101,14 @@ } }, "node_modules/call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", + "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", "dev": true, "dependencies": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.1", + "set-function-length": "^1.1.1" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -1357,12 +1403,27 @@ "node": ">=10" } }, + "node_modules/define-data-property": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", + "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.1", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/define-properties": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.0.tgz", - "integrity": 
"sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", "dev": true, "dependencies": { + "define-data-property": "^1.0.1", "has-property-descriptors": "^1.0.0", "object-keys": "^1.1.1" }, @@ -1465,25 +1526,26 @@ "dev": true }, "node_modules/es-abstract": { - "version": "1.21.2", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.21.2.tgz", - "integrity": "sha512-y/B5POM2iBnIxCiernH1G7rC9qQoM77lLIMQLuob0zhp8C56Po81+2Nj0WFKnd0pNReDTnkYryc+zhOzpEIROg==", + "version": "1.22.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.3.tgz", + "integrity": "sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA==", "dev": true, "dependencies": { "array-buffer-byte-length": "^1.0.0", + "arraybuffer.prototype.slice": "^1.0.2", "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "call-bind": "^1.0.5", "es-set-tostringtag": "^2.0.1", "es-to-primitive": "^1.2.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.2.0", + "function.prototype.name": "^1.1.6", + "get-intrinsic": "^1.2.2", "get-symbol-description": "^1.0.0", "globalthis": "^1.0.3", "gopd": "^1.0.1", - "has": "^1.0.3", "has-property-descriptors": "^1.0.0", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", + "hasown": "^2.0.0", "internal-slot": "^1.0.5", "is-array-buffer": "^3.0.2", "is-callable": "^1.2.7", @@ -1491,19 +1553,23 @@ "is-regex": "^1.1.4", "is-shared-array-buffer": "^1.0.2", "is-string": "^1.0.7", - "is-typed-array": "^1.1.10", + "is-typed-array": "^1.1.12", "is-weakref": "^1.0.2", - "object-inspect": "^1.12.3", + "object-inspect": "^1.13.1", "object-keys": "^1.1.1", "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.4.3", + "regexp.prototype.flags": "^1.5.1", + "safe-array-concat": "^1.0.1", "safe-regex-test": "^1.0.0", - "string.prototype.trim": "^1.2.7", - "string.prototype.trimend": "^1.0.6", - "string.prototype.trimstart": "^1.0.6", + "string.prototype.trim": "^1.2.8", + "string.prototype.trimend": "^1.0.7", + "string.prototype.trimstart": "^1.0.7", + "typed-array-buffer": "^1.0.0", + "typed-array-byte-length": "^1.0.0", + "typed-array-byte-offset": "^1.0.0", "typed-array-length": "^1.0.4", "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.9" + "which-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" @@ -1513,26 +1579,26 @@ } }, "node_modules/es-set-tostringtag": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", - "integrity": "sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz", + "integrity": "sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==", "dev": true, "dependencies": { - "get-intrinsic": "^1.1.3", - "has": "^1.0.3", - "has-tostringtag": "^1.0.0" + "get-intrinsic": "^1.2.2", + "has-tostringtag": "^1.0.0", + "hasown": "^2.0.0" }, "engines": { "node": ">= 0.4" } }, "node_modules/es-shim-unscopables": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", - "integrity": 
"sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", + "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", "dev": true, "dependencies": { - "has": "^1.0.3" + "hasown": "^2.0.0" } }, "node_modules/es-to-primitive": { @@ -1577,18 +1643,19 @@ } }, "node_modules/eslint": { - "version": "8.51.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.51.0.tgz", - "integrity": "sha512-2WuxRZBrlwnXi+/vFSJyjMqrNjtJqiasMzehF0shoLaW7DzS3/9Yvrmq5JiT66+pNjiX4UBnLDiKHcWAr/OInA==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", + "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", - "@eslint/eslintrc": "^2.1.2", - "@eslint/js": "8.51.0", - "@humanwhocodes/config-array": "^0.11.11", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.0", + "@humanwhocodes/config-array": "^0.11.14", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", @@ -1630,6 +1697,18 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/eslint-compat-utils": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/eslint-compat-utils/-/eslint-compat-utils-0.1.2.tgz", + "integrity": "sha512-Jia4JDldWnFNIru1Ehx1H5s9/yxiRHY/TimCuUc0jNexew3cF1gI6CYZil1ociakfWO3rRqFjl1mskBblB3RYg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "eslint": ">=6.0.0" + } + }, "node_modules/eslint-config-standard": { "version": "17.1.0", "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.1.0.tgz", @@ -1660,14 +1739,14 @@ } }, "node_modules/eslint-import-resolver-node": { - "version": "0.3.7", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz", - "integrity": "sha512-gozW2blMLJCeFpBwugLTGyvVjNoeo1knonXAcatC6bjPBZitotxdWf7Gimr25N4c0AAOo4eOUfaG82IJPDpqCA==", + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", + "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", "dev": true, "dependencies": { "debug": "^3.2.7", - "is-core-module": "^2.11.0", - "resolve": "^1.22.1" + "is-core-module": "^2.13.0", + "resolve": "^1.22.4" } }, "node_modules/eslint-import-resolver-node/node_modules/debug": { @@ -1706,13 +1785,14 @@ } }, "node_modules/eslint-plugin-es-x": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-es-x/-/eslint-plugin-es-x-7.1.0.tgz", - "integrity": "sha512-AhiaF31syh4CCQ+C5ccJA0VG6+kJK8+5mXKKE7Qs1xcPRg02CDPOj3mWlQxuWS/AYtg7kxrDNgW9YW3vc0Q+Mw==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es-x/-/eslint-plugin-es-x-7.5.0.tgz", + "integrity": "sha512-ODswlDSO0HJDzXU0XvgZ3lF3lS3XAZEossh15Q2UHjwrJggWeBoKqqEsLTZLXl+dh5eOAozG0zRcYtuE35oTuQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.1.2", - "@eslint-community/regexpp": "^4.5.0" + "@eslint-community/regexpp": "^4.6.0", + "eslint-compat-utils": "^0.1.2" }, "engines": { 
"node": "^14.18.0 || >=16.0.0" @@ -1725,28 +1805,28 @@ } }, "node_modules/eslint-plugin-import": { - "version": "2.28.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.28.1.tgz", - "integrity": "sha512-9I9hFlITvOV55alzoKBI+K9q74kv0iKMeY6av5+umsNwayt59fz692daGyjR+oStBQgx6nwR9rXldDev3Clw+A==", + "version": "2.29.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz", + "integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==", "dev": true, "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.findlastindex": "^1.2.2", - "array.prototype.flat": "^1.3.1", - "array.prototype.flatmap": "^1.3.1", + "array-includes": "^3.1.7", + "array.prototype.findlastindex": "^1.2.3", + "array.prototype.flat": "^1.3.2", + "array.prototype.flatmap": "^1.3.2", "debug": "^3.2.7", "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.7", + "eslint-import-resolver-node": "^0.3.9", "eslint-module-utils": "^2.8.0", - "has": "^1.0.3", - "is-core-module": "^2.13.0", + "hasown": "^2.0.0", + "is-core-module": "^2.13.1", "is-glob": "^4.0.3", "minimatch": "^3.1.2", - "object.fromentries": "^2.0.6", - "object.groupby": "^1.0.0", - "object.values": "^1.1.6", + "object.fromentries": "^2.0.7", + "object.groupby": "^1.0.1", + "object.values": "^1.1.7", "semver": "^6.3.1", - "tsconfig-paths": "^3.14.2" + "tsconfig-paths": "^3.15.0" }, "engines": { "node": ">=4" @@ -1786,16 +1866,18 @@ } }, "node_modules/eslint-plugin-n": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-16.2.0.tgz", - "integrity": "sha512-AQER2jEyQOt1LG6JkGJCCIFotzmlcCZFur2wdKrp1JX2cNotC7Ae0BcD/4lLv3lUAArM9uNS8z/fsvXTd0L71g==", + "version": "16.6.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-16.6.2.tgz", + "integrity": "sha512-6TyDmZ1HXoFQXnhCTUjVFULReoBPOAjpuiKELMkeP40yffI/1ZRO+d9ug/VC6fqISo2WkuIBk3cvuRPALaWlOQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "builtins": "^5.0.1", - "eslint-plugin-es-x": "^7.1.0", + "eslint-plugin-es-x": "^7.5.0", "get-tsconfig": "^4.7.0", + "globals": "^13.24.0", "ignore": "^5.2.4", + "is-builtin-module": "^3.2.1", "is-core-module": "^2.12.1", "minimatch": "^3.1.2", "resolve": "^1.22.2", @@ -2196,21 +2278,24 @@ } }, "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/function.prototype.name": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", - "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", + "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - 
"define-properties": "^1.1.3", - "es-abstract": "^1.19.0", - "functions-have-names": "^1.2.2" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "functions-have-names": "^1.2.3" }, "engines": { "node": ">= 0.4" @@ -2248,15 +2333,15 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", - "integrity": "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", + "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", "dev": true, "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", + "function-bind": "^1.1.2", "has-proto": "^1.0.1", - "has-symbols": "^1.0.3" + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -2371,9 +2456,9 @@ } }, "node_modules/globals": { - "version": "13.21.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.21.0.tgz", - "integrity": "sha512-ybyme3s4yy/t/3s35bewwXKOf7cvzfreG2lH0lZl0JB7I4GxRP2ghxOK/Nb9EkRXdbBXZLfq/p/0W2JUONB/Gg==", + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", "dev": true, "dependencies": { "type-fest": "^0.20.2" @@ -2469,18 +2554,6 @@ "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", "dev": true }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, "node_modules/has-bigints": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", @@ -2500,12 +2573,12 @@ } }, "node_modules/has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", + "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", "dev": true, "dependencies": { - "get-intrinsic": "^1.1.1" + "get-intrinsic": "^1.2.2" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -2568,6 +2641,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/hasown": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", + "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/hosted-git-info": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.2.1.tgz", @@ -2771,13 +2856,13 @@ } }, "node_modules/internal-slot": { - "version": "1.0.5", - "resolved": 
"https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz", - "integrity": "sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", + "integrity": "sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.0", - "has": "^1.0.3", + "get-intrinsic": "^1.2.2", + "hasown": "^2.0.0", "side-channel": "^1.0.4" }, "engines": { @@ -2832,6 +2917,21 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-builtin-module": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz", + "integrity": "sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==", + "dev": true, + "dependencies": { + "builtin-modules": "^3.3.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-callable": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", @@ -2857,12 +2957,12 @@ } }, "node_modules/is-core-module": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.0.tgz", - "integrity": "sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ==", + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", + "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", "dev": true, "dependencies": { - "has": "^1.0.3" + "hasown": "^2.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -3060,16 +3160,12 @@ } }, "node_modules/is-typed-array": { - "version": "1.1.10", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz", - "integrity": "sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz", + "integrity": "sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==", "dev": true, "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" + "which-typed-array": "^1.1.11" }, "engines": { "node": ">= 0.4" @@ -3105,6 +3201,12 @@ "node": ">=12" } }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true + }, "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -3312,9 +3414,9 @@ } }, "node_modules/luxon": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.3.tgz", - "integrity": "sha512-tFWBiv3h7z+T/tDaoxA8rqTxy1CHV6gHS//QdaH4pulbq/JuBSGgQspQQqcgnwdAx6pNI7cmvz5Sv/addzHmUg==", + "version": "3.4.4", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.4.tgz", + "integrity": "sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==", "engines": { "node": ">=12" } @@ -3708,11 +3810,12 @@ } }, 
"node_modules/npm-check-updates": { - "version": "16.14.6", - "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.6.tgz", - "integrity": "sha512-sJ6w4AmSDP7YzBXah94Ul2JhiIbjBDfx9XYgib15um2wtiQkOyjE7Lov3MNUSQ84Ry7T81mE4ynMbl/mGbK4HQ==", + "version": "16.14.18", + "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.18.tgz", + "integrity": "sha512-9iaRe9ohx9ykdbLjPRIYcq1A0RkrPYUx9HmQK1JIXhfxtJCNE/+497H9Z4PGH6GWRALbz5KF+1iZoySK2uSEpQ==", "dev": true, "dependencies": { + "@types/semver-utils": "^1.1.1", "chalk": "^5.3.0", "cli-table3": "^0.6.3", "commander": "^10.0.1", @@ -3987,9 +4090,9 @@ } }, "node_modules/object-inspect": { - "version": "1.12.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", - "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", + "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -4023,14 +4126,14 @@ } }, "node_modules/object.fromentries": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.6.tgz", - "integrity": "sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg==", + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.7.tgz", + "integrity": "sha512-UPbPHML6sL8PI/mOqPwsH4G6iyXcCGzLin8KvEPenOZN5lpCNBZZQ+V62vdjB1mQHrmqGQt5/OJzemUA+KJmEA==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" }, "engines": { "node": ">= 0.4" @@ -4040,26 +4143,26 @@ } }, "node_modules/object.groupby": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.0.tgz", - "integrity": "sha512-70MWG6NfRH9GnbZOikuhPPYzpUpof9iW2J9E4dW7FXTqPNb6rllE6u39SKwwiNh8lCwX3DDb5OgcKGiEBrTTyw==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.1.tgz", + "integrity": "sha512-HqaQtqLnp/8Bn4GL16cj+CUYbnpe1bh0TtEaWvybszDG4tgxCJuRpV8VGuvNaI1fAnI4lUJzDG55MXcOH4JZcQ==", "dev": true, "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", - "es-abstract": "^1.21.2", + "es-abstract": "^1.22.1", "get-intrinsic": "^1.2.1" } }, "node_modules/object.values": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz", - "integrity": "sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==", + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.7.tgz", + "integrity": "sha512-aU6xnDFYT3x17e/f0IiiwlGPTy2jzMySGfUB4fq6z7CV8l85CWHDk5ErhyhpfDHhrOMwGFhSQkhMGHaIotA6Ng==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" }, "engines": { "node": ">= 0.4" @@ -4311,12 +4414,12 @@ } }, "node_modules/playwright": { - "version": "1.39.0", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.39.0.tgz", - "integrity": 
"sha512-naE5QT11uC/Oiq0BwZ50gDmy8c8WLPRTEWuSSFVG2egBka/1qMoSqYQcROMT9zLwJ86oPofcTH2jBY/5wWOgIw==", + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.42.1.tgz", + "integrity": "sha512-PgwB03s2DZBcNRoW+1w9E+VkLBxweib6KTXM0M3tkiT4jVxKSi6PmVJ591J+0u10LUrgxB7dLRbiJqO5s2QPMg==", "dev": true, "dependencies": { - "playwright-core": "1.39.0" + "playwright-core": "1.42.1" }, "bin": { "playwright": "cli.js" @@ -4329,9 +4432,9 @@ } }, "node_modules/playwright-core": { - "version": "1.39.0", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.39.0.tgz", - "integrity": "sha512-+k4pdZgs1qiM+OUkSjx96YiKsXsmb59evFoqv8SKO067qBA+Z2s/dCzJij/ZhdQcs2zlTAgRKfeiiLm8PQ2qvw==", + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.42.1.tgz", + "integrity": "sha512-mxz6zclokgrke9p1vtdy/COWBH+eOZgYUVVU34C73M+4j4HLlQJHtfcqiqqxpP0o8HhMkflvfbquLX5dg6wlfA==", "dev": true, "bin": { "playwright-core": "cli.js" @@ -4406,9 +4509,9 @@ "dev": true }, "node_modules/punycode": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", "dev": true, "engines": { "node": ">=6" @@ -4604,14 +4707,14 @@ } }, "node_modules/regexp.prototype.flags": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", - "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz", + "integrity": "sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "functions-have-names": "^1.2.2" + "define-properties": "^1.2.0", + "set-function-name": "^2.0.0" }, "engines": { "node": ">= 0.4" @@ -4790,6 +4893,24 @@ "queue-microtask": "^1.2.2" } }, + "node_modules/safe-array-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.0.1.tgz", + "integrity": "sha512-6XbUAseYE2KtOuGueyeobCySj9L4+66Tn6KQMOPQJrAJEowYKW/YR/MGJZl7FdydUdaFu4LYyDZjxf4/Nmo23Q==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.1", + "has-symbols": "^1.0.3", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -4878,6 +4999,35 @@ "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", "dev": true }, + "node_modules/set-function-length": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.1.1.tgz", + "integrity": "sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.1", + "get-intrinsic": "^1.2.1", + "gopd": "^1.0.1", + 
"has-property-descriptors": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-function-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", + "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "dev": true, + "dependencies": { + "define-data-property": "^1.0.1", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", @@ -5112,14 +5262,14 @@ } }, "node_modules/string.prototype.trim": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz", - "integrity": "sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg==", + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz", + "integrity": "sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" }, "engines": { "node": ">= 0.4" @@ -5129,28 +5279,28 @@ } }, "node_modules/string.prototype.trimend": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz", - "integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz", + "integrity": "sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/string.prototype.trimstart": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz", - "integrity": "sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz", + "integrity": "sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -5286,9 +5436,9 @@ } }, "node_modules/tsconfig-paths": { - "version": "3.14.2", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz", - "integrity": "sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==", + "version": "3.15.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", + "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", "dev": true, "dependencies": { "@types/json5": 
"^0.0.29", @@ -5335,6 +5485,57 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/typed-array-buffer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz", + "integrity": "sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.1", + "is-typed-array": "^1.1.10" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/typed-array-byte-length": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz", + "integrity": "sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "has-proto": "^1.0.1", + "is-typed-array": "^1.1.10" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-byte-offset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz", + "integrity": "sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==", + "dev": true, + "dependencies": { + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "has-proto": "^1.0.1", + "is-typed-array": "^1.1.10" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/typed-array-length": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", @@ -5530,17 +5731,16 @@ } }, "node_modules/which-typed-array": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz", - "integrity": "sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.13.tgz", + "integrity": "sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==", "dev": true, "dependencies": { "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "call-bind": "^1.0.4", "for-each": "^0.3.3", "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0", - "is-typed-array": "^1.1.10" + "has-tostringtag": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -5799,9 +5999,9 @@ "dev": true }, "@eslint/eslintrc": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.2.tgz", - "integrity": "sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", "dev": true, "requires": { "ajv": "^6.12.4", @@ -5816,24 +6016,24 @@ } }, "@eslint/js": { - "version": "8.51.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.51.0.tgz", - "integrity": "sha512-HxjQ8Qn+4SI3/AFv6sOrDB+g6PpUTDwSJiQqOrnneEk8L71161srI9gjzzZvYVbzHiVg/BvcH95+cK/zfIt4pg==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", + "integrity": 
"sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", "dev": true }, "@faker-js/faker": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@faker-js/faker/-/faker-8.2.0.tgz", - "integrity": "sha512-VacmzZqVxdWdf9y64lDOMZNDMM/FQdtM9IsaOPKOm2suYwEatb8VkdHqOzXcDnZbk7YDE2BmsJmy/2Hmkn563g==" + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/@faker-js/faker/-/faker-8.4.1.tgz", + "integrity": "sha512-XQ3cU+Q8Uqmrbf2e0cIC/QN43sTBSC8KF12u29Mb47tWrt2hAgBXSgpZMj4Ao8Uk0iJcU99QsOCaIL8934obCg==" }, "@humanwhocodes/config-array": { - "version": "0.11.11", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.11.tgz", - "integrity": "sha512-N2brEuAadi0CcdeMXUkhbZB84eskAc8MEX1By6qEchoVywSgXPIjou4rYsl0V3Hj0ZnuGycGCjdNgockbzeWNA==", + "version": "0.11.14", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", + "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", "dev": true, "requires": { - "@humanwhocodes/object-schema": "^1.2.1", - "debug": "^4.1.1", + "@humanwhocodes/object-schema": "^2.0.2", + "debug": "^4.3.1", "minimatch": "^3.0.5" } }, @@ -5844,9 +6044,9 @@ "dev": true }, "@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.2.tgz", + "integrity": "sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw==", "dev": true }, "@isaacs/cliui": { @@ -6033,12 +6233,12 @@ "optional": true }, "@playwright/test": { - "version": "1.39.0", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.39.0.tgz", - "integrity": "sha512-3u1iFqgzl7zr004bGPYiN/5EZpRUSFddQBra8Rqll5N0/vfpqlP9I9EXqAoGacuAbX6c9Ulg/Cjqglp5VkK6UQ==", + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.42.1.tgz", + "integrity": "sha512-Gq9rmS54mjBL/7/MvBaNOBwbfnh7beHvS6oS4srqXFcQHpQCV1+c8JXWE8VLPyRDhgS3H8x8A7hztqI9VnwrAQ==", "dev": true, "requires": { - "playwright": "1.39.0" + "playwright": "1.42.1" } }, "@pnpm/network.ca-file": { @@ -6154,6 +6354,18 @@ "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", "dev": true }, + "@types/semver-utils": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@types/semver-utils/-/semver-utils-1.1.3.tgz", + "integrity": "sha512-T+YwkslhsM+CeuhYUxyAjWm7mJ5am/K10UX40RuA6k6Lc7eGtq8iY2xOzy7Vq0GOqhl/xZl5l2FwURZMTPTUww==", + "dev": true + }, + "@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "dev": true + }, "abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", @@ -6161,9 +6373,9 @@ "dev": true }, "acorn": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", - "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", + "version": "8.11.3", + "resolved": 
"https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", + "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==", "dev": true }, "acorn-jsx": { @@ -6272,15 +6484,15 @@ } }, "array-includes": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.6.tgz", - "integrity": "sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==", + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.7.tgz", + "integrity": "sha512-dlcsNBIiWhPkHdOEEKnehA+RNUWDc4UqFtnIXU4uuYDPtA4LDkr7qip2p0VvFAEXNDr0yWZ9PJyIRiGjRLQzwQ==", "dev": true, "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "get-intrinsic": "^1.2.1", "is-string": "^1.0.7" } }, @@ -6291,42 +6503,57 @@ "dev": true }, "array.prototype.findlastindex": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.2.tgz", - "integrity": "sha512-tb5thFFlUcp7NdNF6/MpDk/1r/4awWG1FIz3YqDf+/zJSTezBb+/5WViH41obXULHVpDzoiCLpJ/ZO9YbJMsdw==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz", + "integrity": "sha512-LzLoiOMAxvy+Gd3BAq3B7VeIgPdo+Q8hthvKtXybMvRV0jrXfJM/t8mw7nNlpEcVlVUnCnM2KSX4XU5HmpodOA==", "dev": true, "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.1.3" + "get-intrinsic": "^1.2.1" } }, "array.prototype.flat": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz", - "integrity": "sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz", + "integrity": "sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==", "dev": true, "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", "es-shim-unscopables": "^1.0.0" } }, "array.prototype.flatmap": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz", - "integrity": "sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz", + "integrity": "sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==", "dev": true, "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", "es-shim-unscopables": "^1.0.0" } }, + "arraybuffer.prototype.slice": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.2.tgz", + "integrity": "sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw==", + "dev": true, + "requires": { + 
"array-buffer-byte-length": "^1.0.0", + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "get-intrinsic": "^1.2.1", + "is-array-buffer": "^3.0.2", + "is-shared-array-buffer": "^1.0.2" + } + }, "available-typed-arrays": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", @@ -6426,6 +6653,12 @@ "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "dev": true }, + "builtin-modules": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", + "integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==", + "dev": true + }, "builtins": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz", @@ -6516,13 +6749,14 @@ } }, "call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", + "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", "dev": true, "requires": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.1", + "set-function-length": "^1.1.1" } }, "callsites": { @@ -6731,12 +6965,24 @@ "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", "dev": true }, + "define-data-property": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", + "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "dev": true, + "requires": { + "get-intrinsic": "^1.2.1", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.0" + } + }, "define-properties": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.0.tgz", - "integrity": "sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", "dev": true, "requires": { + "define-data-property": "^1.0.1", "has-property-descriptors": "^1.0.0", "object-keys": "^1.1.1" } @@ -6815,25 +7061,26 @@ "dev": true }, "es-abstract": { - "version": "1.21.2", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.21.2.tgz", - "integrity": "sha512-y/B5POM2iBnIxCiernH1G7rC9qQoM77lLIMQLuob0zhp8C56Po81+2Nj0WFKnd0pNReDTnkYryc+zhOzpEIROg==", + "version": "1.22.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.3.tgz", + "integrity": "sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA==", "dev": true, "requires": { "array-buffer-byte-length": "^1.0.0", + "arraybuffer.prototype.slice": "^1.0.2", "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "call-bind": "^1.0.5", "es-set-tostringtag": "^2.0.1", "es-to-primitive": "^1.2.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.2.0", + "function.prototype.name": 
"^1.1.6", + "get-intrinsic": "^1.2.2", "get-symbol-description": "^1.0.0", "globalthis": "^1.0.3", "gopd": "^1.0.1", - "has": "^1.0.3", "has-property-descriptors": "^1.0.0", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", + "hasown": "^2.0.0", "internal-slot": "^1.0.5", "is-array-buffer": "^3.0.2", "is-callable": "^1.2.7", @@ -6841,39 +7088,43 @@ "is-regex": "^1.1.4", "is-shared-array-buffer": "^1.0.2", "is-string": "^1.0.7", - "is-typed-array": "^1.1.10", + "is-typed-array": "^1.1.12", "is-weakref": "^1.0.2", - "object-inspect": "^1.12.3", + "object-inspect": "^1.13.1", "object-keys": "^1.1.1", "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.4.3", + "regexp.prototype.flags": "^1.5.1", + "safe-array-concat": "^1.0.1", "safe-regex-test": "^1.0.0", - "string.prototype.trim": "^1.2.7", - "string.prototype.trimend": "^1.0.6", - "string.prototype.trimstart": "^1.0.6", + "string.prototype.trim": "^1.2.8", + "string.prototype.trimend": "^1.0.7", + "string.prototype.trimstart": "^1.0.7", + "typed-array-buffer": "^1.0.0", + "typed-array-byte-length": "^1.0.0", + "typed-array-byte-offset": "^1.0.0", "typed-array-length": "^1.0.4", "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.9" + "which-typed-array": "^1.1.13" } }, "es-set-tostringtag": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", - "integrity": "sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz", + "integrity": "sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==", "dev": true, "requires": { - "get-intrinsic": "^1.1.3", - "has": "^1.0.3", - "has-tostringtag": "^1.0.0" + "get-intrinsic": "^1.2.2", + "has-tostringtag": "^1.0.0", + "hasown": "^2.0.0" } }, "es-shim-unscopables": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", - "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", + "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", "dev": true, "requires": { - "has": "^1.0.3" + "hasown": "^2.0.0" } }, "es-to-primitive": { @@ -6900,18 +7151,19 @@ "dev": true }, "eslint": { - "version": "8.51.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.51.0.tgz", - "integrity": "sha512-2WuxRZBrlwnXi+/vFSJyjMqrNjtJqiasMzehF0shoLaW7DzS3/9Yvrmq5JiT66+pNjiX4UBnLDiKHcWAr/OInA==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", + "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", "dev": true, "requires": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", - "@eslint/eslintrc": "^2.1.2", - "@eslint/js": "8.51.0", - "@humanwhocodes/config-array": "^0.11.11", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.0", + "@humanwhocodes/config-array": "^0.11.14", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", @@ -6944,6 +7196,13 @@ "text-table": "^0.2.0" } }, + "eslint-compat-utils": { + "version": 
"0.1.2", + "resolved": "https://registry.npmjs.org/eslint-compat-utils/-/eslint-compat-utils-0.1.2.tgz", + "integrity": "sha512-Jia4JDldWnFNIru1Ehx1H5s9/yxiRHY/TimCuUc0jNexew3cF1gI6CYZil1ociakfWO3rRqFjl1mskBblB3RYg==", + "dev": true, + "requires": {} + }, "eslint-config-standard": { "version": "17.1.0", "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.1.0.tgz", @@ -6952,14 +7211,14 @@ "requires": {} }, "eslint-import-resolver-node": { - "version": "0.3.7", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz", - "integrity": "sha512-gozW2blMLJCeFpBwugLTGyvVjNoeo1knonXAcatC6bjPBZitotxdWf7Gimr25N4c0AAOo4eOUfaG82IJPDpqCA==", + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", + "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", "dev": true, "requires": { "debug": "^3.2.7", - "is-core-module": "^2.11.0", - "resolve": "^1.22.1" + "is-core-module": "^2.13.0", + "resolve": "^1.22.4" }, "dependencies": { "debug": { @@ -6994,38 +7253,39 @@ } }, "eslint-plugin-es-x": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-es-x/-/eslint-plugin-es-x-7.1.0.tgz", - "integrity": "sha512-AhiaF31syh4CCQ+C5ccJA0VG6+kJK8+5mXKKE7Qs1xcPRg02CDPOj3mWlQxuWS/AYtg7kxrDNgW9YW3vc0Q+Mw==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es-x/-/eslint-plugin-es-x-7.5.0.tgz", + "integrity": "sha512-ODswlDSO0HJDzXU0XvgZ3lF3lS3XAZEossh15Q2UHjwrJggWeBoKqqEsLTZLXl+dh5eOAozG0zRcYtuE35oTuQ==", "dev": true, "requires": { "@eslint-community/eslint-utils": "^4.1.2", - "@eslint-community/regexpp": "^4.5.0" + "@eslint-community/regexpp": "^4.6.0", + "eslint-compat-utils": "^0.1.2" } }, "eslint-plugin-import": { - "version": "2.28.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.28.1.tgz", - "integrity": "sha512-9I9hFlITvOV55alzoKBI+K9q74kv0iKMeY6av5+umsNwayt59fz692daGyjR+oStBQgx6nwR9rXldDev3Clw+A==", + "version": "2.29.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz", + "integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==", "dev": true, "requires": { - "array-includes": "^3.1.6", - "array.prototype.findlastindex": "^1.2.2", - "array.prototype.flat": "^1.3.1", - "array.prototype.flatmap": "^1.3.1", + "array-includes": "^3.1.7", + "array.prototype.findlastindex": "^1.2.3", + "array.prototype.flat": "^1.3.2", + "array.prototype.flatmap": "^1.3.2", "debug": "^3.2.7", "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.7", + "eslint-import-resolver-node": "^0.3.9", "eslint-module-utils": "^2.8.0", - "has": "^1.0.3", - "is-core-module": "^2.13.0", + "hasown": "^2.0.0", + "is-core-module": "^2.13.1", "is-glob": "^4.0.3", "minimatch": "^3.1.2", - "object.fromentries": "^2.0.6", - "object.groupby": "^1.0.0", - "object.values": "^1.1.6", + "object.fromentries": "^2.0.7", + "object.groupby": "^1.0.1", + "object.values": "^1.1.7", "semver": "^6.3.1", - "tsconfig-paths": "^3.14.2" + "tsconfig-paths": "^3.15.0" }, "dependencies": { "debug": { @@ -7055,16 +7315,18 @@ } }, "eslint-plugin-n": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-16.2.0.tgz", - "integrity": 
"sha512-AQER2jEyQOt1LG6JkGJCCIFotzmlcCZFur2wdKrp1JX2cNotC7Ae0BcD/4lLv3lUAArM9uNS8z/fsvXTd0L71g==", + "version": "16.6.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-16.6.2.tgz", + "integrity": "sha512-6TyDmZ1HXoFQXnhCTUjVFULReoBPOAjpuiKELMkeP40yffI/1ZRO+d9ug/VC6fqISo2WkuIBk3cvuRPALaWlOQ==", "dev": true, "requires": { "@eslint-community/eslint-utils": "^4.4.0", "builtins": "^5.0.1", - "eslint-plugin-es-x": "^7.1.0", + "eslint-plugin-es-x": "^7.5.0", "get-tsconfig": "^4.7.0", + "globals": "^13.24.0", "ignore": "^5.2.4", + "is-builtin-module": "^3.2.1", "is-core-module": "^2.12.1", "minimatch": "^3.1.2", "resolve": "^1.22.2", @@ -7351,21 +7613,21 @@ "optional": true }, "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", "dev": true }, "function.prototype.name": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", - "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", + "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", "dev": true, "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.0", - "functions-have-names": "^1.2.2" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "functions-have-names": "^1.2.3" } }, "functions-have-names": { @@ -7391,15 +7653,15 @@ } }, "get-intrinsic": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", - "integrity": "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", + "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", "dev": true, "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", + "function-bind": "^1.1.2", "has-proto": "^1.0.1", - "has-symbols": "^1.0.3" + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" } }, "get-stdin": { @@ -7474,9 +7736,9 @@ } }, "globals": { - "version": "13.21.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.21.0.tgz", - "integrity": "sha512-ybyme3s4yy/t/3s35bewwXKOf7cvzfreG2lH0lZl0JB7I4GxRP2ghxOK/Nb9EkRXdbBXZLfq/p/0W2JUONB/Gg==", + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", "dev": true, "requires": { "type-fest": "^0.20.2" @@ -7545,15 +7807,6 @@ "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", "dev": true }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - 
"dev": true, - "requires": { - "function-bind": "^1.1.1" - } - }, "has-bigints": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", @@ -7567,12 +7820,12 @@ "dev": true }, "has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", + "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", "dev": true, "requires": { - "get-intrinsic": "^1.1.1" + "get-intrinsic": "^1.2.2" } }, "has-proto": { @@ -7608,6 +7861,15 @@ "integrity": "sha512-IrsVwUHhEULx3R8f/aA8AHuEzAorplsab/v8HBzEiIukwq5i/EC+xmOW+HfP1OaDP+2JkgT1yILHN2O3UFIbcA==", "dev": true }, + "hasown": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", + "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "dev": true, + "requires": { + "function-bind": "^1.1.2" + } + }, "hosted-git-info": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.2.1.tgz", @@ -7767,13 +8029,13 @@ "dev": true }, "internal-slot": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz", - "integrity": "sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", + "integrity": "sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==", "dev": true, "requires": { - "get-intrinsic": "^1.2.0", - "has": "^1.0.3", + "get-intrinsic": "^1.2.2", + "hasown": "^2.0.0", "side-channel": "^1.0.4" } }, @@ -7813,6 +8075,15 @@ "has-tostringtag": "^1.0.0" } }, + "is-builtin-module": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz", + "integrity": "sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==", + "dev": true, + "requires": { + "builtin-modules": "^3.3.0" + } + }, "is-callable": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", @@ -7829,12 +8100,12 @@ } }, "is-core-module": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.0.tgz", - "integrity": "sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ==", + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", + "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", "dev": true, "requires": { - "has": "^1.0.3" + "hasown": "^2.0.0" } }, "is-date-object": { @@ -7960,16 +8231,12 @@ } }, "is-typed-array": { - "version": "1.1.10", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz", - "integrity": "sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz", + "integrity": 
"sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==", "dev": true, "requires": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" + "which-typed-array": "^1.1.11" } }, "is-typedarray": { @@ -7993,6 +8260,12 @@ "integrity": "sha512-/kppl+R+LO5VmhYSEWARUFjodS25D68gvj8W7z0I7OWhUla5xWu8KL6CtB2V0R6yqhnRgbcaREMr4EEM6htLPQ==", "dev": true }, + "isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true + }, "isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -8153,9 +8426,9 @@ } }, "luxon": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.3.tgz", - "integrity": "sha512-tFWBiv3h7z+T/tDaoxA8rqTxy1CHV6gHS//QdaH4pulbq/JuBSGgQspQQqcgnwdAx6pNI7cmvz5Sv/addzHmUg==" + "version": "3.4.4", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.4.tgz", + "integrity": "sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==" }, "make-fetch-happen": { "version": "11.1.1", @@ -8461,11 +8734,12 @@ } }, "npm-check-updates": { - "version": "16.14.6", - "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.6.tgz", - "integrity": "sha512-sJ6w4AmSDP7YzBXah94Ul2JhiIbjBDfx9XYgib15um2wtiQkOyjE7Lov3MNUSQ84Ry7T81mE4ynMbl/mGbK4HQ==", + "version": "16.14.18", + "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.18.tgz", + "integrity": "sha512-9iaRe9ohx9ykdbLjPRIYcq1A0RkrPYUx9HmQK1JIXhfxtJCNE/+497H9Z4PGH6GWRALbz5KF+1iZoySK2uSEpQ==", "dev": true, "requires": { + "@types/semver-utils": "^1.1.1", "chalk": "^5.3.0", "cli-table3": "^0.6.3", "commander": "^10.0.1", @@ -8662,9 +8936,9 @@ } }, "object-inspect": { - "version": "1.12.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", - "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", + "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", "dev": true }, "object-keys": { @@ -8686,37 +8960,37 @@ } }, "object.fromentries": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.6.tgz", - "integrity": "sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg==", + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.7.tgz", + "integrity": "sha512-UPbPHML6sL8PI/mOqPwsH4G6iyXcCGzLin8KvEPenOZN5lpCNBZZQ+V62vdjB1mQHrmqGQt5/OJzemUA+KJmEA==", "dev": true, "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" } }, "object.groupby": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.0.tgz", - "integrity": "sha512-70MWG6NfRH9GnbZOikuhPPYzpUpof9iW2J9E4dW7FXTqPNb6rllE6u39SKwwiNh8lCwX3DDb5OgcKGiEBrTTyw==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.1.tgz", + "integrity": 
"sha512-HqaQtqLnp/8Bn4GL16cj+CUYbnpe1bh0TtEaWvybszDG4tgxCJuRpV8VGuvNaI1fAnI4lUJzDG55MXcOH4JZcQ==", "dev": true, "requires": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", - "es-abstract": "^1.21.2", + "es-abstract": "^1.22.1", "get-intrinsic": "^1.2.1" } }, "object.values": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz", - "integrity": "sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==", + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.7.tgz", + "integrity": "sha512-aU6xnDFYT3x17e/f0IiiwlGPTy2jzMySGfUB4fq6z7CV8l85CWHDk5ErhyhpfDHhrOMwGFhSQkhMGHaIotA6Ng==", "dev": true, "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" } }, "once": { @@ -8889,19 +9163,19 @@ "dev": true }, "playwright": { - "version": "1.39.0", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.39.0.tgz", - "integrity": "sha512-naE5QT11uC/Oiq0BwZ50gDmy8c8WLPRTEWuSSFVG2egBka/1qMoSqYQcROMT9zLwJ86oPofcTH2jBY/5wWOgIw==", + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.42.1.tgz", + "integrity": "sha512-PgwB03s2DZBcNRoW+1w9E+VkLBxweib6KTXM0M3tkiT4jVxKSi6PmVJ591J+0u10LUrgxB7dLRbiJqO5s2QPMg==", "dev": true, "requires": { "fsevents": "2.3.2", - "playwright-core": "1.39.0" + "playwright-core": "1.42.1" } }, "playwright-core": { - "version": "1.39.0", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.39.0.tgz", - "integrity": "sha512-+k4pdZgs1qiM+OUkSjx96YiKsXsmb59evFoqv8SKO067qBA+Z2s/dCzJij/ZhdQcs2zlTAgRKfeiiLm8PQ2qvw==", + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.42.1.tgz", + "integrity": "sha512-mxz6zclokgrke9p1vtdy/COWBH+eOZgYUVVU34C73M+4j4HLlQJHtfcqiqqxpP0o8HhMkflvfbquLX5dg6wlfA==", "dev": true }, "prelude-ls": { @@ -8955,9 +9229,9 @@ "dev": true }, "punycode": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", "dev": true }, "pupa": { @@ -9094,14 +9368,14 @@ } }, "regexp.prototype.flags": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", - "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz", + "integrity": "sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==", "dev": true, "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "functions-have-names": "^1.2.2" + "define-properties": "^1.2.0", + "set-function-name": "^2.0.0" } }, "regexpp": { @@ -9208,6 +9482,18 @@ "queue-microtask": "^1.2.2" } }, + "safe-array-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.0.1.tgz", + "integrity": "sha512-6XbUAseYE2KtOuGueyeobCySj9L4+66Tn6KQMOPQJrAJEowYKW/YR/MGJZl7FdydUdaFu4LYyDZjxf4/Nmo23Q==", 
+ "dev": true, + "requires": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.1", + "has-symbols": "^1.0.3", + "isarray": "^2.0.5" + } + }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -9267,6 +9553,29 @@ "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", "dev": true }, + "set-function-length": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.1.1.tgz", + "integrity": "sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ==", + "dev": true, + "requires": { + "define-data-property": "^1.1.1", + "get-intrinsic": "^1.2.1", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.0" + } + }, + "set-function-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", + "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "dev": true, + "requires": { + "define-data-property": "^1.0.1", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.0" + } + }, "shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", @@ -9453,36 +9762,36 @@ } }, "string.prototype.trim": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz", - "integrity": "sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg==", + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz", + "integrity": "sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ==", "dev": true, "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" } }, "string.prototype.trimend": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz", - "integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz", + "integrity": "sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA==", "dev": true, "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" } }, "string.prototype.trimstart": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz", - "integrity": "sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz", + "integrity": "sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg==", "dev": true, "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" } }, "strip-ansi": { @@ -9582,9 +9891,9 @@ } }, "tsconfig-paths": { - "version": "3.14.2", - "resolved": 
"https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz", - "integrity": "sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==", + "version": "3.15.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", + "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", "dev": true, "requires": { "@types/json5": "^0.0.29", @@ -9619,6 +9928,42 @@ "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "dev": true }, + "typed-array-buffer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz", + "integrity": "sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.1", + "is-typed-array": "^1.1.10" + } + }, + "typed-array-byte-length": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz", + "integrity": "sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "has-proto": "^1.0.1", + "is-typed-array": "^1.1.10" + } + }, + "typed-array-byte-offset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz", + "integrity": "sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==", + "dev": true, + "requires": { + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "has-proto": "^1.0.1", + "is-typed-array": "^1.1.10" + } + }, "typed-array-length": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", @@ -9771,17 +10116,16 @@ } }, "which-typed-array": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz", - "integrity": "sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.13.tgz", + "integrity": "sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==", "dev": true, "requires": { "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "call-bind": "^1.0.4", "for-each": "^0.3.3", "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0", - "is-typed-array": "^1.1.10" + "has-tostringtag": "^1.0.0" } }, "wide-align": { diff --git a/playwright/package.json b/playwright/package.json index 4bea3fff42..5e3ba94782 100644 --- a/playwright/package.json +++ b/playwright/package.json @@ -3,24 +3,25 @@ "install-deps": "playwright install --with-deps", "test": "playwright test", "test:legacy": "playwright test -c playwright-legacy.config.js", + "test:legacy:visual": "playwright test -c playwright-legacy.config.js --headed --workers=1", "test:visual": "playwright test --headed --workers=1", "test:debug": "playwright test --debug" }, "devDependencies": { - "@playwright/test": "1.39.0", - "eslint": "8.51.0", + "@playwright/test": "1.42.1", + "eslint": "8.57.0", "eslint-config-standard": "17.1.0", - "eslint-plugin-import": "2.28.1", - "eslint-plugin-n": "16.2.0", + "eslint-plugin-import": "2.29.1", 
+ "eslint-plugin-n": "16.6.2", "eslint-plugin-node": "11.1.0", "eslint-plugin-promise": "6.1.1", - "npm-check-updates": "16.14.6" + "npm-check-updates": "16.14.18" }, "dependencies": { - "@faker-js/faker": "8.2.0", + "@faker-js/faker": "8.4.1", "lodash": "4.17.21", "lodash-es": "4.17.21", - "luxon": "3.4.3", + "luxon": "3.4.4", "ms": "2.1.3", "seedrandom": "3.0.5", "slugify": "1.6.6" diff --git a/playwright/tests-legacy/docs/ad.spec.js b/playwright/tests-legacy/docs/ad.spec.js new file mode 100644 index 0000000000..80b8b27cda --- /dev/null +++ b/playwright/tests-legacy/docs/ad.spec.js @@ -0,0 +1,26 @@ +const { test, expect } = require('@playwright/test') +const viewports = require('../../helpers/viewports') + +// ==================================================================== +// IESG Dashboard +// ==================================================================== + +test.describe('/doc/ad/', () => { + test.beforeEach(async ({ page }) => { + await page.setViewportSize({ + width: viewports.desktop[0], + height: viewports.desktop[1] + }) + + await page.goto('/doc/ad/') + }) + + test('Pre pubreq', async ({ page }) => { + const tablesLocator = page.locator('table') + const tablesCount = await tablesLocator.count() + expect(tablesCount).toBeGreaterThan(0) + const firstTable = tablesLocator.nth(0) + const theadTexts = await firstTable.locator('thead').allInnerTexts() + expect(theadTexts.join('')).toContain('Pre pubreq') + }) +}) diff --git a/playwright/tests-legacy/secr/announcement.spec.js b/playwright/tests-legacy/secr/announcement.spec.js new file mode 100644 index 0000000000..4dbbc25a81 --- /dev/null +++ b/playwright/tests-legacy/secr/announcement.spec.js @@ -0,0 +1,77 @@ +const { test, expect } = require('@playwright/test') +const viewports = require('../../helpers/viewports') +const { setTimeout } = require('timers/promises') + +// ==================================================================== +// ANNOUNCEMENT | DESKTOP viewport +// ==================================================================== + +test.describe('desktop', () => { + + test.beforeAll(async ({ browser }) => { + const context = await browser.newContext(); + const page = await context.newPage(); + + await page.goto('/accounts/login/'); + + await page.fill('input#id_username', 'glen'); + await page.fill('input#id_password', 'password'); + + await page.click('button[type="submit"]'); + await page.waitForURL('/accounts/profile/'); + + await context.storageState({ path: 'auth.json' }); + + await context.close(); + }); + + test.beforeEach(async ({ browser }) => { + // Reuse the authentication state in each test + const context = await browser.newContext({ storageState: 'auth.json' }); + const page = await context.newPage(); + await page.setViewportSize({ + width: viewports.desktop[0], + height: viewports.desktop[1] + }) + await page.goto(`/secr/announcement/`); + await page.locator('h1:text("Announcement")').waitFor({ state: 'visible' }) + await setTimeout(500) + // Attach the page to the test context + test.info().page = page; + }) + + test('show to custom', async () => { + const page = test.info().page; + + // to_custom should initially be hidden + const element = page.locator('#id_to_custom'); + await expect(element).toBeHidden(); + await page.selectOption('select#id_to', 'Other...'); + await expect(element).toBeVisible(); + }) + + test('back button', async () => { + const page = test.info().page; + + const element = page.locator('#id_to_custom'); + await page.selectOption('select#id_to', 'Other...'); + await 
expect(element).toBeVisible(); + await page.fill('input#id_to_custom', 'custom@example.com'); + await page.selectOption('select#id_frm', 'IETF Chair '); + await page.fill('input#id_reply_to', 'greg@example.com'); + await page.fill('input#id_subject', 'About Stuff'); + await page.fill('textarea#id_body', 'This is the stuff'); + + await page.click('text="Continue"'); + const h2Locator = page.locator('h2:text("Confirm Announcement")'); + await h2Locator.waitFor({ state: 'visible' }); + + // click back button and check to_custom + await page.click('text="Back"'); + const subjectLocator = page.locator('input#id_subject'); + await subjectLocator.waitFor({ state: 'visible' }); + await expect(element).toBeVisible(); + await expect(element).toHaveValue('custom@example.com'); + }) + +}) \ No newline at end of file diff --git a/playwright/tests/meeting/agenda.spec.js b/playwright/tests/meeting/agenda.spec.js index a68fdeff2a..2248027a38 100644 --- a/playwright/tests/meeting/agenda.spec.js +++ b/playwright/tests/meeting/agenda.spec.js @@ -238,7 +238,7 @@ test.describe('past - desktop', () => { // Name column // ----------- // Event icon - if (['break', 'plenary'].includes(event.type) || (event.type === 'other' && ['office hours', 'hackathon'].some(s => event.name.toLowerCase().indexOf(s) >= 0))) { + if (['break', 'plenary'].includes(event.type) || (event.type === 'other' && event.name.toLowerCase().indexOf('office hours') >= 0)) { await expect(row.locator('.agenda-table-cell-name > i.bi')).toBeVisible() } // Name link @@ -275,7 +275,7 @@ test.describe('past - desktop', () => { const eventButtons = row.locator('.agenda-table-cell-links > .agenda-table-cell-links-buttons') if (event.flags.agenda) { // Show meeting materials button - await expect(eventButtons.locator('i.bi.bi-collection')).toBeVisible() + await expect(eventButtons.locator(`#btn-btn-${event.id}-mat`)).toBeVisible() // ZIP materials button await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar`)).toHaveAttribute('href', `/meeting/${meetingData.meeting.number}/agenda/${event.acronym}-drafts.tgz`) await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar > i.bi`)).toBeVisible() @@ -286,10 +286,17 @@ test.describe('past - desktop', () => { // No meeting materials yet warning badge await expect(eventButtons.locator('.no-meeting-materials')).toBeVisible() } - // Notepad button - const hedgeDocLink = `https://notes.ietf.org/notes-ietf-${meetingData.meeting.number}-${event.type === 'plenary' ? 'plenary' : event.acronym}` - await expect(eventButtons.locator(`#btn-lnk-${event.id}-note`)).toHaveAttribute('href', hedgeDocLink) - await expect(eventButtons.locator(`#btn-lnk-${event.id}-note > i.bi`)).toBeVisible() + if (event.groupAcronym === 'hackathon') { + // Hackathon Wiki button + const hackathonWikiLink = `https://wiki.ietf.org/meeting/${meetingData.meeting.number}/hackathon` + await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki`)).toHaveAttribute('href', hackathonWikiLink) + await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki > i.bi`)).toBeVisible() + } else { + // Notepad button + const hedgeDocLink = `https://notes.ietf.org/notes-ietf-${meetingData.meeting.number}-${event.type === 'plenary' ? 
+        await expect(eventButtons.locator(`#btn-lnk-${event.id}-note`)).toHaveAttribute('href', hedgeDocLink)
+        await expect(eventButtons.locator(`#btn-lnk-${event.id}-note > i.bi`)).toBeVisible()
+      }
       // Chat logs
       await expect(eventButtons.locator(`#btn-lnk-${event.id}-logs`)).toHaveAttribute('href', event.links.chatArchive)
       await expect(eventButtons.locator(`#btn-lnk-${event.id}-logs > i.bi`)).toBeVisible()
@@ -418,7 +425,7 @@ test.describe('past - desktop', () => {
         })
       })
       // Open dialog
-      await page.locator(`#agenda-rowid-${event.id} #btn-lnk-${event.id}-mat`).click()
+      await page.locator(`#agenda-rowid-${event.id} #btn-btn-${event.id}-mat`).click()
       await expect(page.locator('.agenda-eventdetails')).toBeVisible()
       // Header
       await expect(page.locator('.agenda-eventdetails .n-card-header__main > .detail-header > .bi')).toBeVisible()
@@ -500,7 +507,7 @@ test.describe('past - desktop', () => {
       })
     })
     // Open dialog
-    await page.locator(`#btn-lnk-${event.id}-mat`).click()
+    await page.locator(`#btn-btn-${event.id}-mat`).click()
     await expect(page.locator('.agenda-eventdetails')).toBeVisible()
     // Slides Tab
     await page.locator('.agenda-eventdetails .detail-nav > a').nth(1).click()
@@ -1151,7 +1158,7 @@ test.describe('future - desktop', () => {
       if (event.flags.showAgenda || (['regular', 'plenary', 'other'].includes(event.type) && !['admin', 'closed_meeting', 'officehours', 'social'].includes(event.purpose))) {
         if (event.flags.agenda) {
           // Show meeting materials button
-          await expect(eventButtons.locator('i.bi.bi-collection')).toBeVisible()
+          await expect(eventButtons.locator(`#btn-btn-${event.id}-mat`)).toBeVisible()
           // ZIP materials button
           await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar`)).toHaveAttribute('href', `/meeting/${meetingData.meeting.number}/agenda/${event.acronym}-drafts.tgz`)
           await expect(eventButtons.locator(`#btn-lnk-${event.id}-tar > i.bi`)).toBeVisible()
@@ -1162,10 +1169,17 @@ test.describe('future - desktop', () => {
          // No meeting materials yet warning badge
          await expect(eventButtons.locator('.no-meeting-materials')).toBeVisible()
        }
-        // Notepad button
-        const hedgeDocLink = `https://notes.ietf.org/notes-ietf-${meetingData.meeting.number}-${event.type === 'plenary' ? 'plenary' : event.acronym}`
-        await expect(eventButtons.locator(`#btn-lnk-${event.id}-note`)).toHaveAttribute('href', hedgeDocLink)
-        await expect(eventButtons.locator(`#btn-lnk-${event.id}-note > i.bi`)).toBeVisible()
+        if (event.groupAcronym === 'hackathon') {
+          // Hackathon Wiki button
+          const hackathonWikiLink = `https://wiki.ietf.org/meeting/${meetingData.meeting.number}/hackathon`
+          await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki`)).toHaveAttribute('href', hackathonWikiLink)
+          await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki > i.bi`)).toBeVisible()
+        } else {
+          // Notepad button
+          const hedgeDocLink = `https://notes.ietf.org/notes-ietf-${meetingData.meeting.number}-${event.type === 'plenary' ? 'plenary' : event.acronym}`
+          await expect(eventButtons.locator(`#btn-lnk-${event.id}-note`)).toHaveAttribute('href', hedgeDocLink)
+          await expect(eventButtons.locator(`#btn-lnk-${event.id}-note > i.bi`)).toBeVisible()
+        }
        // Chat room
        await expect(eventButtons.locator(`#btn-lnk-${event.id}-room`)).toHaveAttribute('href', event.links.chat)
        await expect(eventButtons.locator(`#btn-lnk-${event.id}-room > i.bi`)).toBeVisible()
@@ -1199,13 +1213,18 @@ test.describe('future - desktop', () => {
           await expect(eventButtons.locator(`#btn-lnk-${event.id}-remotecallin`)).toHaveAttribute('href', remoteCallInUrl)
           await expect(eventButtons.locator(`#btn-lnk-${event.id}-remotecallin > i.bi`)).toBeVisible()
         }
-        // calendar
+        // Calendar
         if (event.links.calendar) {
           await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar`)).toHaveAttribute('href', event.links.calendar)
           await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar > i.bi`)).toBeVisible()
         }
       } else {
-        await expect(eventButtons).toHaveCount(0)
+        if (event.links.calendar) {
+          await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar`)).toHaveAttribute('href', event.links.calendar)
+          await expect(eventButtons.locator(`#btn-lnk-${event.id}-calendar > i.bi`)).toBeVisible()
+        } else {
+          await expect(eventButtons).toHaveCount(0)
+        }
       }
     }
   }
@@ -1264,22 +1283,7 @@ test.describe('live - desktop', () => {
     })
 
     // Override Date in page to fixed time
-    await page.addInitScript(`{
-      // Extend Date constructor to default to fixed time
-      Date = class extends Date {
-        constructor(...args) {
-          if (args.length === 0) {
-            super(${currentTime.toMillis()});
-          } else {
-            super(...args);
-          }
-        }
-      }
-      // Override Date.now() to start from fixed time
-      const __DateNowOffset = ${currentTime.toMillis()} - Date.now();
-      const __DateNow = Date.now;
-      Date.now = () => __DateNow() + __DateNowOffset;
-    }`)
+    await commonHelper.overridePageDateTime(page, currentTime)
 
     // Visit agenda page and await Meeting Data API call to complete
     await Promise.all([
@@ -1334,6 +1338,89 @@
   })
 })
 
+// ====================================================================
+// AGENDA (live meeting) | DESKTOP viewport | Plenary Extended Time Buttons
+// ====================================================================
+
+test.describe('live - desktop - plenary extended time buttons', () => {
+  let meetingData
+  let plenarySessionId
+
+  test.beforeAll(async () => {
+    // Generate meeting data
+    meetingData = meetingHelper.generateAgendaResponse({ dateMode: 'current' })
+    plenarySessionId = meetingData.schedule.find(s => s.type === 'plenary').id
+  })
+
+  test.beforeEach(async ({ page }) => {
+    // Intercept Meeting Data API
+    await page.route(`**/api/meeting/${meetingData.meeting.number}/agenda-data`, route => {
+      route.fulfill({
+        status: 200,
+        contentType: 'application/json',
+        body: JSON.stringify(meetingData)
+      })
+    })
+
+    await page.setViewportSize({
+      width: viewports.desktop[0],
+      height: viewports.desktop[1]
+    })
+  })
+
+  // -> BUTTONS PRESENT AFTER EVENT, SAME DAY
+
+  test('same day - after event', async ({ page }) => {
+    // Override Date in page to fixed time
+    const currentTime = DateTime.fromISO('2022-02-01T13:45:15', { zone: 'Asia/Tokyo' }).plus({ days: 1 }).set({ hour: 20, minute: 30 })
+    await commonHelper.overridePageDateTime(page, currentTime)
+
+    // Visit agenda page and await Meeting Data API call to complete
+    await Promise.all([
+      page.waitForResponse(`**/api/meeting/${meetingData.meeting.number}/agenda-data`),
page.goto(`/meeting/${meetingData.meeting.number}/agenda`) + ]) + + // Wait for page to be ready + await page.locator('.agenda h1').waitFor({ state: 'visible' }) + await setTimeout(500) + + // Check for plenary event + await expect(page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary')).toBeVisible() + await page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary').scrollIntoViewIfNeeded() + + // Check for full video client + on-site tool + await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-video`)).toBeVisible() + await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-onsitetool`)).toBeVisible() + }) + + // -> BUTTONS NO LONGER PRESENT AFTER EVENT, NEXT DAY + + test('next day - after event', async ({ page }) => { + // Override Date in page to fixed time + const currentTime = DateTime.fromISO('2022-02-01T13:45:15', { zone: 'Asia/Tokyo' }).plus({ days: 2 }).set({ hour: 2, minute: 30 }) + await commonHelper.overridePageDateTime(page, currentTime) + + // Visit agenda page and await Meeting Data API call to complete + await Promise.all([ + page.waitForResponse(`**/api/meeting/${meetingData.meeting.number}/agenda-data`), + page.goto(`/meeting/${meetingData.meeting.number}/agenda`) + ]) + + // Wait for page to be ready + await page.locator('.agenda h1').waitFor({ state: 'visible' }) + await setTimeout(500) + + // Check for plenary event + await expect(page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary')).toBeVisible() + await page.locator('.agenda .agenda-table-display-event.agenda-table-type-plenary').scrollIntoViewIfNeeded() + + // Check for full video client + on-site tool + await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-video`)).not.toBeVisible() + await expect(page.locator(`.agenda .agenda-table-display-event.agenda-table-type-plenary .agenda-table-cell-links-buttons a#btn-lnk-${plenarySessionId}-onsitetool`)).not.toBeVisible() + }) +}) + // ==================================================================== // AGENDA (past meeting) | SMALL DESKTOP/TABLET/MOBILE viewports // ==================================================================== @@ -1427,27 +1514,34 @@ test.describe('past - small screens', () => { // has a bottom mobile bar await expect(page.locator('.agenda-mobile-bar')).toBeVisible() - await expect(barBtnLocator).toHaveCount(4) - await expect(barBtnLocator.first()).toContainText('Filters') - await expect(barBtnLocator.nth(1)).toContainText('Cal') - await expect(barBtnLocator.nth(2)).toContainText('.ics') - await expect(barBtnLocator.last().locator('> *')).toHaveCount(1) - await expect(barBtnLocator.last().locator('> *')).toHaveClass(/bi/) + await expect(barBtnLocator).toHaveCount(5) - // can open the filters overlay + // can open the jump to day dropdown await barBtnLocator.first().click() + const jumpDayDdnLocator = page.locator('.n-dropdown-menu [data-testid=mobile-link]') + await expect(jumpDayDdnLocator).toHaveCount(7) + for (let idx = 0; idx < 7; idx++) { + const localDateTime = DateTime.fromISO(meetingData.meeting.startDate, { zone: meetingData.meeting.timezone }) + .setLocale(BROWSER_LOCALE) + .plus({ days: idx }) + .toFormat('ccc LLL d') + await expect(jumpDayDdnLocator.nth(idx)).toContainText(`Jump to 
${localDateTime}`)
+    }
+
+    // can open the filters overlay
+    await barBtnLocator.nth(1).click()
     await expect(page.locator('.agenda-personalize')).toBeVisible()
     await page.locator('.agenda-personalize .agenda-personalize-actions > button').nth(1).click()
     await expect(page.locator('.agenda-personalize')).toBeHidden()
     // can open the calendar view
-    await barBtnLocator.nth(1).click()
+    await barBtnLocator.nth(2).click()
     await expect(page.locator('.agenda-calendar')).toBeVisible()
     await page.locator('.agenda-calendar .agenda-calendar-actions > button').nth(1).click()
     await expect(page.locator('.agenda-calendar')).toBeHidden()
     // can open the ics dropdown
-    await barBtnLocator.nth(2).click()
+    await barBtnLocator.nth(3).click()
     const calDdnLocator = page.locator('.n-dropdown-menu > .n-dropdown-option')
     await expect(calDdnLocator).toHaveCount(2)
     await expect(calDdnLocator.first()).toContainText('Subscribe')
diff --git a/playwright/tests/status/status.spec.js b/playwright/tests/status/status.spec.js
new file mode 100644
index 0000000000..c70617e2fd
--- /dev/null
+++ b/playwright/tests/status/status.spec.js
@@ -0,0 +1,66 @@
+const {
+  test,
+  expect
+} = require('@playwright/test')
+const { STATUS_STORAGE_KEY, generateStatusTestId } = require('../../../client/shared/status-common.js')
+
+test.describe('site status', () => {
+  const noStatus = {
+    hasMessage: false
+  }
+
+  const status1 = {
+    hasMessage: true,
+    id: 1,
+    slug: '2024-7-9fdfdf-sdfsdf',
+    title: 'My status title',
+    body: 'My status body',
+    url: '/status/2024-7-9fdfdf-sdfsdf',
+    date: '2024-07-09T07:05:13+00:00',
+    by: 'Exile is a cool Amiga game'
+  }
+
+  test.beforeEach(({ page, browserName }) => {
+    page.setDefaultTimeout(15 * 1000) // increase default timeout
+    test.skip(browserName === 'firefox', 'bypassing flaky tests on Firefox')
+  })
+
+  test('Renders server status as Notification', async ({ page }) => {
+    await page.route('/status/latest.json', route => {
+      route.fulfill({
+        status: 200,
+        contentType: 'application/json',
+        body: JSON.stringify(status1)
+      })
+    })
+    await page.goto('/')
+    await expect(page.getByTestId(generateStatusTestId(status1.id)), 'should have status').toHaveCount(1)
+  })
+
+  test("Doesn't render dismissed server statuses", async ({ page }) => {
+    await page.route('/status/latest.json', route => {
+      route.fulfill({
+        status: 200,
+        contentType: 'application/json',
+        body: JSON.stringify(status1)
+      })
+    })
+    await page.goto('/')
+    await page.evaluate(({ key, value }) => localStorage.setItem(key, value), { key: STATUS_STORAGE_KEY, value: JSON.stringify([status1.id]) })
+    await expect(page.getByTestId(generateStatusTestId(status1.id)), 'should have status').toHaveCount(0)
+  })
+
+  test('Handles no server status', async ({ page }) => {
+    await page.route('/status/latest.json', route => {
+      route.fulfill({
+        status: 200,
+        contentType: 'application/json',
+        body: JSON.stringify(noStatus)
+      })
+    })
+
+    await page.goto('/')
+
+    await expect(page.getByTestId(generateStatusTestId(status1.id)), 'should have status').toHaveCount(0)
+  })
+})
diff --git a/pyzmail/__init__.py b/pyzmail/__init__.py
deleted file mode 100644
index f6c8854abc..0000000000
--- a/pyzmail/__init__.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#
-# pyzmail/__init__.py
-# (c) Alain Spineux
-# http://www.magiksys.net/pyzmail
-# Released under LGPL
-
-from . 
import utils -from .generate import compose_mail, send_mail, send_mail2 -from .parse import email_address_re, PyzMessage, PzMessage, decode_text -from .parse import message_from_string, message_from_file -from .parse import message_from_bytes, message_from_binary_file # python >= 3.2 -from .version import __version__ - -# to help epydoc to display functions available from top of the package -__all__= [ 'compose_mail', 'send_mail', 'send_mail2', 'email_address_re', \ - 'PyzMessage', 'PzMessage', 'decode_text', '__version__', - 'utils', 'generate', 'parse', 'version', - 'message_from_string','message_from_file', - 'message_from_binary_file', 'message_from_bytes', # python >= 3.2 - ] - diff --git a/pyzmail/generate.py b/pyzmail/generate.py deleted file mode 100644 index 2ad1f62246..0000000000 --- a/pyzmail/generate.py +++ /dev/null @@ -1,529 +0,0 @@ -# -# pyzmail/generate.py -# (c) Alain Spineux -# http://www.magiksys.net/pyzmail -# Released under LGPL - -""" -Useful functions to compose and send emails. - -For short: - ->>> payload, mail_from, rcpt_to, msg_id=compose_mail((u'Me', 'me@foo.com'), -... [(u'Him', 'him@bar.com')], u'the subject', 'iso-8859-1', ('Hello world', 'us-ascii'), -... attachments=[('attached', 'text', 'plain', 'text.txt', 'us-ascii')]) -... #doctest: +SKIP ->>> error=send_mail(payload, mail_from, rcpt_to, 'localhost', smtp_port=25) -... #doctest: +SKIP -""" - -import os, sys -import time -import base64 -import smtplib, socket -import email -import email.encoders -import email.header -import email.utils -import email.mime -import email.mime.base -import email.mime.text -import email.mime.image -import email.mime.multipart - -from . import utils - -def format_addresses(addresses, header_name=None, charset=None): - """ - Convert a list of addresses into a MIME-compliant header for a From, To, Cc, - or any other I{address} related field. - This mixes the use of email.utils.formataddr() and email.header.Header(). - - @type addresses: list - @param addresses: list of addresses, can be a mix of string a tuple of the form - C{[ 'address@domain', (u'Name', 'name@domain'), ...]}. - If C{u'Name'} contains non us-ascii characters, it must be a - unicode string or encoded using the I{charset} argument. - @type header_name: string or None - @keyword header_name: the name of the header. Its length is used to limit - the length of the first line of the header according the RFC's - requirements. (not very important, but it's better to match the - requirements when possible) - @type charset: str - @keyword charset: the encoding charset for non unicode I{name} and a B{hint} - for encoding of unicode string. In other words, - if the I{name} of an address in a byte string containing non - I{us-ascii} characters, then C{name.decode(charset)} - must generate the expected result. If a unicode string - is used instead, charset will be tried to encode the - string, if it fail, I{utf-8} will be used. - With B{Python 3.x} I{charset} is no more a hint and an exception will - be raised instead of using I{utf-8} has a fall back. - @rtype: str - @return: the encoded list of formated addresses separated by commas, - ready to use as I{Header} value. - - >>> print format_addresses([('John', 'john@foo.com') ], 'From', 'us-ascii').encode() - John - >>> print format_addresses([(u'l\\xe9o', 'leo@foo.com') ], 'To', 'iso-8859-1').encode() - =?iso-8859-1?q?l=E9o?= - >>> print format_addresses([(u'l\\xe9o', 'leo@foo.com') ], 'To', 'us-ascii').encode() - ... 
# don't work in 3.X because charset is more than a hint - ... #doctest: +SKIP - =?utf-8?q?l=C3=A9o?= - >>> # because u'l\xe9o' cannot be encoded into us-ascii, utf8 is used instead - >>> print format_addresses([('No\\xe9', 'noe@f.com'), (u'M\\u0101ori', 'maori@b.com') ], 'Cc', 'iso-8859-1').encode() - ... # don't work in 3.X because charset is more than a hint - ... #doctest: +SKIP - =?iso-8859-1?q?No=E9?= , =?utf-8?b?TcSBb3Jp?= - >>> # 'No\xe9' is already encoded into iso-8859-1, but u'M\\u0101ori' cannot be encoded into iso-8859-1 - >>> # then utf8 is used here - >>> print format_addresses(['a@bar.com', ('John', 'john@foo.com') ], 'From', 'us-ascii').encode() - a@bar.com , John - """ - header=email.header.Header(charset=charset, header_name=header_name) - for i, address in enumerate(addresses): - if i!=0: - # add separator between addresses - header.append(',', charset='us-ascii') - - try: - name, addr=address - except ValueError: - # address is not a tuple, their is no name, only email address - header.append(address, charset='us-ascii') - else: - # check if address name is a unicode or byte string in "pure" us-ascii - if utils.is_usascii(name): - # name is a us-ascii byte string, i can use formataddr - formated_addr=email.utils.formataddr((name, addr)) - # us-ascii must be used and not default 'charset' - header.append(formated_addr, charset='us-ascii') - else: - # this is not as "pure" us-ascii string - # Header will use "RFC2047" to encode the address name - # if name is byte string, charset will be used to decode it first - header.append(name) - # here us-ascii must be used and not default 'charset' - header.append('<%s>' % (addr,), charset='us-ascii') - - return header - - -def build_mail(text, html=None, attachments=None, embeddeds=None): - """ - Generate the core of the email message regarding the parameters. - The structure of the MIME email may vary, but the general one is as follow:: - - multipart/mixed (only if attachments are included) - | - +-- multipart/related (only if embedded contents are included) - | | - | +-- multipart/alternative (only if text AND html are available) - | | | - | | +-- text/plain (text version of the message) - | | +-- text/html (html version of the message) - | | - | +-- image/gif (where to include embedded contents) - | - +-- application/msword (where to add attachments) - - @param text: the text version of the message, under the form of a tuple: - C{(encoded_content, encoding)} where I{encoded_content} is a byte string - encoded using I{encoding}. - I{text} can be None if the message has no text version. - @type text: tuple or None - @keyword html: the HTML version of the message, under the form of a tuple: - C{(encoded_content, encoding)} where I{encoded_content} is a byte string - encoded using I{encoding} - I{html} can be None if the message has no HTML version. - @type html: tuple or None - @keyword attachments: the list of attachments to include into the mail, in the - form [(data, maintype, subtype, filename, charset), ..] where : - - I{data} : is the raw data, or a I{charset} encoded string for 'text' - content. - - I{maintype} : is a MIME main type like : 'text', 'image', 'application' .... - - I{subtype} : is a MIME sub type of the above I{maintype} for example : - 'plain', 'png', 'msword' for respectively 'text/plain', 'image/png', - 'application/msword'. 
- - I{filename} this is the filename of the attachment, it must be a - 'us-ascii' string or a tuple of the form - C{(encoding, language, encoded_filename)} - following the RFC2231 requirement, for example - C{('iso-8859-1', 'fr', u'r\\xe9pertoir.png'.encode('iso-8859-1'))} - - I{charset} : if I{maintype} is 'text', then I{data} must be encoded - using this I{charset}. It can be None for non 'text' content. - @type attachments: list - @keyword embeddeds: is a list of documents embedded inside the HTML or text - version of the message. It is similar to the I{attachments} list, - but I{filename} is replaced by I{content_id} that is related to - the B{cid} reference into the HTML or text version of the message. - @type embeddeds: list - @rtype: inherit from email.Message - @return: the message in a MIME object - - >>> mail=build_mail(('Hello world', 'us-ascii'), attachments=[('attached', 'text', 'plain', 'text.txt', 'us-ascii')]) - >>> mail.set_boundary('===limit1==') - >>> print mail.as_string(unixfrom=False) - Content-Type: multipart/mixed; boundary="===limit1==" - MIME-Version: 1.0 - - --===limit1== - Content-Type: text/plain; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - - Hello world - --===limit1== - Content-Type: text/plain; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - Content-Disposition: attachment; filename="text.txt" - - attached - --===limit1==-- - """ - - if attachments is None: - attachments = [] - if embeddeds is None: - embeddeds = [] - - main=text_part=html_part=None - if text: - content, charset=text - main=text_part=email.mime.text.MIMEText(content, 'plain', charset) - - if html: - content, charset=html - main=html_part=email.mime.text.MIMEText(content, 'html', charset) - - if not text_part and not html_part: - main=text_part=email.mime.text.MIMEText('', 'plain', 'us-ascii') - elif text_part and html_part: - # need to create a multipart/alternative to include text and html version - main=email.mime.multipart.MIMEMultipart('alternative', None, [text_part, html_part]) - - if embeddeds: - related=email.mime.multipart.MIMEMultipart('related') - related.attach(main) - for part in embeddeds: - if not isinstance(part, email.mime.base.MIMEBase): - data, maintype, subtype, content_id, charset=part - if (maintype=='text'): - part=email.mime.text.MIMEText(data, subtype, charset) - else: - part=email.mime.base.MIMEBase(maintype, subtype) - part.set_payload(data) - email.encoders.encode_base64(part) - part.add_header('Content-ID', '<'+content_id+'>') - part.add_header('Content-Disposition', 'inline') - related.attach(part) - main=related - - if attachments: - mixed=email.mime.multipart.MIMEMultipart('mixed') - mixed.attach(main) - for part in attachments: - if not isinstance(part, email.mime.base.MIMEBase): - data, maintype, subtype, filename, charset=part - if (maintype=='text'): - part=email.mime.text.MIMEText(data, subtype, charset) - else: - part=email.mime.base.MIMEBase(maintype, subtype) - part.set_payload(data) - email.encoders.encode_base64(part) - part.add_header('Content-Disposition', 'attachment', filename=filename) - mixed.attach(part) - main=mixed - - return main - -def complete_mail(message, sender, recipients, subject, default_charset, cc=None, bcc=None, message_id_string=None, date=None, headers=None): - """ - Fill in the From, To, Cc, Subject, Date and Message-Id I{headers} of - one existing message regarding the parameters. 
- - @type message:email.Message - @param message: the message to fill in - @type sender: tuple - @param sender: a tuple of the form (u'Sender Name', 'sender.address@domain.com') - @type recipients: list - @param recipients: a list of addresses. Address can be tuple or string like - expected by L{format_addresses()}, for example: C{[ 'address@dmain.com', - (u'Recipient Name', 'recipient.address@domain.com'), ... ]} - @type subject: str - @param subject: The subject of the message, can be a unicode string or a - string encoded using I{default_charset} encoding. Prefert unicode to - byte string here. - @type default_charset: str - @param default_charset: The default charset for this email. Arguments - that are non unicode string are supposed to be encoded using this charset. - This I{charset} will be used has an hint when encoding mail content. - @type cc: list - @keyword cc: The I{carbone copy} addresses. Same format as the I{recipients} - argument. - @type bcc: list - @keyword bcc: The I{blind carbone copy} addresses. Same format as the I{recipients} - argument. - @type message_id_string: str or None - @keyword message_id_string: if None, don't append any I{Message-ID} to the - mail, let the SMTP do the job, else use the string to generate a unique - I{ID} using C{email.utils.make_msgid()}. The generated value is - returned as last argument. For example use the name of your application. - @type date: int or None - @keyword date: utc time in second from the epoch or None. If None then - use curent time C{time.time()} instead. - @type headers: list of tuple - @keyword headers: a list of C{(field, value)} tuples to fill in the mail - header fields. Values are encoded using I{default_charset}. - @rtype: tuple - @return: B{(payload, mail_from, rcpt_to, msg_id)} - - I{payload} (str) is the content of the email, generated from the message - - I{mail_from} (str) is the address of the sender to pass to the SMTP host - - I{rcpt_to} (list) is a list of the recipients addresses to pass to the SMTP host - of the form C{[ 'a@b.com', c@d.com', ]}. This combine all recipients, - I{carbone copy} addresses and I{blind carbone copy} addresses. - - I{msg_id} (None or str) None if message_id_string==None else the generated value for - the message-id. If not None, this I{Message-ID} is already written - into the payload. - - >>> import email.mime.text - >>> msg=email.mime.text.MIMEText('The text.', 'plain', 'us-ascii') - >>> # I could use build_mail() instead - >>> payload, mail_from, rcpt_to, msg_id=complete_mail(msg, ('Me', 'me@foo.com'), - ... [ ('Him', 'him@bar.com'), ], 'Non unicode subject', 'iso-8859-1', - ... cc=['her@bar.com',], date=1313558269, headers=[('User-Agent', u'pyzmail'), ]) - >>> print payload - ... # 3.X encode User-Agent: using 'iso-8859-1' even if it contains only us-asccii - ... # doctest: +ELLIPSIS - Content-Type: text/plain; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - From: Me - To: Him - Cc: her@bar.com - Subject: =?iso-8859-1?q?Non_unicode_subject?= - Date: ... - User-Agent: ...pyzmail... - - The text. 
- >>> print 'mail_from=%r rcpt_to=%r' % (mail_from, rcpt_to) - mail_from='me@foo.com' rcpt_to=['him@bar.com', 'her@bar.com'] - """ - def getaddr(address): - if isinstance(address, tuple): - return address[1] - else: - return address - - if cc is None: - cc=[] - if bcc is None: - bcc=[] - if headers is None: - headers=[] - - mail_from=getaddr(sender[1]) - rcpt_to=list(map(getaddr, recipients)) - rcpt_to.extend(list(map(getaddr, cc))) - rcpt_to.extend(list(map(getaddr, bcc))) - - message['From'] = format_addresses([ sender, ], header_name='from', charset=default_charset) - if recipients: - message['To'] = format_addresses(recipients, header_name='to', charset=default_charset) - if cc: - message['Cc'] = format_addresses(cc, header_name='cc', charset=default_charset) - message['Subject'] = email.header.Header(subject, default_charset) - if date: - utc_from_epoch=date - else: - utc_from_epoch=time.time() - message['Date'] = email.utils.formatdate(utc_from_epoch, localtime=True) - - if message_id_string: - msg_id=message['Message-Id']=email.utils.make_msgid(message_id_string) - else: - msg_id=None - - for field, value in headers: - message[field]=email.header.Header(value, default_charset) - - payload=message.as_string() - - return payload, mail_from, rcpt_to, msg_id - -def compose_mail(sender, recipients, subject, default_charset, text, html=None, attachments=None, embeddeds=None, cc=None, bcc=None, message_id_string=None, date=None, headers=None): - """ - Compose an email regarding the arguments. Call L{build_mail()} and - L{complete_mail()} at once. - - Read the B{parameters} descriptions of both functions L{build_mail()} and L{complete_mail()}. - - Returned value is the same as for L{build_mail()} and L{complete_mail()}. - You can pass the returned values to L{send_mail()} or L{send_mail2()}. - - @rtype: tuple - @return: B{(payload, mail_from, rcpt_to, msg_id)} - - >>> payload, mail_from, rcpt_to, msg_id=compose_mail((u'Me', 'me@foo.com'), [(u'Him', 'him@bar.com')], u'the subject', 'iso-8859-1', ('Hello world', 'us-ascii'), attachments=[('attached', 'text', 'plain', 'text.txt', 'us-ascii')]) - """ - if attachments is None: - attachments=[] - if embeddeds is None: - embeddeds=[] - if cc is None: - cc=[] - if bcc is None: - bcc = [] - if headers is None: - headers=[] - - message=build_mail(text, html, attachments, embeddeds) - return complete_mail(message, sender, recipients, subject, default_charset, cc, bcc, message_id_string, date, headers) - - -def send_mail2(payload, mail_from, rcpt_to, smtp_host, smtp_port=25, smtp_mode='normal', smtp_login=None, smtp_password=None): - """ - Send the message to a SMTP host. Look at the L{send_mail()} documentation. - L{send_mail()} call this function and catch all exceptions to convert them - into a user friendly error message. The returned value - is always a dictionary. It can be empty if all recipients have been - accepted. - - @rtype: dict - @return: This function return the value returnd by C{smtplib.SMTP.sendmail()} - or raise the same exceptions. - - This method will return normally if the mail is accepted for at least one - recipient. Otherwise it will raise an exception. That is, if this - method does not raise an exception, then someone should get your mail. - If this method does not raise an exception, it returns a dictionary, - with one entry for each recipient that was refused. Each entry contains a - tuple of the SMTP error code and the accompanying error message sent by the server. 
- - @raise smtplib.SMTPException: Look at the standard C{smtplib.SMTP.sendmail()} documentation. - - """ - if smtp_mode=='ssl': - smtp=smtplib.SMTP_SSL(smtp_host, smtp_port) - else: - smtp=smtplib.SMTP(smtp_host, smtp_port) - if smtp_mode=='tls': - smtp.starttls() - - if smtp_login and smtp_password: - if sys.version_info<(3, 0): - # python 2.x - # login and password must be encoded - # because HMAC used in CRAM_MD5 require non unicode string - smtp.login(smtp_login.encode('utf-8'), smtp_password.encode('utf-8')) - else: - #python 3.x - smtp.login(smtp_login, smtp_password) - try: - ret=smtp.sendmail(mail_from, rcpt_to, payload) - finally: - try: - smtp.quit() - except Exception as e: - pass - - return ret - -def send_mail(payload, mail_from, rcpt_to, smtp_host, smtp_port=25, smtp_mode='normal', smtp_login=None, smtp_password=None): - """ - Send the message to a SMTP host. Handle SSL, TLS and authentication. - I{payload}, I{mail_from} and I{rcpt_to} can come from values returned by - L{complete_mail()}. This function call L{send_mail2()} but catch all - exceptions and return friendly error message instead. - - @type payload: str - @param payload: the mail content. - @type mail_from: str - @param mail_from: the sender address, for example: C{'me@domain.com'}. - @type rcpt_to: list - @param rcpt_to: The list of the recipient addresses in the form - C{[ 'a@b.com', c@d.com', ]}. No names here, only email addresses. - @type smtp_host: str - @param smtp_host: the IP address or the name of the SMTP host. - @type smtp_port: int - @keyword smtp_port: the port to connect to on the SMTP host. Default is C{25}. - @type smtp_mode: str - @keyword smtp_mode: the way to connect to the SMTP host, can be: - C{'normal'}, C{'ssl'} or C{'tls'}. default is C{'normal'} - @type smtp_login: str or None - @keyword smtp_login: If authentication is required, this is the login. - Be carefull to I{UTF8} encode your login if it contains - non I{us-ascii} characters. - @type smtp_password: str or None - @keyword smtp_password: If authentication is required, this is the password. - Be carefull to I{UTF8} encode your password if it - contains non I{us-ascii} characters. - - @rtype: dict or str - @return: This function return a dictionary of failed recipients - or a string with an error message. - - If all recipients have been accepted the dictionary is empty. If the - returned value is a string, none of the recipients will get the message. - - The dictionary is exactly of the same sort as - smtplib.SMTP.sendmail() returns with one entry for each recipient that - was refused. Each entry contains a tuple of the SMTP error code and - the accompanying error message sent by the server. - - Example: - - >>> send_mail('Subject: hello\\n\\nmessage', 'a@foo.com', [ 'b@bar.com', ], 'localhost') #doctest: +SKIP - {} - - Here is how to use the returned value:: - if isinstance(ret, dict): - if ret: - print 'failed' recipients: - for recipient, (code, msg) in ret.iteritems(): - print 'code=%d recipient=%s\terror=%s' % (code, recipient, msg) - else: - print 'success' - else: - print 'Error:', ret - - To use your GMail account to send your mail:: - smtp_host='smtp.gmail.com' - smtp_port=587 - smtp_mode='tls' - smtp_login='your.gmail.addresse@gmail.com' - smtp_password='your.gmail.password' - - Use your GMail address for the sender ! 
- - """ - - error=dict() - try: - ret=send_mail2(payload, mail_from, rcpt_to, smtp_host, smtp_port, smtp_mode, smtp_login, smtp_password) - except (socket.error, ) as e: - error='server %s:%s not responding: %s' % (smtp_host, smtp_port, e) - except smtplib.SMTPAuthenticationError as e: - error='authentication error: %s' % (e, ) - except smtplib.SMTPRecipientsRefused as e: - # code, error=e.recipients[recipient_addr] - error='all recipients refused: '+', '.join(list(e.recipients.keys())) - except smtplib.SMTPSenderRefused as e: - # e.sender, e.smtp_code, e.smtp_error - error='sender refused: %s' % (e.sender, ) - except smtplib.SMTPDataError as e: - error='SMTP protocol mismatch: %s' % (e, ) - except smtplib.SMTPHeloError as e: - error="server didn't reply properly to the HELO greeting: %s" % (e, ) - except smtplib.SMTPException as e: - error='SMTP error: %s' % (e, ) -# except Exception, e: -# raise # unknown error - else: - # failed addresses and error messages - error=ret - - return error - diff --git a/pyzmail/parse.py b/pyzmail/parse.py deleted file mode 100644 index 9f86c37177..0000000000 --- a/pyzmail/parse.py +++ /dev/null @@ -1,817 +0,0 @@ -# -# pyzmail/parse.py -# (c) Alain Spineux -# http://www.magiksys.net/pyzmail -# Released under LGPL - -""" -Useful functions to parse emails - -@var email_address_re: a regex that match well formed email address (from perlfaq9) -@undocumented: atom_rfc2822 -@undocumented: atom_posfix_restricted -@undocumented: atom -@undocumented: dot_atom -@undocumented: local -@undocumented: domain_lit -@undocumented: domain -@undocumented: addr_spec -""" - -import re -import io -import email -import email.errors -import email.header -import email.message -import mimetypes - -from .utils import * - -# email address REGEX matching the RFC 2822 spec from perlfaq9 -# my $atom = qr{[a-zA-Z0-9_!#\$\%&'*+/=?\^`{}~|\-]+}; -# my $dot_atom = qr{$atom(?:\.$atom)*}; -# my $quoted = qr{"(?:\\[^\r\n]|[^\\"])*"}; -# my $local = qr{(?:$dot_atom|$quoted)}; -# my $domain_lit = qr{\[(?:\\\S|[\x21-\x5a\x5e-\x7e])*\]}; -# my $domain = qr{(?:$dot_atom|$domain_lit)}; -# my $addr_spec = qr{$local\@$domain}; -# -# Python's translation -atom_rfc2822=r"[a-zA-Z0-9_!#\$\%&'*+/=?\^`{}~|\-]+" -atom_posfix_restricted=r"[a-zA-Z0-9_#\$&'*+/=?\^`{}~|\-]+" # without '!' and '%' -atom=atom_rfc2822 -dot_atom=atom + r"(?:\." + atom + ")*" -quoted=r'"(?:\\[^\r\n]|[^\\"])*"' -local="(?:" + dot_atom + "|" + quoted + ")" -domain_lit=r"\[(?:\\\S|[\x21-\x5a\x5e-\x7e])*\]" -domain="(?:" + dot_atom + "|" + domain_lit + ")" -addr_spec=local + "@" + domain -# and the result -email_address_re=re.compile('^'+addr_spec+'$') - -class MailPart: - """ - Data related to a mail part (aka message content, attachment or - embedded content in an email) - - @type charset: str or None - @ivar charset: the encoding of the I{get_payload()} content if I{type} is 'text/*' - and charset has been specified in the message - @type content_id: str or None - @ivar content_id: the MIME Content-ID if specified in the message. - @type description: str or None - @ivar description: the MIME Content-Description if specified in the message. - @type disposition: str or None - @ivar disposition: C{None}, C{'inline'} or C{'attachment'} depending - the MIME Content-Disposition value - @type filename: unicode or None - @ivar filename: the name of the file, if specified in the message. - @type part: inherit from email.mime.base.MIMEBase - @ivar part: the related part inside the message. 
- @type is_body: str or None - @ivar is_body: None if this part is not the mail content itself (an - attachment or embedded content), C{'text/plain'} if this part is the - text content or C{'text/html'} if this part is the HTML version. - @type sanitized_filename: str or None - @ivar sanitized_filename: This field is filled by L{PyzMessage} to store - a valid unique filename related or not with the original filename. - @type type: str - @ivar type: the MIME type, like 'text/plain', 'image/png', 'application/msword' ... - """ - - def __init__(self, part, filename=None, type=None, charset=None, content_id=None, description=None, disposition=None, sanitized_filename=None, is_body=None): - """ - Create an mail part and initialize all attributes - """ - self.part=part # original python part - self.filename=filename # filename in unicode (if any) - self.type=type # the mime-type - self.charset=charset # the charset (if any) - self.description=description # if any - self.disposition=disposition # 'inline', 'attachment' or None - self.sanitized_filename=sanitized_filename # cleanup your filename here (TODO) - self.is_body=is_body # usually in (None, 'text/plain' or 'text/html') - self.content_id=content_id # if any - if self.content_id: - # strip '<>' to ease search and replace in "root" content (TODO) - if self.content_id.startswith('<') and self.content_id.endswith('>'): - self.content_id=self.content_id[1:-1] - - def get_payload(self): - """ - decode and return part payload. if I{type} is 'text/*' and I{charset} - not C{None}, be careful to take care of the text encoding. Use - something like C{part.get_payload().decode(part.charset)} - """ - - payload=None - if self.type.startswith('message/'): - # I don't use msg.as_string() because I want to use mangle_from_=False - if sys.version_info<(3, 0): - # python 2.x - from email.generator import Generator - fp = io.StringIO() - g = Generator(fp, mangle_from_=False) - g.flatten(self.part, unixfrom=False) - payload=fp.getvalue() - else: - # support only for python >= 3.2 - from email.generator import BytesGenerator - import io - fp = io.BytesIO() - g = BytesGenerator(fp, mangle_from_=False) - g.flatten(self.part, unixfrom=False) - payload=fp.getvalue() - - else: - payload=self.part.get_payload(decode=True) - return payload - - def __repr__(self): - st='MailPart<' - if self.is_body: - st+='*' - st+=self.type - if self.charset: - st+=' charset='+self.charset - if self.filename: - st+=' filename='+self.filename - if self.content_id: - st+=' content_id='+self.content_id - st+=' len=%d' % (len(self.get_payload()), ) - st+='>' - return st - - - -_line_end_re=re.compile('\r\n|\n\r|\n|\r') - -def _friendly_header(header): - """ - Convert header returned by C{email.message.Message.get()} into a - user friendly string. - - Py3k C{email.message.Message.get()} return C{header.Header()} with charset - set to C{charset.UNKNOWN8BIT} when the header contains invalid characters, - else it return I{str} as Python 2.X does - - @type header: str or email.header.Header - @param header: the header to convert into a user friendly string - - @rtype: str - @returns: the converter header - """ - - save=header - if isinstance(header, email.header.Header): - header=str(header) - - return re.sub(_line_end_re, ' ', header) - -def decode_mail_header(value, default_charset='us-ascii'): - """ - Decode a header value into a unicode string. 
- Works like a more smarter python - C{u"".join(email.header.decode_header()} function - - @type value: str - @param value: the value of the header. - @type default_charset: str - @keyword default_charset: if one charset used in the header (multiple charset - can be mixed) is unknown, then use this charset instead. - - >>> decode_mail_header('=?iso-8859-1?q?Courrier_=E8lectronique_en_Fran=E7ais?=') - u'Courrier \\xe8lectronique en Fran\\xe7ais' - """ - -# value=_friendly_header(value) - try: - headers=email.header.decode_header(value) - except email.errors.HeaderParseError: - # this can append in email.base64mime.decode(), for example for this value: - # '=?UTF-8?B?15HXmdeh15jXqNeVINeY15DXpteUINeTJ9eV16jXlSDXkdeg15XXldeUINem15PXpywg15TXptei16bXldei15nXnSDXqdecINek15zXmdeZ?==?UTF-8?B?157XldeR15nXnCwg157Xldek16Ig157Xl9eV15wg15HXodeV15bXnyDXk9ec15DXnCDXldeh15gg157Xl9eR16rXldeqINep15wg15HXmdeQ?==?UTF-8?B?15zXmNeZ?=' - # then return a sanitized ascii string - # TODO: some improvements are possible here, but a failure here is - # unlikely - return value.encode('us-ascii', 'replace').decode('us-ascii') - else: - for i, (text, charset) in enumerate(headers): - # python 3.x - # email.header.decode_header('a') -> [('a', None)] - # email.header.decode_header('a =?ISO-8859-1?Q?foo?= b') - # --> [(b'a', None), (b'foo', 'iso-8859-1'), (b'b', None)] - # in Py3 text is sometime str and sometime byte :-( - # python 2.x - # email.header.decode_header('a') -> [('a', None)] - # email.header.decode_header('a =?ISO-8859-1?Q?foo?= b') - # --> [('a', None), ('foo', 'iso-8859-1'), ('b', None)] - if (charset is None and sys.version_info>=(3, 0)): - # Py3 - if isinstance(text, str): - # convert Py3 string into bytes string to be sure their is no - # non us-ascii chars and because next line expect byte string - text=text.encode('us-ascii', 'replace') - try: - headers[i]=text.decode(charset or 'us-ascii', 'replace') - except LookupError: - # if the charset is unknown, force default - headers[i]=text.decode(default_charset, 'replace') - - return "".join(headers) - -def get_mail_addresses(message, header_name): - """ - retrieve all email addresses from one message header - - @type message: email.message.Message - @param message: the email message - @type header_name: str - @param header_name: the name of the header, can be 'from', 'to', 'cc' or - any other header containing one or more email addresses - @rtype: list - @returns: a list of the addresses in the form of tuples - C{[(u'Name', 'addresse@domain.com'), ...]} - - >>> import email - >>> import email.mime.text - >>> msg=email.mime.text.MIMEText('The text.', 'plain', 'us-ascii') - >>> msg['From']=email.email.utils.formataddr(('Me', 'me@foo.com')) - >>> msg['To']=email.email.utils.formataddr(('A', 'a@foo.com'))+', '+email.email.utils.formataddr(('B', 'b@foo.com')) - >>> print msg.as_string(unixfrom=False) - Content-Type: text/plain; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - From: Me - To: A , B - - The text. - >>> get_mail_addresses(msg, 'from') - [(u'Me', 'me@foo.com')] - >>> get_mail_addresses(msg, 'to') - [(u'A', 'a@foo.com'), (u'B', 'b@foo.com')] - """ - addrs=email.utils.getaddresses([ _friendly_header(h) for h in message.get_all(header_name, [])]) - for i, (addr_name, addr) in enumerate(addrs): - if not addr_name and addr: - # only one string! Is it the address or the address name ? 
- # use the same for both and see later - addr_name=addr - - if is_usascii(addr): - # address must be ascii only and must match address regex - if not email_address_re.match(addr): - addr='' - else: - addr='' - addrs[i]=(decode_mail_header(addr_name), addr) - return addrs - -def get_filename(part): - """ - Find the filename of a mail part. Many MUA send attachments with the - filename in the I{name} parameter of the I{Content-type} header instead - of in the I{filename} parameter of the I{Content-Disposition} header. - - @type part: inherit from email.mime.base.MIMEBase - @param part: the mail part - @rtype: None or unicode - @returns: the filename or None if not found - - >>> import email.mime.image - >>> attach=email.mime.image.MIMEImage('data', 'png') - >>> attach.add_header('Content-Disposition', 'attachment', filename='image.png') - >>> get_filename(attach) - u'image.png' - >>> print attach.as_string(unixfrom=False) - Content-Type: image/png - MIME-Version: 1.0 - Content-Transfer-Encoding: base64 - Content-Disposition: attachment; filename="image.png" - - ZGF0YQ== - >>> import email.mime.text - >>> attach=email.mime.text.MIMEText('The text.', 'plain', 'us-ascii') - >>> attach.add_header('Content-Disposition', 'attachment', filename=('iso-8859-1', 'fr', u'Fran\\xe7ais.txt'.encode('iso-8859-1'))) - >>> get_filename(attach) - u'Fran\\xe7ais.txt' - >>> print attach.as_string(unixfrom=False) - Content-Type: text/plain; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - Content-Disposition: attachment; filename*="iso-8859-1'fr'Fran%E7ais.txt" - - The text. - """ - filename=part.get_param('filename', None, 'content-disposition') - if not filename: - filename=part.get_param('name', None) # default is 'content-type' - - if filename: - if isinstance(filename, tuple): - # RFC 2231 must be used to encode parameters inside MIME header - filename=email.utils.collapse_rfc2231_value(filename).strip() - else: - # But a lot of MUA erroneously use RFC 2047 instead of RFC 2231 - # in fact anybody missuse RFC2047 here !!! - filename=decode_mail_header(filename) - - return filename - -def _search_message_content(contents, part): - """ - recursive search of message content (text or HTML) inside - the structure of the email. Used by L{search_message_content()} - - @type contents: dict - @param contents: contents already found in parents or brothers I{parts}. - The dictionary will be completed as and when. key is the MIME type of the part. - @type part: inherit email.mime.base.MIMEBase - @param part: the part of the mail to look inside recursively. 
- """ - type=part.get_content_type() - if part.is_multipart(): # type.startswith('multipart/'): - # explore only True 'multipart/*' - # because 'messages/rfc822' are 'multipart/*' too but - # must not be explored here - if type=='multipart/related': - # the first part or the one pointed by start - start=part.get_param('start', None) - related_type=part.get_param('type', None) - for i, subpart in enumerate(part.get_payload()): - if (not start and i==0) or (start and start==subpart.get('Content-Id')): - _search_message_content(contents, subpart) - return - elif type=='multipart/alternative': - # all parts are candidates and latest is the best - for subpart in part.get_payload(): - _search_message_content(contents, subpart) - elif type in ('multipart/report', 'multipart/signed'): - # only the first part is candidate - try: - subpart=part.get_payload()[0] - except IndexError: - return - else: - _search_message_content(contents, subpart) - return - - elif type=='multipart/encrypted': - # the second part is the good one, but we need to de-crypt it - # using the first part. Do nothing - return - - else: - # unknown types must be handled as 'multipart/mixed' - # This is the peace of code that could probably be improved, - # I use a heuristic : if not already found, use first valid non - # 'attachment' parts found - for subpart in part.get_payload(): - tmp_contents=dict() - _search_message_content(tmp_contents, subpart) - for k, v in tmp_contents.items(): - if not subpart.get_param('attachment', None, 'content-disposition')=='': - # if not an attachment, initiate value if not already found - contents.setdefault(k, v) - return - else: - contents[part.get_content_type().lower()]=part - return - - return - -def search_message_content(mail): - """ - search of message content (text or HTML) inside - the structure of the mail. This function is used by L{get_mail_parts()} - to set the C{is_body} part of the L{MailPart}s - - @type mail: inherit from email.message.Message - @param mail: the message to search in. - @rtype: dict - @returns: a dictionary of the form C{{'text/plain': text_part, 'text/html': html_part}} - where text_part and html_part inherite from C{email.mime.text.MIMEText} - and are respectively the I{text} and I{HTML} version of the message content. - One part can be missing. The dictionay can aven be empty if none of the - parts math the requirements to be considered as the content. - """ - contents=dict() - _search_message_content(contents, mail) - return contents - -def get_mail_parts(msg): - """ - return a list of all parts of the message as a list of L{MailPart}. - Retrieve parts attributes to fill in L{MailPart} object. - - @type msg: inherit email.message.Message - @param msg: the message - @rtype: list - @returns: list of mail parts - - >>> import email.mime.multipart - >>> msg=email.mime.multipart.MIMEMultipart(boundary='===limit1==') - >>> import email.mime.text - >>> txt=email.mime.text.MIMEText('The text.', 'plain', 'us-ascii') - >>> msg.attach(txt) - >>> import email.mime.image - >>> image=email.mime.image.MIMEImage('data', 'png') - >>> image.add_header('Content-Disposition', 'attachment', filename='image.png') - >>> msg.attach(image) - >>> print msg.as_string(unixfrom=False) - Content-Type: multipart/mixed; boundary="===limit1==" - MIME-Version: 1.0 - - --===limit1== - Content-Type: text/plain; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - - The text. 
- --===limit1== - Content-Type: image/png - MIME-Version: 1.0 - Content-Transfer-Encoding: base64 - Content-Disposition: attachment; filename="image.png" - - ZGF0YQ== - --===limit1==-- - >>> parts=get_mail_parts(msg) - >>> parts - [MailPart<*text/plain charset=us-ascii len=9>, MailPart] - >>> # the star "*" means this is the mail content, not an attachment - >>> parts[0].get_payload().decode(parts[0].charset) - u'The text.' - >>> parts[1].filename, len(parts[1].get_payload()) - (u'image.png', 4) - - """ - mailparts=[] - - # retrieve messages of the email - contents=search_message_content(msg) - # reverse contents dict - parts=dict((v,k) for k, v in contents.items()) - - # organize the stack to handle deep first search - stack=[ msg, ] - while stack: - part=stack.pop(0) - type=part.get_content_type() - if type.startswith('message/'): - # ('message/delivery-status', 'message/rfc822', 'message/disposition-notification'): - # I don't want to explore the tree deeper her and just save source using msg.as_string() - # but I don't use msg.as_string() because I want to use mangle_from_=False - filename='message.eml' - mailparts.append(MailPart(part, filename=filename, type=type, charset=part.get_param('charset'), description=part.get('Content-Description'))) - elif part.is_multipart(): - # insert new parts at the beginning of the stack (deep first search) - stack[:0]=part.get_payload() - else: - charset=part.get_param('charset') - filename=get_filename(part) - - disposition=None - if part.get_param('inline', None, 'content-disposition')=='': - disposition='inline' - elif part.get_param('attachment', None, 'content-disposition')=='': - disposition='attachment' - - mailparts.append(MailPart(part, filename=filename, type=type, charset=charset, content_id=part.get('Content-Id'), description=part.get('Content-Description'), disposition=disposition, is_body=parts.get(part, False))) - - return mailparts - - -def decode_text(payload, charset, default_charset): - """ - Try to decode text content by trying multiple charset until success. - First try I{charset}, else try I{default_charset} finally - try popular charsets in order : ascii, utf-8, utf-16, windows-1252, cp850 - If all fail then use I{default_charset} and replace wrong characters - - @type payload: str - @param payload: the content to decode - @type charset: str or None - @param charset: the first charset to try if != C{None} - @type default_charset: str or None - @param default_charset: the second charset to try if != C{None} - - @rtype: tuple - @returns: a tuple of the form C{(payload, charset)} - - I{payload}: this is the decoded payload if charset is not None and - payload is a unicode string - - I{charset}: the charset that was used to decode I{payload} If charset is - C{None} then something goes wrong: if I{payload} is unicode then - invalid characters have been replaced and the used charset is I{default_charset} - else, if I{payload} is still byte string then nothing has been done. - - - """ - for chset in [ charset, default_charset, 'ascii', 'utf-8', 'utf-16', 'windows-1252', 'cp850' ]: - if chset: - try: - return payload.decode(chset), chset - except UnicodeError: - pass - - if default_charset: - return payload.decode(chset, 'replace'), None - - return payload, None - -class PyzMessage(email.message.Message): - """ - Inherit from email.message.Message. Combine L{get_mail_parts()}, - L{get_mail_addresses()} and L{decode_mail_header()} into a - B{convenient} object to access mail contents and attributes. 
- This class also B{sanitize} part filenames. - - @type mailparts: list of L{MailPart} - @ivar mailparts: list of L{MailPart} objects composing the email, I{text_part} - and I{html_part} are part of this list as are other attachements and embedded - contents. - @type text_part: L{MailPart} or None - @ivar text_part: the L{MailPart} object that contains the I{text} - version of the message, None if the mail has not I{text} content. - @type html_part: L{MailPart} or None - @ivar html_part: the L{MailPart} object that contains the I{HTML} - version of the message, None if the mail has not I{HTML} content. - - @note: Sample: - - >>> raw='''Content-Type: text/plain; charset="us-ascii" - ... MIME-Version: 1.0 - ... Content-Transfer-Encoding: 7bit - ... Subject: The subject - ... From: Me - ... To: A , B - ... - ... The text. - ... ''' - >>> msg=PyzMessage.factory(raw) - >>> print 'Subject: %r' % (msg.get_subject(), ) - Subject: u'The subject' - >>> print 'From: %r' % (msg.get_address('from'), ) - From: (u'Me', 'me@foo.com') - >>> print 'To: %r' % (msg.get_addresses('to'), ) - To: [(u'A', 'a@foo.com'), (u'B', 'b@foo.com')] - >>> print 'Cc: %r' % (msg.get_addresses('cc'), ) - Cc: [] - >>> for mailpart in msg.mailparts: - ... print ' %sfilename=%r sanitized_filename=%r type=%s charset=%s desc=%s size=%d' % ('*'if mailpart.is_body else ' ', mailpart.filename, mailpart.sanitized_filename, mailpart.type, mailpart.charset, mailpart.part.get('Content-Description'), 0 if mailpart.get_payload()==None else len(mailpart.get_payload())) - ... if mailpart.is_body=='text/plain': - ... payload, used_charset=decode_text(mailpart.get_payload(), mailpart.charset, None) - ... print ' >', payload.split('\\n')[0] - ... - *filename=None sanitized_filename='text.txt' type=text/plain charset=us-ascii desc=None size=10 - > The text. - """ - - @staticmethod - def smart_parser(input): - """ - Use the appropriate parser and return a email.message.Message object - (this is not a L{PyzMessage} object) - - @type input: string, file, bytes, binary_file or email.message.Message - @param input: the source of the message - @rtype: email.message.Message - @returns: the message - """ - if isinstance(input, email.message.Message): - return input - - if sys.version_info<(3, 0): - # python 2.x - if isinstance(input, str): - return email.message_from_string(input) - elif hasattr(input, 'read') and hasattr(input, 'readline'): - return email.message_from_file(input) - else: - raise ValueError('input must be a string, a file or a Message') - else: - # python 3.x - if isinstance(input, str): - return email.message_from_string(input) - elif isinstance(input, bytes): - # python >= 3.2 only - return email.message_from_bytes(input) - elif hasattr(input, 'read') and hasattr(input, 'readline'): - if hasattr(input, 'encoding'): - # python >= 3.2 only - return email.message_from_file(input) - else: - return email.message_from_binary_file(input) - else: - raise ValueError('input must be a string a bytes, a file or a Message') - - @staticmethod - def factory(input): - """ - Use the appropriate parser and return a L{PyzMessage} object - see L{smart_parser} - @type input: string, file, bytes, binary_file or email.message.Message - @param input: the source of the message - @rtype: L{PyzMessage} - @returns: the L{PyzMessage} message - """ - return PyzMessage(PyzMessage.smart_parser(input)) - - - def __init__(self, message): - """ - Initialize the object with data coming from I{message}. 
- - @type message: inherit email.message.Message - @param message: The message - """ - if not isinstance(message, email.message.Message): - raise ValueError("message must inherit from email.message.Message use PyzMessage.factory() instead") - self.__dict__.update(message.__dict__) - - self.mailparts=get_mail_parts(self) - self.text_part=None - self.html_part=None - - filenames=[] - for part in self.mailparts: - ext=mimetypes.guess_extension(part.type) - if not ext: - # default to .bin - ext='.bin' - elif ext=='.ksh': - # guess_extension() is not very accurate, .txt is more - # appropriate than .ksh - ext='.txt' - - sanitized_filename=sanitize_filename(part.filename, part.type.split('/', 1)[0], ext) - sanitized_filename=handle_filename_collision(sanitized_filename, filenames) - filenames.append(sanitized_filename.lower()) - part.sanitized_filename=sanitized_filename - - if part.is_body=='text/plain': - self.text_part=part - - if part.is_body=='text/html': - self.html_part=part - - def get_addresses(self, name): - """ - return the I{name} header value as an list of addresses tuple as - returned by L{get_mail_addresses()} - - @type name: str - @param name: the name of the header to read value from: 'to', 'cc' are - valid I{name} here. - @rtype: tuple - @returns: a tuple of the form C{('Sender Name', 'sender.address@domain.com')} - or C{('', '')} if no header match that I{name}. - """ - return get_mail_addresses(self, name) - - def get_address(self, name): - """ - return the I{name} header value as an address tuple as returned by - L{get_mail_addresses()} - - @type name: str - @param name: the name of the header to read value from: : C{'from'} can - be used to return the sender address. - @rtype: list of tuple - @returns: a list of tuple of the form C{[('Recipient Name', 'recipient.address@domain.com'), ...]} - or an empty list if no header match that I{name}. - """ - value=get_mail_addresses(self, name) - if value: - return value[0] - else: - return ('', '') - - def get_subject(self, default=''): - """ - return the RFC2047 decoded subject. - - @type default: any - @param default: The value to return if the message has no I{Subject} - @rtype: unicode - @returns: the subject or C{default} - """ - return self.get_decoded_header('subject', default) - - def get_decoded_header(self, name, default=''): - """ - return decoded header I{name} using RFC2047. Always use this function - to access header, because any header can contain invalid characters - and this function sanitize the string and avoid unicode exception later - in your program. - EVEN for date, I already saw a "Center box bar horizontal" instead - of a minus character. - - @type name: str - @param name: the name of the header to read value from. - @type default: any - @param default: The value to return if the I{name} field don't exist - in this message. - @rtype: unicode - @returns: the value of the header having that I{name} or C{default} if no - header have that name. - """ - value=self.get(name) - if value==None: - value=default - else: - value=decode_mail_header(value) - return value - -class PzMessage(PyzMessage): - """ - Old name and interface for PyzMessage. - B{Deprecated} - """ - - def __init__(self, input): - """ - Initialize the object with data coming from I{input}. - - @type input: str or file or email.message.Message - @param input: used as the raw content for the email, can be a string, - a file object or an email.message.Message object. 
- """ - PyzMessage.__init__(self, self.smart_parser(input)) - - -def message_from_string(s, *args, **kws): - """ - Parse a string into a L{PyzMessage} object model. - @type s: str - @param s: the input string - @rtype: L{PyzMessage} - @return: the L{PyzMessage} object - """ - return PyzMessage(email.message_from_string(s, *args, **kws)) - -def message_from_file(fp, *args, **kws): - """ - Read a file and parse its contents into a L{PyzMessage} object model. - @type fp: text_file - @param fp: the input file (must be open in text mode if Python >= 3.0) - @rtype: L{PyzMessage} - @return: the L{PyzMessage} object - """ - return PyzMessage(email.message_from_file(fp, *args, **kws)) - -def message_from_bytes(s, *args, **kws): - """ - Parse a bytes string into a L{PyzMessage} object model. - B{(Python >= 3.2)} - @type s: bytes - @param s: the input bytes string - @rtype: L{PyzMessage} - @return: the L{PyzMessage} object - """ - return PyzMessage(email.message_from_bytes(s, *args, **kws)) - -def message_from_binary_file(fp, *args, **kws): - """ - Read a binary file and parse its contents into a L{PyzMessage} object model. - B{(Python >= 3.2)} - @type fp: binary_file - @param fp: the input file, must be open in binary mode - @rtype: L{PyzMessage} - @return: the L{PyzMessage} object - """ - return PyzMessage(email.message_from_binary_file(fp, *args, **kws)) - - -if __name__ == "__main__": - import sys - - if len(sys.argv)<=1: - print('usage : %s filename' % sys.argv[0]) - print('read an email from file and display a resume of its content') - sys.exit(1) - - msg=PyzMessage.factory(open(sys.argv[1], 'rb')) - - print('Subject: %r' % (msg.get_subject(), )) - print('From: %r' % (msg.get_address('from'), )) - print('To: %r' % (msg.get_addresses('to'), )) - print('Cc: %r' % (msg.get_addresses('cc'), )) - print('Date: %r' % (msg.get_decoded_header('date', ''), )) - print('Message-Id: %r' % (msg.get_decoded_header('message-id', ''), )) - - for mailpart in msg.mailparts: - # dont forget to be careful to sanitize 'filename' and be carefull - # for filename collision, to before to save : - print(' %sfilename=%r type=%s charset=%s desc=%s size=%d' % ('*'if mailpart.is_body else ' ', mailpart.filename, mailpart.type, mailpart.charset, mailpart.part.get('Content-Description'), 0 if mailpart.get_payload()==None else len(mailpart.get_payload()))) - - if mailpart.is_body=='text/plain': - # print first 3 lines - payload, used_charset=decode_text(mailpart.get_payload(), mailpart.charset, None) - for line in payload.split('\n')[:3]: - # be careful console can be unable to display unicode characters - if line: - print(' >', line) - - - diff --git a/pyzmail/tests/test_both.py b/pyzmail/tests/test_both.py deleted file mode 100644 index 2607ed8d7f..0000000000 --- a/pyzmail/tests/test_both.py +++ /dev/null @@ -1,99 +0,0 @@ -import unittest -import pyzmail -from pyzmail.generate import * -from pyzmail.parse import * - -class TestBoth(unittest.TestCase): - - def setUp(self): - pass - - def test_compose_and_parse(self): - """test generate and parse""" - - sender=('Me', 'me@foo.com') - recipients=[('Him', 'him@bar.com'), 'just@me.com'] - subject='Le sujet en Fran\xe7ais' - text_content='Bonjour aux Fran\xe7ais' - prefered_encoding='iso-8859-1' - text_encoding='iso-8859-1' - attachments=[('attached content', 'text', 'plain', 'textfile1.txt', 'us-ascii'), - ('Fran\xe7ais', 'text', 'plain', 'textfile2.txt', 'iso-8859-1'), - ('Fran\xe7ais', 'text', 'plain', 'textfile3.txt', 'iso-8859-1'), - (b'image', 'image', 'jpg', 'imagefile.jpg', 
None), - ] - embeddeds=[('embedded content', 'text', 'plain', 'embedded', 'us-ascii'), - (b'picture', 'image', 'png', 'picture', None), - ] - headers=[ ('X-extra', 'extra value'), ('X-extra2', "Seconde ent\xe8te"), ('X-extra3', 'last extra'),] - - message_id_string='pyzmail' - date=1313558269 - - payload, mail_from, rcpt_to, msg_id=pyzmail.compose_mail(\ - sender, \ - recipients, \ - subject, \ - prefered_encoding, \ - (text_content, text_encoding), \ - html=None, \ - attachments=attachments, \ - embeddeds=embeddeds, \ - headers=headers, \ - message_id_string=message_id_string, \ - date=date\ - ) - - msg=PyzMessage.factory(payload) - - self.assertEqual(sender, msg.get_address('from')) - self.assertEqual(recipients[0], msg.get_addresses('to')[0]) - self.assertEqual(recipients[1], msg.get_addresses('to')[1][1]) - self.assertEqual(subject, msg.get_subject()) - self.assertEqual(subject, msg.get_decoded_header('subject')) - - # try to handle different timezone carefully - mail_date=list(email.utils.parsedate(msg.get_decoded_header('date'))) - self.assertEqual(mail_date[:6], list(time.localtime(date))[:6]) - - self.assertNotEqual(msg.get('message-id').find(message_id_string), -1) - for name, value in headers: - self.assertEqual(value, msg.get_decoded_header(name)) - - for mailpart in msg.mailparts: - if mailpart.is_body: - self.assertEqual(mailpart.content_id, None) - self.assertEqual(mailpart.filename, None) - self.assertEqual(type(mailpart.sanitized_filename), str) - if mailpart.type=='text/plain': - self.assertEqual(mailpart.get_payload(), text_content.encode(text_encoding)) - else: - self.fail('found unknown body part') - else: - if mailpart.filename: - lst=attachments - self.assertEqual(mailpart.filename, mailpart.sanitized_filename) - self.assertEqual(mailpart.content_id, None) - elif mailpart.content_id: - lst=embeddeds - self.assertEqual(mailpart.filename, None) - else: - self.fail('found unknown part') - - found=False - for attach in lst: - found=(mailpart.filename and attach[3]==mailpart.filename) \ - or (mailpart.content_id and attach[3]==mailpart.content_id) - if found: - break - - if found: - self.assertEqual(mailpart.type, attach[1]+'/'+attach[2]) - payload=mailpart.get_payload() - if attach[1]=='text' and attach[4] and isinstance(attach[0], str): - payload=payload.decode(attach[4]) - self.assertEqual(payload, attach[0]) - else: - self.fail('found unknown attachment') - - diff --git a/pyzmail/tests/test_generate.py b/pyzmail/tests/test_generate.py deleted file mode 100644 index 7afdebc593..0000000000 --- a/pyzmail/tests/test_generate.py +++ /dev/null @@ -1,30 +0,0 @@ -import unittest, doctest -import pyzmail -from pyzmail.generate import * - -class TestGenerate(unittest.TestCase): - - def setUp(self): - pass - - def test_format_addresses(self): - """test format_addresse""" - self.assertEqual('foo@example.com', str(format_addresses([ 'foo@example.com', ]))) - self.assertEqual('Foo ', str(format_addresses([ ('Foo', 'foo@example.com'), ]))) - # notice the space around the comma - self.assertEqual('foo@example.com , bar@example.com', str(format_addresses([ 'foo@example.com', 'bar@example.com']))) - # notice the space around the comma - self.assertEqual('Foo , Bar ', str(format_addresses([ ('Foo', 'foo@example.com'), ( 'Bar', 'bar@example.com')]))) - -# Add doctest -def load_tests(loader, tests, ignore): - # this works with python 2.7 and 3.x - tests.addTests(doctest.DocTestSuite(pyzmail.generate)) - return tests - -def additional_tests(): - # Add doctest for python 2.6 and below - if 
sys.version_info<(2, 7): - return doctest.DocTestSuite(pyzmail.generate) - else: - return unittest.TestSuite() diff --git a/pyzmail/tests/test_parse.py b/pyzmail/tests/test_parse.py deleted file mode 100644 index f7a5adb9e2..0000000000 --- a/pyzmail/tests/test_parse.py +++ /dev/null @@ -1,290 +0,0 @@ -import unittest, doctest -import pyzmail -from pyzmail.parse import * - - -class Msg: - """mimic a email.Message""" - def __init__(self, value): - self.value=value - - def get_all(self, header_name, default): - if self.value: - return [self.value, ] - else: - return [] - -class TestParse(unittest.TestCase): - - def setUp(self): - pass - - def test_decode_mail_header(self): - """test decode_mail_header()""" - self.assertEqual(decode_mail_header(''), '') - self.assertEqual(decode_mail_header('hello'), 'hello') - self.assertEqual(decode_mail_header('hello '), 'hello ') - self.assertEqual(decode_mail_header('=?iso-8859-1?q?Courrier_=E8lectronique_Fran=E7ais?='), 'Courrier \xe8lectronique Fran\xe7ais') - self.assertEqual(decode_mail_header('=?utf8?q?Courrier_=C3=A8lectronique_Fran=C3=A7ais?='), 'Courrier \xe8lectronique Fran\xe7ais') - self.assertEqual(decode_mail_header('=?utf-8?b?RnJhbsOnYWlz?='), 'Fran\xe7ais') - self.assertEqual(decode_mail_header('=?iso-8859-1?q?Courrier_=E8lectronique_?= =?utf8?q?Fran=C3=A7ais?='), 'Courrier \xe8lectronique Fran\xe7ais') - self.assertEqual(decode_mail_header('=?iso-8859-1?q?Courrier_=E8lectronique_?= =?utf-8?b?RnJhbsOnYWlz?='), 'Courrier \xe8lectronique Fran\xe7ais') - self.assertEqual(decode_mail_header('h_subject_q_iso_8858_1 : =?ISO-8859-1?Q?Fran=E7ais=E20accentu=E9?= !'), 'h_subject_q_iso_8858_1 :Fran\xe7ais\xe20accentu\xe9!') - - def test_get_mail_addresses(self): - """test get_mail_addresses()""" - self.assertEqual([ ('foo@example.com', 'foo@example.com') ], get_mail_addresses(Msg('foo@example.com'), 'to')) - self.assertEqual([ ('Foo', 'foo@example.com'), ], get_mail_addresses(Msg('Foo '), 'to')) - # notice the space around the comma - self.assertEqual([ ('foo@example.com', 'foo@example.com'), ('bar@example.com', 'bar@example.com')], get_mail_addresses(Msg('foo@example.com , bar@example.com'), 'to')) - self.assertEqual([ ('Foo', 'foo@example.com'), ( 'Bar', 'bar@example.com')], get_mail_addresses(Msg('Foo , Bar '), 'to')) - self.assertEqual([ ('Foo', 'foo@example.com'), ('bar@example.com', 'bar@example.com')], get_mail_addresses(Msg('Foo , bar@example.com'), 'to')) - self.assertEqual([ ('Mr Foo', 'foo@example.com'), ('bar@example.com', 'bar@example.com')], get_mail_addresses(Msg('Mr\nFoo , bar@example.com'), 'to')) - - self.assertEqual([ ('Beno\xeet', 'benoit@example.com')], get_mail_addresses(Msg('=?utf-8?q?Beno=C3=AEt?= '), 'to')) - - # address already encoded into utf8 (bad) - address='Ant\xf3nio Foo '.encode('utf8') - if sys.version_info<(3, 0): - self.assertEqual([('Ant\ufffd\ufffdnio Foo', 'a.foo@example.com')], get_mail_addresses(Msg(address), 'to')) - else: - # Python 3.2 return header when surrogate characters are used in header - self.assertEqual([('Ant??nio Foo', 'a.foo@example.com'), ], get_mail_addresses(Msg(email.header.Header(address, charset=email.charset.UNKNOWN8BIT, header_name='to')), 'to')) - - def test_get_filename(self): - """test get_filename()""" - import email.mime.image - - filename='Fran\xe7ais.png' - if sys.version_info<(3, 0): - encoded_filename=filename.encode('iso-8859-1') - else: - encoded_filename=filename - - payload=b'data' - attach=email.mime.image.MIMEImage(payload, 'png') - attach.add_header('Content-Disposition', 
'attachment', filename='image.png') - self.assertEqual('image.png', get_filename(attach)) - - attach=email.mime.image.MIMEImage(payload, 'png') - attach.add_header('Content-Disposition', 'attachment', filename=('iso-8859-1', 'fr', encoded_filename)) - self.assertEqual('Fran\xe7ais.png', get_filename(attach)) - - attach=email.mime.image.MIMEImage(payload, 'png') - attach.set_param('name', 'image.png') - self.assertEqual('image.png', get_filename(attach)) - - attach=email.mime.image.MIMEImage(payload, 'png') - attach.set_param('name', ('iso-8859-1', 'fr', encoded_filename)) - self.assertEqual('Fran\xe7ais.png', get_filename(attach)) - - attach=email.mime.image.MIMEImage(payload, 'png') - attach.add_header('Content-Disposition', 'attachment', filename='image.png') - attach.set_param('name', 'image_wrong.png') - self.assertEqual('image.png', get_filename(attach)) - - def test_get_mailparts(self): - """test get_mailparts()""" - import email.mime.multipart - import email.mime.text - import email.mime.image - msg=email.mime.multipart.MIMEMultipart(boundary='===limit1==') - txt=email.mime.text.MIMEText('The text.', 'plain', 'us-ascii') - msg.attach(txt) - image=email.mime.image.MIMEImage(b'data', 'png') - image.add_header('Content-Disposition', 'attachment', filename='image.png') - image.add_header('Content-Description', 'the description') - image.add_header('Content-ID', '') - msg.attach(image) - - raw=msg.as_string(unixfrom=False) - expected_raw="""Content-Type: multipart/mixed; boundary="===limit1==" -MIME-Version: 1.0 - ---===limit1== -Content-Type: text/plain; charset="us-ascii" -MIME-Version: 1.0 -Content-Transfer-Encoding: 7bit - -The text. ---===limit1== -Content-Type: image/png -MIME-Version: 1.0 -Content-Transfer-Encoding: base64 -Content-Disposition: attachment; filename="image.png" -Content-Description: the description -Content-ID: - -ZGF0YQ== ---===limit1==--""" - - if sys.version_info<(3, 0): - expected_raw=expected_raw.replace('','') - else: - expected_raw=expected_raw.replace('','\n') - - self.assertEqual(raw, expected_raw) - - parts=get_mail_parts(msg) - # [MailPart<*text/plain charset=us-ascii len=9>, MailPart] - - self.assertEqual(len(parts), 2) - - self.assertEqual(parts[0].type, 'text/plain') - self.assertEqual(parts[0].is_body, 'text/plain') # not a error, is_body must be type - self.assertEqual(parts[0].charset, 'us-ascii') - self.assertEqual(parts[0].get_payload().decode(parts[0].charset), 'The text.') - - self.assertEqual(parts[1].type, 'image/png') - self.assertEqual(parts[1].is_body, False) - self.assertEqual(parts[1].charset, None) - self.assertEqual(parts[1].filename, 'image.png') - self.assertEqual(parts[1].description, 'the description') - self.assertEqual(parts[1].content_id, 'this.is.the.normaly.unique.contentid') - self.assertEqual(parts[1].get_payload(), b'data') - - - raw_1='''Content-Type: text/plain; charset="us-ascii" -MIME-Version: 1.0 -Content-Transfer-Encoding: 7bit -Subject: simple test -From: Me -To: A , B -Cc: C , d@foo.com -User-Agent: pyzmail - -The text. 
-''' - - def check_message_1(self, msg): - self.assertEqual(msg.get_subject(), 'simple test') - self.assertEqual(msg.get_decoded_header('subject'), 'simple test') - self.assertEqual(msg.get_decoded_header('User-Agent'), 'pyzmail') - self.assertEqual(msg.get('User-Agent'), 'pyzmail') - self.assertEqual(msg.get_address('from'), ('Me', 'me@foo.com')) - self.assertEqual(msg.get_addresses('to'), [('A', 'a@foo.com'), ('B', 'b@foo.com')]) - self.assertEqual(msg.get_addresses('cc'), [('C', 'c@foo.com'), ('d@foo.com', 'd@foo.com')]) - self.assertEqual(len(msg.mailparts), 1) - self.assertEqual(msg.text_part, msg.mailparts[0]) - self.assertEqual(msg.html_part, None) - - # use 8bits encoding and 2 different charsets ! python 3.0 & 3.1 are not eable to parse this sample - raw_2=b"""From: sender@domain.com -To: recipient@domain.com -Date: Tue, 7 Jun 2011 16:32:17 +0200 -Subject: contains 8bits attachments using different encoding -Content-Type: multipart/mixed; boundary=mixed - ---mixed -Content-Type: text/plain; charset="us-ascii" -MIME-Version: 1.0 -Content-Transfer-Encoding: 7bit - -body ---mixed -Content-Type: text/plain; charset="windows-1252" -MIME-Version: 1.0 -Content-Transfer-Encoding: 8bit -Content-Disposition: attachment; filename="file1.txt" - -bo\xeete mail = mailbox ---mixed -Content-Type: text/plain; charset="utf-8" -MIME-Version: 1.0 -Content-Transfer-Encoding: 8bit -Content-Disposition: attachment; filename="file2.txt" - -bo\xc3\xaete mail = mailbox ---mixed-- -""" - - def check_message_2(self, msg): - self.assertEqual(msg.get_subject(), 'contains 8bits attachments using different encoding') - - body, file1, file2=msg.mailparts - - self.assertEqual('file1.txt', file1.filename) - self.assertEqual('file2.txt', file2.filename) - self.assertEqual('windows-1252', file1.charset) - self.assertEqual('utf-8', file2.charset) - content=b'bo\xeete mail = mailbox'.decode("windows-1252") - content1=file1.get_payload().decode(file1.charset) - content2=file2.get_payload().decode(file2.charset) - self.assertEqual(content, content1) - self.assertEqual(content, content2) - - # this one contain non us-ascii chars in the header - # py 2x and py3k return different value here - raw_3=b'Content-Type: text/plain; charset="us-ascii"\n' \ - b'MIME-Version: 1.0\n' \ - b'Content-Transfer-Encoding: 7bit\n' \ - + 'Subject: Beno\xeet & Ant\xf3nio\n'.encode('utf8') +\ - b'From: =?utf-8?q?Beno=C3=AEt?= \n' \ - + 'To: Ant\xf3nio Foo \n'.encode('utf8') \ - + 'Cc: Beno\xeet , d@foo.com\n'.encode('utf8') +\ - b'User-Agent: pyzmail\n' \ - b'\n' \ - b'The text.\n' - - def check_message_3(self, msg): - subject='Beno\ufffd\ufffdt & Ant\ufffd\ufffdnio' # if sys.version_info<(3, 0) else u'Beno??t & Ant??nio' - self.assertEqual(msg.get_subject(), subject) - self.assertEqual(msg.get_decoded_header('subject'), subject) - self.assertEqual(msg.get_decoded_header('User-Agent'), 'pyzmail') - self.assertEqual(msg.get('User-Agent'), 'pyzmail') - self.assertEqual(msg.get_address('from'), ('Beno\xeet', 'benoit@example.com')) - - to=msg.get_addresses('to') - self.assertEqual(to[0][1], 'a.foo@example.com') - self.assertEqual(to[0][0], 'Ant\ufffd\ufffdnio Foo' if sys.version_info<(3, 0) else 'Ant??nio Foo') - - cc=msg.get_addresses('cc') - self.assertEqual(cc[0][1], 'benoit@foo.com') - self.assertEqual(cc[0][0], 'Beno\ufffd\ufffdt' if sys.version_info<(3, 0) else 'Beno??t') - self.assertEqual(cc[1], ('d@foo.com', 'd@foo.com')) - - self.assertEqual(len(msg.mailparts), 1) - self.assertEqual(msg.text_part, msg.mailparts[0]) - 
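The per-part charset handling that raw_2/check_message_2 above verify can also be expressed with the modern email.policy API. A minimal sketch, assuming Python 3 and the raw_2 bytes defined in the removed test module:

import email
from email import policy

msg = email.message_from_bytes(raw_2, policy=policy.default)
for part in msg.iter_attachments():
    if part.get_content_maintype() == "text":
        # get_content() decodes each attachment with its own declared charset
        # (windows-1252 for file1.txt, utf-8 for file2.txt in the sample above)
        print(part.get_filename(), part.get_content())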
self.assertEqual(msg.html_part, None) - - - def check_pyzmessage_factories(self, input, check): - """test PyzMessage from different sources""" - if isinstance(input, bytes) and sys.version_info>=(3, 2): - check(PyzMessage.factory(input)) - check(message_from_bytes(input)) - - import io - check(PyzMessage.factory(io.BytesIO(input))) - check(message_from_binary_file(io.BytesIO(input))) - - if isinstance(input, str): - - check(PyzMessage.factory(input)) - check(message_from_string(input)) - - import io - check(PyzMessage.factory(io.StringIO(input))) - check(message_from_file(io.StringIO(input))) - - def test_pyzmessage_factories(self): - """test PyzMessage class different sources""" - self.check_pyzmessage_factories(self.raw_1, self.check_message_1) - self.check_pyzmessage_factories(self.raw_2, self.check_message_2) - self.check_pyzmessage_factories(self.raw_3, self.check_message_3) - - -# Add doctest -def load_tests(loader, tests, ignore): - # this works with python 2.7 and 3.x - if sys.version_info<(3, 0): - tests.addTests(doctest.DocTestSuite(pyzmail.parse)) - return tests - -def additional_tests(): - # Add doctest for python 2.6 and below - if sys.version_info<(2, 7): - return doctest.DocTestSuite(pyzmail.parse) - else: - return unittest.TestSuite() - diff --git a/pyzmail/tests/test_send.py b/pyzmail/tests/test_send.py deleted file mode 100644 index 554f8549a4..0000000000 --- a/pyzmail/tests/test_send.py +++ /dev/null @@ -1,77 +0,0 @@ -import threading, smtpd, asyncore, socket, smtplib, time -import unittest -import pyzmail -from pyzmail.generate import * - - -smtpd_addr='127.0.0.1' -smtpd_port=32525 -smtp_bad_port=smtpd_port-1 - -smtp_mode='normal' -smtp_login=None -smtp_password=None - - -class SMTPServer(smtpd.SMTPServer): - def __init__(self, localaddr, remoteaddr, received): - smtpd.SMTPServer.__init__(self, localaddr, remoteaddr) - self.set_reuse_addr() - # put the received mail into received list - self.received=received - - def process_message(self, peer, mail_from, rcpt_to, data): - ret=None - if mail_from.startswith('data_error'): - ret='552 Requested mail action aborted: exceeded storage allocation' - self.received.append((ret, peer, mail_from, rcpt_to, data)) - return ret - -class TestSend(unittest.TestCase): - - def setUp(self): - self.received=[] - self.smtp_server=SMTPServer((smtpd_addr, smtpd_port), None, self.received) - - def asyncloop(): - # check every sec if all channel are close - asyncore.loop(1) - - - self.payload, self.mail_from, self.rcpt_to, self.msg_id=compose_mail(('Me', 'me@foo.com'), [('Him', 'him@bar.com')], 'the subject', 'iso-8859-1', ('Hello world', 'us-ascii')) - - # start the server after having built the payload, to handle failure in - # the code above - self.smtpd_thread=threading.Thread(target=asyncloop) - self.smtpd_thread.daemon=True - self.smtpd_thread.start() - - - def tearDown(self): - self.smtp_server.close() - self.smtpd_thread.join() - - def test_simple_send(self): - """simple send""" - ret=send_mail(self.payload, self.mail_from, self.rcpt_to, smtpd_addr, smtpd_port, smtp_mode=smtp_mode, smtp_login=smtp_login, smtp_password=smtp_password) - self.assertEqual(ret, dict()) - (ret, peer, mail_from, rcpt_to, payload)=self.received[0] - self.assertEqual(self.payload, payload) - self.assertEqual(self.mail_from, mail_from) - self.assertEqual(self.rcpt_to, rcpt_to) - self.assertEqual('127.0.0.1', peer[0]) - - def test_send_to_a_wrong_port(self): - """send to a wrong port""" - self.smtp_server.close() - ret=send_mail(self.payload, self.mail_from, 
self.rcpt_to, smtpd_addr, smtpd_port, smtp_mode=smtp_mode, smtp_login=smtp_login, smtp_password=smtp_password) - self.assertEqual(type(ret), str) - - def test_send_data_error(self): - """smtp server return error code""" - ret=send_mail(self.payload, 'data_error@foo.com', self.rcpt_to, smtpd_addr, smtp_bad_port, smtp_mode=smtp_mode, smtp_login=smtp_login, smtp_password=smtp_password) - self.assertEqual(type(ret), str) - -if __name__ == '__main__': - unittest.main() - diff --git a/pyzmail/tests/test_utils.py b/pyzmail/tests/test_utils.py deleted file mode 100644 index e03d07d0d9..0000000000 --- a/pyzmail/tests/test_utils.py +++ /dev/null @@ -1,24 +0,0 @@ -import unittest, doctest -import pyzmail -from pyzmail.utils import * - -class TestUtils(unittest.TestCase): - - def setUp(self): - pass - - def test_nothing(self): - pass - -# Add doctest -def load_tests(loader, tests, ignore): - # this works with python 2.7 and 3.x - tests.addTests(doctest.DocTestSuite(pyzmail.utils)) - return tests - -def additional_tests(): - # Add doctest for python 2.6 and below - if sys.version_info<(2, 7): - return doctest.DocTestSuite(pyzmail.utils) - else: - return unittest.TestSuite() diff --git a/pyzmail/utils.py b/pyzmail/utils.py deleted file mode 100644 index 436e2a4c34..0000000000 --- a/pyzmail/utils.py +++ /dev/null @@ -1,155 +0,0 @@ -# -# pyzmail/utils.py -# (c) Alain Spineux -# http://www.magiksys.net/pyzmail -# Released under LGPL - -""" -Various functions used by other modules -@var invalid_chars_in_filename: a mix of characters not permitted in most used filesystems -@var invalid_windows_name: a list of unauthorized filenames under Windows -""" - -import sys - -invalid_chars_in_filename=b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f' \ - b'\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f' \ - b'<>:"/\\|?*%\'' - -invalid_windows_name=[b'CON', b'PRN', b'AUX', b'NUL', b'COM1', b'COM2', b'COM3', - b'COM4', b'COM5', b'COM6', b'COM7', b'COM8', b'COM9', - b'LPT1', b'LPT2', b'LPT3', b'LPT4', b'LPT5', b'LPT6', b'LPT7', - b'LPT8', b'LPT9' ] - -def sanitize_filename(filename, alt_name, alt_ext): - """ - Convert the given filename into a name that should work on all - platform. Remove non us-ascii characters, and drop invalid filename. - Use the I{alternative} filename if needed. - - @type filename: unicode or None - @param filename: the originale filename or None. Can be unicode. - @type alt_name: str - @param alt_name: the alternative filename if filename is None or useless - @type alt_ext: str - @param alt_ext: the alternative filename extension (including the '.') - - @rtype: str - @returns: a valid filename. - - >>> sanitize_filename('document.txt', 'file', '.txt') - 'document.txt' - >>> sanitize_filename('number1.txt', 'file', '.txt') - 'number1.txt' - >>> sanitize_filename(None, 'file', '.txt') - 'file.txt' - >>> sanitize_filename(u'R\\xe9pertoir.txt', 'file', '.txt') - 'Rpertoir.txt' - >>> # the '\\xe9' has been removed - >>> sanitize_filename(u'\\xe9\\xe6.html', 'file', '.txt') - 'file.html' - >>> # all non us-ascii characters have been removed, the alternative name - >>> # has been used the replace empty string. 
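The deleted test_send.py above builds its SMTP fixture on the stdlib smtpd/asyncore modules, which were removed in Python 3.12. The aiosmtpd package added to requirements.txt in this change covers the same need; a rough sketch of an equivalent recording server, illustrative only and not the datatracker's actual test code:

from aiosmtpd.controller import Controller

class RecordingHandler:
    def __init__(self):
        self.received = []

    async def handle_DATA(self, server, session, envelope):
        # envelope.mail_from / rcpt_tos / content mirror what the removed
        # smtpd-based SMTPServer.process_message() received
        self.received.append((envelope.mail_from, envelope.rcpt_tos, envelope.content))
        return "250 Message accepted for delivery"

handler = RecordingHandler()
controller = Controller(handler, hostname="127.0.0.1", port=32525)
controller.start()        # serves on a background thread with its own asyncio loop
try:
    pass                  # e.g. deliver a message with smtplib, then assert on handler.received
finally:
    controller.stop()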
The originale extention - >>> # is still valid - >>> sanitize_filename(u'COM1.txt', 'file', '.txt') - 'COM1A.txt' - >>> # if name match an invalid name or assimilated then a A is added - """ - - if not filename: - return alt_name+alt_ext - - if ((sys.version_info<(3, 0) and isinstance(filename, str)) or \ - (sys.version_info>=(3, 0) and isinstance(filename, str))): - filename=filename.encode('ascii', 'ignore') - - filename=filename.translate(None, invalid_chars_in_filename) - filename=filename.strip() - - upper=filename.upper() - for name in invalid_windows_name: - if upper==name: - filename=filename+b'A' - break - if upper.startswith(name+b'.'): - filename=filename[:len(name)]+b'A'+filename[len(name):] - break - - if sys.version_info>=(3, 0): - # back to string - filename=filename.decode('us-ascii') - - if filename.rfind('.')==0: - filename=alt_name+filename - - return filename - -def handle_filename_collision(filename, filenames): - """ - Avoid filename collision, add a sequence number to the name when required. - 'file.txt' will be renamed into 'file-01.txt' then 'file-02.txt' ... - until their is no more collision. The file is not added to the list. - - Windows don't make the difference between lower and upper case. To avoid - "case" collision, the function compare C{filename.lower()} to the list. - If you provide a list in lower case only, then any collisions will be avoided. - - @type filename: str - @param filename: the filename - @type filenames: list or set - @param filenames: a list of filenames. - - @rtype: str - @returns: the I{filename} or the appropriately I{indexed} I{filename} - - >>> handle_filename_collision('file.txt', [ ]) - 'file.txt' - >>> handle_filename_collision('file.txt', [ 'file.txt' ]) - 'file-01.txt' - >>> handle_filename_collision('file.txt', [ 'file.txt', 'file-01.txt',]) - 'file-02.txt' - >>> handle_filename_collision('foo', [ 'foo',]) - 'foo-01' - >>> handle_filename_collision('foo', [ 'foo', 'foo-01',]) - 'foo-02' - >>> handle_filename_collision('FOO', [ 'foo', 'foo-01',]) - 'FOO-02' - """ - if filename.lower() in filenames: - try: - basename, ext=filename.rsplit('.', 1) - ext='.'+ext - except ValueError: - basename, ext=filename, '' - - i=1 - while True: - filename='%s-%02d%s' % (basename, i, ext) - if filename.lower() not in filenames: - break - i+=1 - - return filename - -def is_usascii(value): - """" - test if string contains us-ascii characters only - - >>> is_usascii('foo') - True - >>> is_usascii(u'foo') - True - >>> is_usascii(u'Fran\xe7ais') - False - >>> is_usascii('bad\x81') - False - """ - try: - # if value is byte string, it will be decoded first using us-ascii - # and will generate UnicodeEncodeError, this is fine too - value.encode('us-ascii') - except UnicodeError: - return False - - return True - \ No newline at end of file diff --git a/pyzmail/version.py b/pyzmail/version.py deleted file mode 100644 index 03e1b0c6dc..0000000000 --- a/pyzmail/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__='1.0.3' diff --git a/requirements.txt b/requirements.txt index bbd2ee5f70..ca9a6740e1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,76 +1,95 @@ # -*- conf-mode -*- -setuptools>=51.1.0 # Require this first, to prevent later errors +setuptools>=80.9.0 # Require this first, to prevent later errors # -argon2-cffi>=21.3.0 # For the Argon2 password hasher option -beautifulsoup4>=4.11.1 # Only used in tests -bibtexparser>=1.2.0 # Only used in tests -bleach>=6 -types-bleach>=6 -celery>=5.2.6 -coverage>=4.5.4,<5.0 # Coverage 5.x moves from a 
json database to SQLite. Moving to 5.x will require substantial rewrites in ietf.utils.test_runner and ietf.release.views -decorator>=5.1.1 -types-decorator>=5.1.1 +aiosmtpd>=1.4.6 +argon2-cffi>=25.1.0 # For the Argon2 password hasher option +beautifulsoup4>=4.13.4 # Only used in tests +bibtexparser>=1.4.4 # Only used in tests +bleach>=6.2.0 # project is deprecated but supported +types-bleach>=6.2.0 +boto3>=1.39.15 +boto3-stubs[s3]>=1.39.15 +botocore>=1.39.15 +celery>=5.5.3 +coverage>=7.9.2 defusedxml>=0.7.1 # for TastyPie when using xml; not a declared dependency Django>4.2,<5 -django-analytical>=3.1.0 -django-bootstrap5>=21.3 -django-celery-beat>=2.3.0 +django-admin-rangefilter>=0.13.3 +django-analytical>=3.2.0 +django-bootstrap5>=25.1 +django-celery-beat>=2.9.0 +django-celery-results>=2.6.0 django-csp>=3.7 -django-cors-headers>=3.11.0 -django-debug-toolbar>=3.2.4 -django-markup>=1.5 # Limited use - need to reconcile against direct use of markdown -django-oidc-provider>=0.8.1 # 0.8 dropped Django 2 support -django-referrer-policy>=1.0 -django-simple-history>=3.0.0 -#django-stubs>=4.2.0 # The django-stubs version used determines the the mypy version indicated below -django-stubs==4.2.4 # Pin here until we fix test failures (and update mypy version, too) -django-tastypie>=0.14.5 # Version must be locked in sync with version of Django -django-vite>=2.0.2 -django-webtest>=1.9.10 # Only used in tests +django-cors-headers>=4.7.0 +django-debug-toolbar>=6.0.0 +django-filter>=24.3 +django-markup>=1.10 # Limited use - need to reconcile against direct use of markdown +django-oidc-provider==0.8.2 # 0.8.3 changes logout flow and claim return +django-simple-history>=3.10.1 +django-storages>=1.14.6 +django-stubs>=4.2.7,<5 # The django-stubs version used determines the the mypy version indicated below +django-tastypie>=0.15.1 # Version must be kept in sync with Django +django-vite>=3.1.0 django-widget-tweaks>=1.4.12 -djlint>=1.0.0 # To auto-indent templates via "djlint --profile django --reformat" -docutils>=0.18.1 # Used only by dbtemplates for RestructuredText -types-docutils>=0.18.1 -factory-boy>=3.3 -github3.py>=3.2.0 -gunicorn>=20.1.0 +djangorestframework>=3.16.0 +docutils>=0.22.0 # Used only by dbtemplates for RestructuredText +types-docutils>=0.21.0 # should match docutils (0.22.0 not out yet) +drf-spectacular>=0.27 +drf-standardized-errors[openapi] >= 0.15.0 +factory-boy>=3.3.3 +gunicorn>=23.0.0 hashids>=1.3.1 -html2text>=2020.1.16 # Used only to clean comment field of secr/sreq +html2text>=2025.4.15 # Used only to clean comment field of secr/sreq html5lib>=1.1 # Only used in tests -inflect>= 6.0.2 -jsonfield>=3.1.0 # for SubmissionCheck. This is https://github.com/bradjasper/django-jsonfield/. -jwcrypto>=1.2 # for signed notifications - this is aspirational, and is not really used. -logging_tree>=1.9 # Used only by the showloggers management command -lxml>=4.8.0,<5 -markdown>=3.3.6 -types-markdown>=3.3.6 -mock>=4.0.3 # Used only by tests, of course -types-mock>=4.0.3 -mypy~=1.2.0 # Version requirements determined by django-stubs. 
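Several of the pins above carry compatibility notes in their comments (for example mypy~=1.11.2 tracking django-stubs>=4.2.7,<5). A quick way to check a running environment against the file is sketched below; it assumes the third-party packaging distribution is available and is not part of the datatracker tooling.

from importlib.metadata import PackageNotFoundError, version
from packaging.requirements import Requirement

with open("requirements.txt") as handle:
    for line in handle:
        spec = line.split("#", 1)[0].strip()      # drop comments and blank lines
        if not spec or spec.startswith("-"):      # skip pip options such as -r/-e
            continue
        req = Requirement(spec)
        try:
            installed = version(req.name)
        except PackageNotFoundError:
            print(f"MISSING   {req.name}")
            continue
        status = "ok" if req.specifier.contains(installed, prereleases=True) else "CONFLICT"
        print(f"{status:9} {req.name} {installed} (wants {req.specifier})")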
-oic>=1.3 # Used only by tests -Pillow>=9.1.0 -psycopg2>=2.9.6 -pyang>=2.5.3 -pyflakes>=2.4.0 -pyopenssl>=22.0.0 # Used by urllib3.contrib, which is used by PyQuery but not marked as a dependency -pyquery>=1.4.3 -python-dateutil>=2.8.2 -types-python-dateutil>=2.8.2 +httpx>=0.28.1 # Indirect req of typesense, but we import and refer to exceptions +icalendar>=5.0.0 +inflect>= 7.5.0 +jsonfield>=3.2.0 # deprecated - need to replace with Django's JSONField +jsonschema[format]>=4.25.0 +jwcrypto>=1.5.6 # for signed notifications - this is aspirational, and is not really used. +logging_tree>=1.10 # Used only by the showloggers management command +lxml>=6.0.0 +markdown>=3.8.0 +types-markdown>=3.8.0 +mock>=5.2.0 # should replace with unittest.mock and remove dependency +types-mock>=5.2.0 +mypy~=1.11.2 # Version requirements loosely determined by django-stubs. +oic>=1.7.0 # Used only by tests +opentelemetry-sdk>=1.38.0 +opentelemetry-instrumentation-django>=0.59b0 +opentelemetry-instrumentation-psycopg2>=0.59b0 +opentelemetry-instrumentation-pymemcache>=0.59b0 +opentelemetry-instrumentation-requests>=0.59b0 +opentelemetry-exporter-otlp-proto-http>=1.38.0 +pillow>=11.3.0 +psycopg2>=2.9.10 +pyang>=2.6.1 +pydyf>=0.11.0 +pyflakes>=3.4.0 +pyopenssl>=25.1.0 # Used by urllib3.contrib, which is used by PyQuery but not marked as a dependency +pyquery>=2.0.1 +python-dateutil>=2.9.0 +types-python-dateutil>=2.9.0 +python-json-logger>=3.3.0 python-magic==0.4.18 # Versions beyond the yanked .19 and .20 introduce form failures -pymemcache>=4.0.0 # for django.core.cache.backends.memcached.PyMemcacheCache -python-mimeparse>=1.6 # from TastyPie -pytz==2022.2.1 # Pinned as changes need to be vetted for their effect on Meeting fields -requests>=2.27.1 -types-requests>=2.27.1 -requests-mock>=1.9.3 +pymemcache>=4.0.0 # for django.core.cache.backends.memcached.PyMemcacheCache +python-mimeparse>=2.0.0 # from TastyPie +pytz==2025.2 # Pinned as changes need to be vetted for their effect on Meeting fields +types-pytz==2025.2.0.20251108 # match pytz version +typesense>=2.0.0 +requests>=2.32.4 +types-requests>=2.32.4 +requests-mock>=1.12.1 rfc2html>=2.0.3 -scout-apm>=2.24.2 -selenium>=3.141.0,<4.0 -tblib>=1.7.0 # So that the django test runner provides tracebacks -tlds>=2022042700 # Used to teach bleach about which TLDs currently exist -tqdm>=4.64.0 -Unidecode>=1.3.4 -urllib3<2 # v2 causes selenium tests to fail with "Timeout value was =59 -xml2rfc>=3.12.4 +scout-apm>=3.4.0 +selenium>=4.34.2 +tblib>=3.1.0 # So that the django test runner provides tracebacks +tlds>=2022042700 # Used to teach bleach about which TLDs currently exist +tqdm>=4.67.1 +unidecode>=1.4.0 +urllib3>=2.5.0 +weasyprint>=66.0 +xml2rfc>=3.30.0 xym>=0.6,<1.0 +zxcvbn>=4.5.0 +types-zxcvbn~=4.5.0.20250223 # match zxcvbn version diff --git a/test/data/profile-default.jpg b/test/data/profile-default.jpg deleted file mode 100644 index d6b03e1004..0000000000 Binary files a/test/data/profile-default.jpg and /dev/null differ diff --git a/test/data/youtube-discovery.json b/test/data/youtube-discovery.json deleted file mode 100644 index 983e1650c2..0000000000 --- a/test/data/youtube-discovery.json +++ /dev/null @@ -1,10879 +0,0 @@ -{ - "kind": "discovery#restDescription", - "etag": "\"YWOzh2SDasdU84ArJnpYek-OMdg/f81k8b4sv9uLeywfoj2KpL2xcPg\"", - "discoveryVersion": "v1", - "id": "youtube:v3", - "name": "youtube", - "canonicalName": "YouTube", - "version": "v3", - "revision": "20170130", - "title": "YouTube Data API", - "description": "Supports core YouTube features, such 
as uploading videos, creating and managing playlists, searching for content, and much more.", - "ownerDomain": "google.com", - "ownerName": "Google", - "icons": { - "x16": "https://www.google.com/images/icons/product/youtube-16.png", - "x32": "https://www.google.com/images/icons/product/youtube-32.png" - }, - "documentationLink": "https://developers.google.com/youtube/v3", - "protocol": "rest", - "baseUrl": "https://www.googleapis.com/youtube/v3/", - "basePath": "/youtube/v3/", - "rootUrl": "https://www.googleapis.com/", - "servicePath": "youtube/v3/", - "batchPath": "batch", - "parameters": { - "alt": { - "type": "string", - "description": "Data format for the response.", - "default": "json", - "enum": [ - "json" - ], - "enumDescriptions": [ - "Responses with Content-Type of application/json" - ], - "location": "query" - }, - "fields": { - "type": "string", - "description": "Selector specifying which fields to include in a partial response.", - "location": "query" - }, - "key": { - "type": "string", - "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.", - "location": "query" - }, - "oauth_token": { - "type": "string", - "description": "OAuth 2.0 token for the current user.", - "location": "query" - }, - "prettyPrint": { - "type": "boolean", - "description": "Returns response with indentations and line breaks.", - "default": "true", - "location": "query" - }, - "quotaUser": { - "type": "string", - "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.", - "location": "query" - }, - "userIp": { - "type": "string", - "description": "IP address of the site where the request originates. Use this if you want to enforce per-user limits.", - "location": "query" - } - }, - "auth": { - "oauth2": { - "scopes": { - "https://www.googleapis.com/auth/youtube": { - "description": "Manage your YouTube account" - }, - "https://www.googleapis.com/auth/youtube.force-ssl": { - "description": "Manage your YouTube account" - }, - "https://www.googleapis.com/auth/youtube.readonly": { - "description": "View your YouTube account" - }, - "https://www.googleapis.com/auth/youtube.upload": { - "description": "Manage your YouTube videos" - }, - "https://www.googleapis.com/auth/youtubepartner": { - "description": "View and manage your assets and associated content on YouTube" - }, - "https://www.googleapis.com/auth/youtubepartner-channel-audit": { - "description": "View private information of your YouTube channel relevant during the audit process with a YouTube partner" - } - } - } - }, - "schemas": { - "AccessPolicy": { - "id": "AccessPolicy", - "type": "object", - "description": "Rights management policy for YouTube resources.", - "properties": { - "allowed": { - "type": "boolean", - "description": "The value of allowed indicates whether the access to the policy is allowed or denied by default." 
- }, - "exception": { - "type": "array", - "description": "A list of region codes that identify countries where the default policy do not apply.", - "items": { - "type": "string" - } - } - } - }, - "Activity": { - "id": "Activity", - "type": "object", - "description": "An activity resource contains information about an action that a particular channel, or user, has taken on YouTube.The actions reported in activity feeds include rating a video, sharing a video, marking a video as a favorite, commenting on a video, uploading a video, and so forth. Each activity resource identifies the type of action, the channel associated with the action, and the resource(s) associated with the action, such as the video that was rated or uploaded.", - "properties": { - "contentDetails": { - "$ref": "ActivityContentDetails", - "description": "The contentDetails object contains information about the content associated with the activity. For example, if the snippet.type value is videoRated, then the contentDetails object's content identifies the rated video." - }, - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the activity." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#activity\".", - "default": "youtube#activity" - }, - "snippet": { - "$ref": "ActivitySnippet", - "description": "The snippet object contains basic details about the activity, including the activity's type and group ID." - } - } - }, - "ActivityContentDetails": { - "id": "ActivityContentDetails", - "type": "object", - "description": "Details about the content of an activity: the video that was shared, the channel that was subscribed to, etc.", - "properties": { - "bulletin": { - "$ref": "ActivityContentDetailsBulletin", - "description": "The bulletin object contains details about a channel bulletin post. This object is only present if the snippet.type is bulletin." - }, - "channelItem": { - "$ref": "ActivityContentDetailsChannelItem", - "description": "The channelItem object contains details about a resource which was added to a channel. This property is only present if the snippet.type is channelItem." - }, - "comment": { - "$ref": "ActivityContentDetailsComment", - "description": "The comment object contains information about a resource that received a comment. This property is only present if the snippet.type is comment." - }, - "favorite": { - "$ref": "ActivityContentDetailsFavorite", - "description": "The favorite object contains information about a video that was marked as a favorite video. This property is only present if the snippet.type is favorite." - }, - "like": { - "$ref": "ActivityContentDetailsLike", - "description": "The like object contains information about a resource that received a positive (like) rating. This property is only present if the snippet.type is like." - }, - "playlistItem": { - "$ref": "ActivityContentDetailsPlaylistItem", - "description": "The playlistItem object contains information about a new playlist item. This property is only present if the snippet.type is playlistItem." - }, - "promotedItem": { - "$ref": "ActivityContentDetailsPromotedItem", - "description": "The promotedItem object contains details about a resource which is being promoted. This property is only present if the snippet.type is promotedItem." 
- }, - "recommendation": { - "$ref": "ActivityContentDetailsRecommendation", - "description": "The recommendation object contains information about a recommended resource. This property is only present if the snippet.type is recommendation." - }, - "social": { - "$ref": "ActivityContentDetailsSocial", - "description": "The social object contains details about a social network post. This property is only present if the snippet.type is social." - }, - "subscription": { - "$ref": "ActivityContentDetailsSubscription", - "description": "The subscription object contains information about a channel that a user subscribed to. This property is only present if the snippet.type is subscription." - }, - "upload": { - "$ref": "ActivityContentDetailsUpload", - "description": "The upload object contains information about the uploaded video. This property is only present if the snippet.type is upload." - } - } - }, - "ActivityContentDetailsBulletin": { - "id": "ActivityContentDetailsBulletin", - "type": "object", - "description": "Details about a channel bulletin post.", - "properties": { - "resourceId": { - "$ref": "ResourceId", - "description": "The resourceId object contains information that identifies the resource associated with a bulletin post." - } - } - }, - "ActivityContentDetailsChannelItem": { - "id": "ActivityContentDetailsChannelItem", - "type": "object", - "description": "Details about a resource which was added to a channel.", - "properties": { - "resourceId": { - "$ref": "ResourceId", - "description": "The resourceId object contains information that identifies the resource that was added to the channel." - } - } - }, - "ActivityContentDetailsComment": { - "id": "ActivityContentDetailsComment", - "type": "object", - "description": "Information about a resource that received a comment.", - "properties": { - "resourceId": { - "$ref": "ResourceId", - "description": "The resourceId object contains information that identifies the resource associated with the comment." - } - } - }, - "ActivityContentDetailsFavorite": { - "id": "ActivityContentDetailsFavorite", - "type": "object", - "description": "Information about a video that was marked as a favorite video.", - "properties": { - "resourceId": { - "$ref": "ResourceId", - "description": "The resourceId object contains information that identifies the resource that was marked as a favorite." - } - } - }, - "ActivityContentDetailsLike": { - "id": "ActivityContentDetailsLike", - "type": "object", - "description": "Information about a resource that received a positive (like) rating.", - "properties": { - "resourceId": { - "$ref": "ResourceId", - "description": "The resourceId object contains information that identifies the rated resource." - } - } - }, - "ActivityContentDetailsPlaylistItem": { - "id": "ActivityContentDetailsPlaylistItem", - "type": "object", - "description": "Information about a new playlist item.", - "properties": { - "playlistId": { - "type": "string", - "description": "The value that YouTube uses to uniquely identify the playlist." - }, - "playlistItemId": { - "type": "string", - "description": "ID of the item within the playlist." - }, - "resourceId": { - "$ref": "ResourceId", - "description": "The resourceId object contains information about the resource that was added to the playlist." 
- } - } - }, - "ActivityContentDetailsPromotedItem": { - "id": "ActivityContentDetailsPromotedItem", - "type": "object", - "description": "Details about a resource which is being promoted.", - "properties": { - "adTag": { - "type": "string", - "description": "The URL the client should fetch to request a promoted item." - }, - "clickTrackingUrl": { - "type": "string", - "description": "The URL the client should ping to indicate that the user clicked through on this promoted item." - }, - "creativeViewUrl": { - "type": "string", - "description": "The URL the client should ping to indicate that the user was shown this promoted item." - }, - "ctaType": { - "type": "string", - "description": "The type of call-to-action, a message to the user indicating action that can be taken.", - "enum": [ - "unspecified", - "visitAdvertiserSite" - ], - "enumDescriptions": [ - "", - "" - ] - }, - "customCtaButtonText": { - "type": "string", - "description": "The custom call-to-action button text. If specified, it will override the default button text for the cta_type." - }, - "descriptionText": { - "type": "string", - "description": "The text description to accompany the promoted item." - }, - "destinationUrl": { - "type": "string", - "description": "The URL the client should direct the user to, if the user chooses to visit the advertiser's website." - }, - "forecastingUrl": { - "type": "array", - "description": "The list of forecasting URLs. The client should ping all of these URLs when a promoted item is not available, to indicate that a promoted item could have been shown.", - "items": { - "type": "string" - } - }, - "impressionUrl": { - "type": "array", - "description": "The list of impression URLs. The client should ping all of these URLs to indicate that the user was shown this promoted item.", - "items": { - "type": "string" - } - }, - "videoId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the promoted video." - } - } - }, - "ActivityContentDetailsRecommendation": { - "id": "ActivityContentDetailsRecommendation", - "type": "object", - "description": "Information that identifies the recommended resource.", - "properties": { - "reason": { - "type": "string", - "description": "The reason that the resource is recommended to the user.", - "enum": [ - "unspecified", - "videoFavorited", - "videoLiked", - "videoWatched" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - }, - "resourceId": { - "$ref": "ResourceId", - "description": "The resourceId object contains information that identifies the recommended resource." - }, - "seedResourceId": { - "$ref": "ResourceId", - "description": "The seedResourceId object contains information about the resource that caused the recommendation." - } - } - }, - "ActivityContentDetailsSocial": { - "id": "ActivityContentDetailsSocial", - "type": "object", - "description": "Details about a social network post.", - "properties": { - "author": { - "type": "string", - "description": "The author of the social network post." - }, - "imageUrl": { - "type": "string", - "description": "An image of the post's author." - }, - "referenceUrl": { - "type": "string", - "description": "The URL of the social network post." - }, - "resourceId": { - "$ref": "ResourceId", - "description": "The resourceId object encapsulates information that identifies the resource associated with a social network post." 
- }, - "type": { - "type": "string", - "description": "The name of the social network.", - "enum": [ - "facebook", - "googlePlus", - "twitter", - "unspecified" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - } - } - }, - "ActivityContentDetailsSubscription": { - "id": "ActivityContentDetailsSubscription", - "type": "object", - "description": "Information about a channel that a user subscribed to.", - "properties": { - "resourceId": { - "$ref": "ResourceId", - "description": "The resourceId object contains information that identifies the resource that the user subscribed to." - } - } - }, - "ActivityContentDetailsUpload": { - "id": "ActivityContentDetailsUpload", - "type": "object", - "description": "Information about the uploaded video.", - "properties": { - "videoId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the uploaded video." - } - } - }, - "ActivityListResponse": { - "id": "ActivityListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of activities, or events, that match the request criteria.", - "items": { - "$ref": "Activity" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#activityListResponse\".", - "default": "youtube#activityListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "prevPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set." - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "ActivitySnippet": { - "id": "ActivitySnippet", - "type": "object", - "description": "Basic details about an activity, including title, description, thumbnails, activity type and group.", - "properties": { - "channelId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the channel associated with the activity." - }, - "channelTitle": { - "type": "string", - "description": "Channel title for the channel responsible for this activity" - }, - "description": { - "type": "string", - "description": "The description of the resource primarily associated with the activity.", - "annotations": { - "required": [ - "youtube.activities.insert" - ] - } - }, - "groupId": { - "type": "string", - "description": "The group ID associated with the activity. A group ID identifies user events that are associated with the same user and resource. For example, if a user rates a video and marks the same video as a favorite, the entries for those events would have the same group ID in the user's activity feed. In your user interface, you can avoid repetition by grouping events with the same groupId value." - }, - "publishedAt": { - "type": "string", - "description": "The date and time that the video was uploaded. 
The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "thumbnails": { - "$ref": "ThumbnailDetails", - "description": "A map of thumbnail images associated with the resource that is primarily associated with the activity. For each object in the map, the key is the name of the thumbnail image, and the value is an object that contains other information about the thumbnail." - }, - "title": { - "type": "string", - "description": "The title of the resource primarily associated with the activity." - }, - "type": { - "type": "string", - "description": "The type of activity that the resource describes.", - "enum": [ - "bulletin", - "channelItem", - "comment", - "favorite", - "like", - "playlistItem", - "promotedItem", - "recommendation", - "social", - "subscription", - "upload" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - } - } - }, - "Caption": { - "id": "Caption", - "type": "object", - "description": "A caption resource represents a YouTube caption track. A caption track is associated with exactly one YouTube video.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the caption track.", - "annotations": { - "required": [ - "youtube.captions.update" - ] - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#caption\".", - "default": "youtube#caption" - }, - "snippet": { - "$ref": "CaptionSnippet", - "description": "The snippet object contains basic details about the caption." - } - } - }, - "CaptionListResponse": { - "id": "CaptionListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of captions that match the request criteria.", - "items": { - "$ref": "Caption" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#captionListResponse\".", - "default": "youtube#captionListResponse" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "CaptionSnippet": { - "id": "CaptionSnippet", - "type": "object", - "description": "Basic details about a caption track, such as its language and name.", - "properties": { - "audioTrackType": { - "type": "string", - "description": "The type of audio track associated with the caption track.", - "enum": [ - "commentary", - "descriptive", - "primary", - "unknown" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - }, - "failureReason": { - "type": "string", - "description": "The reason that YouTube failed to process the caption track. This property is only present if the state property's value is failed.", - "enum": [ - "processingFailed", - "unknownFormat", - "unsupportedFormat" - ], - "enumDescriptions": [ - "", - "", - "" - ] - }, - "isAutoSynced": { - "type": "boolean", - "description": "Indicates whether YouTube synchronized the caption track to the audio track in the video. The value will be true if a sync was explicitly requested when the caption track was uploaded. 
For example, when calling the captions.insert or captions.update methods, you can set the sync parameter to true to instruct YouTube to sync the uploaded track to the video. If the value is false, YouTube uses the time codes in the uploaded caption track to determine when to display captions." - }, - "isCC": { - "type": "boolean", - "description": "Indicates whether the track contains closed captions for the deaf and hard of hearing. The default value is false." - }, - "isDraft": { - "type": "boolean", - "description": "Indicates whether the caption track is a draft. If the value is true, then the track is not publicly visible. The default value is false." - }, - "isEasyReader": { - "type": "boolean", - "description": "Indicates whether caption track is formatted for \"easy reader,\" meaning it is at a third-grade level for language learners. The default value is false." - }, - "isLarge": { - "type": "boolean", - "description": "Indicates whether the caption track uses large text for the vision-impaired. The default value is false." - }, - "language": { - "type": "string", - "description": "The language of the caption track. The property value is a BCP-47 language tag.", - "annotations": { - "required": [ - "youtube.captions.insert" - ] - } - }, - "lastUpdated": { - "type": "string", - "description": "The date and time when the caption track was last updated. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "name": { - "type": "string", - "description": "The name of the caption track. The name is intended to be visible to the user as an option during playback.", - "annotations": { - "required": [ - "youtube.captions.insert" - ] - } - }, - "status": { - "type": "string", - "description": "The caption track's status.", - "enum": [ - "failed", - "serving", - "syncing" - ], - "enumDescriptions": [ - "", - "", - "" - ] - }, - "trackKind": { - "type": "string", - "description": "The caption track's type.", - "enum": [ - "ASR", - "forced", - "standard" - ], - "enumDescriptions": [ - "", - "", - "" - ] - }, - "videoId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the video associated with the caption track.", - "annotations": { - "required": [ - "youtube.captions.insert" - ] - } - } - } - }, - "CdnSettings": { - "id": "CdnSettings", - "type": "object", - "description": "Brief description of the live stream cdn settings.", - "properties": { - "format": { - "type": "string", - "description": "The format of the video stream that you are sending to Youtube.", - "annotations": { - "required": [ - "youtube.liveStreams.insert", - "youtube.liveStreams.update" - ] - } - }, - "frameRate": { - "type": "string", - "description": "The frame rate of the inbound video data.", - "enum": [ - "30fps", - "60fps" - ], - "enumDescriptions": [ - "", - "" - ] - }, - "ingestionInfo": { - "$ref": "IngestionInfo", - "description": "The ingestionInfo object contains information that YouTube provides that you need to transmit your RTMP or HTTP stream to YouTube." 
- }, - "ingestionType": { - "type": "string", - "description": "The method or protocol used to transmit the video stream.", - "enum": [ - "dash", - "rtmp" - ], - "enumDescriptions": [ - "", - "" - ], - "annotations": { - "required": [ - "youtube.liveStreams.insert", - "youtube.liveStreams.update" - ] - } - }, - "resolution": { - "type": "string", - "description": "The resolution of the inbound video data.", - "enum": [ - "1080p", - "1440p", - "2160p", - "240p", - "360p", - "480p", - "720p" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - } - } - }, - "Channel": { - "id": "Channel", - "type": "object", - "description": "A channel resource contains information about a YouTube channel.", - "properties": { - "auditDetails": { - "$ref": "ChannelAuditDetails", - "description": "The auditionDetails object encapsulates channel data that is relevant for YouTube Partners during the audition process." - }, - "brandingSettings": { - "$ref": "ChannelBrandingSettings", - "description": "The brandingSettings object encapsulates information about the branding of the channel." - }, - "contentDetails": { - "$ref": "ChannelContentDetails", - "description": "The contentDetails object encapsulates information about the channel's content." - }, - "contentOwnerDetails": { - "$ref": "ChannelContentOwnerDetails", - "description": "The contentOwnerDetails object encapsulates channel data that is relevant for YouTube Partners linked with the channel." - }, - "conversionPings": { - "$ref": "ChannelConversionPings", - "description": "The conversionPings object encapsulates information about conversion pings that need to be respected by the channel." - }, - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the channel." - }, - "invideoPromotion": { - "$ref": "InvideoPromotion", - "description": "The invideoPromotion object encapsulates information about promotion campaign associated with the channel." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#channel\".", - "default": "youtube#channel" - }, - "localizations": { - "type": "object", - "description": "Localizations for different languages", - "additionalProperties": { - "$ref": "ChannelLocalization", - "description": "The language tag, using string since map_key require simple types." - } - }, - "snippet": { - "$ref": "ChannelSnippet", - "description": "The snippet object contains basic details about the channel, such as its title, description, and thumbnail images." - }, - "statistics": { - "$ref": "ChannelStatistics", - "description": "The statistics object encapsulates statistics for the channel." - }, - "status": { - "$ref": "ChannelStatus", - "description": "The status object encapsulates information about the privacy status of the channel." - }, - "topicDetails": { - "$ref": "ChannelTopicDetails", - "description": "The topicDetails object encapsulates information about Freebase topics associated with the channel." - } - } - }, - "ChannelAuditDetails": { - "id": "ChannelAuditDetails", - "type": "object", - "description": "The auditDetails object encapsulates channel data that is relevant for YouTube Partners during the audit process.", - "properties": { - "communityGuidelinesGoodStanding": { - "type": "boolean", - "description": "Whether or not the channel respects the community guidelines." 
- }, - "contentIdClaimsGoodStanding": { - "type": "boolean", - "description": "Whether or not the channel has any unresolved claims." - }, - "copyrightStrikesGoodStanding": { - "type": "boolean", - "description": "Whether or not the channel has any copyright strikes." - }, - "overallGoodStanding": { - "type": "boolean", - "description": "Describes the general state of the channel. This field will always show if there are any issues whatsoever with the channel. Currently this field represents the result of the logical and operation over the community guidelines good standing, the copyright strikes good standing and the content ID claims good standing, but this may change in the future." - } - } - }, - "ChannelBannerResource": { - "id": "ChannelBannerResource", - "type": "object", - "description": "A channel banner returned as the response to a channel_banner.insert call.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#channelBannerResource\".", - "default": "youtube#channelBannerResource" - }, - "url": { - "type": "string", - "description": "The URL of this banner image." - } - } - }, - "ChannelBrandingSettings": { - "id": "ChannelBrandingSettings", - "type": "object", - "description": "Branding properties of a YouTube channel.", - "properties": { - "channel": { - "$ref": "ChannelSettings", - "description": "Branding properties for the channel view." - }, - "hints": { - "type": "array", - "description": "Additional experimental branding properties.", - "items": { - "$ref": "PropertyValue" - } - }, - "image": { - "$ref": "ImageSettings", - "description": "Branding properties for branding images." - }, - "watch": { - "$ref": "WatchSettings", - "description": "Branding properties for the watch page." - } - } - }, - "ChannelContentDetails": { - "id": "ChannelContentDetails", - "type": "object", - "description": "Details about the content of a channel.", - "properties": { - "relatedPlaylists": { - "type": "object", - "properties": { - "favorites": { - "type": "string", - "description": "The ID of the playlist that contains the channel\"s favorite videos. Use the playlistItems.insert and playlistItems.delete to add or remove items from that list." - }, - "likes": { - "type": "string", - "description": "The ID of the playlist that contains the channel\"s liked videos. Use the playlistItems.insert and playlistItems.delete to add or remove items from that list." - }, - "uploads": { - "type": "string", - "description": "The ID of the playlist that contains the channel\"s uploaded videos. Use the videos.insert method to upload new videos and the videos.delete method to delete previously uploaded videos." - }, - "watchHistory": { - "type": "string", - "description": "The ID of the playlist that contains the channel\"s watch history. Use the playlistItems.insert and playlistItems.delete to add or remove items from that list." - }, - "watchLater": { - "type": "string", - "description": "The ID of the playlist that contains the channel\"s watch later playlist. Use the playlistItems.insert and playlistItems.delete to add or remove items from that list." 
- } - } - } - } - }, - "ChannelContentOwnerDetails": { - "id": "ChannelContentOwnerDetails", - "type": "object", - "description": "The contentOwnerDetails object encapsulates channel data that is relevant for YouTube Partners linked with the channel.", - "properties": { - "contentOwner": { - "type": "string", - "description": "The ID of the content owner linked to the channel." - }, - "timeLinked": { - "type": "string", - "description": "The date and time of when the channel was linked to the content owner. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - } - } - }, - "ChannelConversionPing": { - "id": "ChannelConversionPing", - "type": "object", - "description": "Pings that the app shall fire (authenticated by biscotti cookie). Each ping has a context, in which the app must fire the ping, and a url identifying the ping.", - "properties": { - "context": { - "type": "string", - "description": "Defines the context of the ping.", - "enum": [ - "cview", - "subscribe", - "unsubscribe" - ], - "enumDescriptions": [ - "", - "", - "" - ] - }, - "conversionUrl": { - "type": "string", - "description": "The url (without the schema) that the player shall send the ping to. It's at caller's descretion to decide which schema to use (http vs https) Example of a returned url: //googleads.g.doubleclick.net/pagead/ viewthroughconversion/962985656/?data=path%3DtHe_path%3Btype%3D cview%3Butuid%3DGISQtTNGYqaYl4sKxoVvKA&labe=default The caller must append biscotti authentication (ms param in case of mobile, for example) to this ping." - } - } - }, - "ChannelConversionPings": { - "id": "ChannelConversionPings", - "type": "object", - "description": "The conversionPings object encapsulates information about conversion pings that need to be respected by the channel.", - "properties": { - "pings": { - "type": "array", - "description": "Pings that the app shall fire (authenticated by biscotti cookie). Each ping has a context, in which the app must fire the ping, and a url identifying the ping.", - "items": { - "$ref": "ChannelConversionPing" - } - } - } - }, - "ChannelListResponse": { - "id": "ChannelListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of channels that match the request criteria.", - "items": { - "$ref": "Channel" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#channelListResponse\".", - "default": "youtube#channelListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "prevPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set." - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." 
- } - } - }, - "ChannelLocalization": { - "id": "ChannelLocalization", - "type": "object", - "description": "Channel localization setting", - "properties": { - "description": { - "type": "string", - "description": "The localized strings for channel's description." - }, - "title": { - "type": "string", - "description": "The localized strings for channel's title." - } - } - }, - "ChannelProfileDetails": { - "id": "ChannelProfileDetails", - "type": "object", - "properties": { - "channelId": { - "type": "string", - "description": "The YouTube channel ID." - }, - "channelUrl": { - "type": "string", - "description": "The channel's URL." - }, - "displayName": { - "type": "string", - "description": "The channel's display name." - }, - "profileImageUrl": { - "type": "string", - "description": "The channels's avatar URL." - } - } - }, - "ChannelSection": { - "id": "ChannelSection", - "type": "object", - "properties": { - "contentDetails": { - "$ref": "ChannelSectionContentDetails", - "description": "The contentDetails object contains details about the channel section content, such as a list of playlists or channels featured in the section." - }, - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the channel section." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#channelSection\".", - "default": "youtube#channelSection" - }, - "localizations": { - "type": "object", - "description": "Localizations for different languages", - "additionalProperties": { - "$ref": "ChannelSectionLocalization", - "description": "The language tag, using string since map_key require simple types." - } - }, - "snippet": { - "$ref": "ChannelSectionSnippet", - "description": "The snippet object contains basic details about the channel section, such as its type, style and title." - }, - "targeting": { - "$ref": "ChannelSectionTargeting", - "description": "The targeting object contains basic targeting settings about the channel section." - } - } - }, - "ChannelSectionContentDetails": { - "id": "ChannelSectionContentDetails", - "type": "object", - "description": "Details about a channelsection, including playlists and channels.", - "properties": { - "channels": { - "type": "array", - "description": "The channel ids for type multiple_channels.", - "items": { - "type": "string" - } - }, - "playlists": { - "type": "array", - "description": "The playlist ids for type single_playlist and multiple_playlists. For singlePlaylist, only one playlistId is allowed.", - "items": { - "type": "string" - } - } - } - }, - "ChannelSectionListResponse": { - "id": "ChannelSectionListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of ChannelSections that match the request criteria.", - "items": { - "$ref": "ChannelSection" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#channelSectionListResponse\".", - "default": "youtube#channelSectionListResponse" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." 
- } - } - }, - "ChannelSectionLocalization": { - "id": "ChannelSectionLocalization", - "type": "object", - "description": "ChannelSection localization setting", - "properties": { - "title": { - "type": "string", - "description": "The localized strings for channel section's title." - } - } - }, - "ChannelSectionSnippet": { - "id": "ChannelSectionSnippet", - "type": "object", - "description": "Basic details about a channel section, including title, style and position.", - "properties": { - "channelId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the channel that published the channel section." - }, - "defaultLanguage": { - "type": "string", - "description": "The language of the channel section's default title and description." - }, - "localized": { - "$ref": "ChannelSectionLocalization", - "description": "Localized title, read-only." - }, - "position": { - "type": "integer", - "description": "The position of the channel section in the channel.", - "format": "uint32" - }, - "style": { - "type": "string", - "description": "The style of the channel section.", - "enum": [ - "channelsectionStyleUndefined", - "horizontalRow", - "verticalList" - ], - "enumDescriptions": [ - "", - "", - "" - ] - }, - "title": { - "type": "string", - "description": "The channel section's title for multiple_playlists and multiple_channels." - }, - "type": { - "type": "string", - "description": "The type of the channel section.", - "enum": [ - "allPlaylists", - "channelsectionTypeUndefined", - "completedEvents", - "likedPlaylists", - "likes", - "liveEvents", - "multipleChannels", - "multiplePlaylists", - "popularUploads", - "postedPlaylists", - "postedVideos", - "recentActivity", - "recentPosts", - "recentUploads", - "singlePlaylist", - "subscriptions", - "upcomingEvents" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - } - } - }, - "ChannelSectionTargeting": { - "id": "ChannelSectionTargeting", - "type": "object", - "description": "ChannelSection targeting setting.", - "properties": { - "countries": { - "type": "array", - "description": "The country the channel section is targeting.", - "items": { - "type": "string" - } - }, - "languages": { - "type": "array", - "description": "The language the channel section is targeting.", - "items": { - "type": "string" - } - }, - "regions": { - "type": "array", - "description": "The region the channel section is targeting.", - "items": { - "type": "string" - } - } - } - }, - "ChannelSettings": { - "id": "ChannelSettings", - "type": "object", - "description": "Branding properties for the channel view.", - "properties": { - "country": { - "type": "string", - "description": "The country of the channel." - }, - "defaultLanguage": { - "type": "string" - }, - "defaultTab": { - "type": "string", - "description": "Which content tab users should see when viewing the channel." - }, - "description": { - "type": "string", - "description": "Specifies the channel description." - }, - "featuredChannelsTitle": { - "type": "string", - "description": "Title for the featured channels tab." - }, - "featuredChannelsUrls": { - "type": "array", - "description": "The list of featured channels.", - "items": { - "type": "string" - } - }, - "keywords": { - "type": "string", - "description": "Lists keywords associated with the channel, comma-separated." 
- }, - "moderateComments": { - "type": "boolean", - "description": "Whether user-submitted comments left on the channel page need to be approved by the channel owner to be publicly visible." - }, - "profileColor": { - "type": "string", - "description": "A prominent color that can be rendered on this channel page." - }, - "showBrowseView": { - "type": "boolean", - "description": "Whether the tab to browse the videos should be displayed." - }, - "showRelatedChannels": { - "type": "boolean", - "description": "Whether related channels should be proposed." - }, - "title": { - "type": "string", - "description": "Specifies the channel title." - }, - "trackingAnalyticsAccountId": { - "type": "string", - "description": "The ID for a Google Analytics account to track and measure traffic to the channels." - }, - "unsubscribedTrailer": { - "type": "string", - "description": "The trailer of the channel, for users that are not subscribers." - } - } - }, - "ChannelSnippet": { - "id": "ChannelSnippet", - "type": "object", - "description": "Basic details about a channel, including title, description and thumbnails. Next available id: 15.", - "properties": { - "country": { - "type": "string", - "description": "The country of the channel." - }, - "customUrl": { - "type": "string", - "description": "The custom url of the channel." - }, - "defaultLanguage": { - "type": "string", - "description": "The language of the channel's default title and description." - }, - "description": { - "type": "string", - "description": "The description of the channel." - }, - "localized": { - "$ref": "ChannelLocalization", - "description": "Localized title and description, read-only." - }, - "publishedAt": { - "type": "string", - "description": "The date and time that the channel was created. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "thumbnails": { - "$ref": "ThumbnailDetails", - "description": "A map of thumbnail images associated with the channel. For each object in the map, the key is the name of the thumbnail image, and the value is an object that contains other information about the thumbnail." - }, - "title": { - "type": "string", - "description": "The channel's title." - } - } - }, - "ChannelStatistics": { - "id": "ChannelStatistics", - "type": "object", - "description": "Statistics about a channel: number of subscribers, number of videos in the channel, etc.", - "properties": { - "commentCount": { - "type": "string", - "description": "The number of comments for the channel.", - "format": "uint64" - }, - "hiddenSubscriberCount": { - "type": "boolean", - "description": "Whether or not the number of subscribers is shown for this user." - }, - "subscriberCount": { - "type": "string", - "description": "The number of subscribers that the channel has.", - "format": "uint64" - }, - "videoCount": { - "type": "string", - "description": "The number of videos uploaded to the channel.", - "format": "uint64" - }, - "viewCount": { - "type": "string", - "description": "The number of times the channel has been viewed.", - "format": "uint64" - } - } - }, - "ChannelStatus": { - "id": "ChannelStatus", - "type": "object", - "description": "JSON template for the status part of a channel.", - "properties": { - "isLinked": { - "type": "boolean", - "description": "If true, then the user is linked to either a YouTube username or G+ account. Otherwise, the user doesn't have a public YouTube identity." 
- }, - "longUploadsStatus": { - "type": "string", - "description": "The long uploads status of this channel. See", - "enum": [ - "allowed", - "disallowed", - "eligible", - "longUploadsUnspecified" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - }, - "privacyStatus": { - "type": "string", - "description": "Privacy status of the channel.", - "enum": [ - "private", - "public", - "unlisted" - ], - "enumDescriptions": [ - "", - "", - "" - ] - } - } - }, - "ChannelTopicDetails": { - "id": "ChannelTopicDetails", - "type": "object", - "description": "Freebase topic information related to the channel.", - "properties": { - "topicCategories": { - "type": "array", - "description": "A list of Wikipedia URLs that describe the channel's content.", - "items": { - "type": "string" - } - }, - "topicIds": { - "type": "array", - "description": "A list of Freebase topic IDs associated with the channel. You can retrieve information about each topic using the Freebase Topic API.", - "items": { - "type": "string" - } - } - } - }, - "Comment": { - "id": "Comment", - "type": "object", - "description": "A comment represents a single YouTube comment.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the comment." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#comment\".", - "default": "youtube#comment" - }, - "snippet": { - "$ref": "CommentSnippet", - "description": "The snippet object contains basic details about the comment." - } - } - }, - "CommentListResponse": { - "id": "CommentListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of comments that match the request criteria.", - "items": { - "$ref": "Comment" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#commentListResponse\".", - "default": "youtube#commentListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "CommentSnippet": { - "id": "CommentSnippet", - "type": "object", - "description": "Basic details about a comment, such as its author and text.", - "properties": { - "authorChannelId": { - "type": "any", - "description": "The id of the author's YouTube channel, if any." - }, - "authorChannelUrl": { - "type": "string", - "description": "Link to the author's YouTube channel, if any." - }, - "authorDisplayName": { - "type": "string", - "description": "The name of the user who posted the comment." - }, - "authorProfileImageUrl": { - "type": "string", - "description": "The URL for the avatar of the user who posted the comment." - }, - "canRate": { - "type": "boolean", - "description": "Whether the current viewer can rate this comment." - }, - "channelId": { - "type": "string", - "description": "The id of the corresponding YouTube channel. 
In case of a channel comment this is the channel the comment refers to. In case of a video comment it's the video's channel." - }, - "likeCount": { - "type": "integer", - "description": "The total number of likes this comment has received.", - "format": "uint32" - }, - "moderationStatus": { - "type": "string", - "description": "The comment's moderation status. Will not be set if the comments were requested through the id filter.", - "enum": [ - "heldForReview", - "likelySpam", - "published", - "rejected" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - }, - "parentId": { - "type": "string", - "description": "The unique id of the parent comment, only set for replies." - }, - "publishedAt": { - "type": "string", - "description": "The date and time when the comment was orignally published. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "textDisplay": { - "type": "string", - "description": "The comment's text. The format is either plain text or HTML dependent on what has been requested. Even the plain text representation may differ from the text originally posted in that it may replace video links with video titles etc." - }, - "textOriginal": { - "type": "string", - "description": "The comment's original raw text as initially posted or last updated. The original text will only be returned if it is accessible to the viewer, which is only guaranteed if the viewer is the comment's author." - }, - "updatedAt": { - "type": "string", - "description": "The date and time when was last updated . The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "videoId": { - "type": "string", - "description": "The ID of the video the comment refers to, if any." - }, - "viewerRating": { - "type": "string", - "description": "The rating the viewer has given to this comment. For the time being this will never return RATE_TYPE_DISLIKE and instead return RATE_TYPE_NONE. This may change in the future.", - "enum": [ - "dislike", - "like", - "none", - "unspecified" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - } - } - }, - "CommentThread": { - "id": "CommentThread", - "type": "object", - "description": "A comment thread represents information that applies to a top level comment and all its replies. It can also include the top level comment itself and some of the replies.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the comment thread." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#commentThread\".", - "default": "youtube#commentThread" - }, - "replies": { - "$ref": "CommentThreadReplies", - "description": "The replies object contains a limited number of replies (if any) to the top level comment found in the snippet." - }, - "snippet": { - "$ref": "CommentThreadSnippet", - "description": "The snippet object contains basic details about the comment thread and also the top level comment." - } - } - }, - "CommentThreadListResponse": { - "id": "CommentThreadListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." 
- }, - "items": { - "type": "array", - "description": "A list of comment threads that match the request criteria.", - "items": { - "$ref": "CommentThread" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#commentThreadListResponse\".", - "default": "youtube#commentThreadListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "CommentThreadReplies": { - "id": "CommentThreadReplies", - "type": "object", - "description": "Comments written in (direct or indirect) reply to the top level comment.", - "properties": { - "comments": { - "type": "array", - "description": "A limited number of replies. Unless the number of replies returned equals total_reply_count in the snippet the returned replies are only a subset of the total number of replies.", - "items": { - "$ref": "Comment" - } - } - } - }, - "CommentThreadSnippet": { - "id": "CommentThreadSnippet", - "type": "object", - "description": "Basic details about a comment thread.", - "properties": { - "canReply": { - "type": "boolean", - "description": "Whether the current viewer of the thread can reply to it. This is viewer specific - other viewers may see a different value for this field." - }, - "channelId": { - "type": "string", - "description": "The YouTube channel the comments in the thread refer to or the channel with the video the comments refer to. If video_id isn't set the comments refer to the channel itself." - }, - "isPublic": { - "type": "boolean", - "description": "Whether the thread (and therefore all its comments) is visible to all YouTube users." - }, - "topLevelComment": { - "$ref": "Comment", - "description": "The top level comment of this thread." - }, - "totalReplyCount": { - "type": "integer", - "description": "The total number of replies (not including the top level comment).", - "format": "uint32" - }, - "videoId": { - "type": "string", - "description": "The ID of the video the comments refer to, if any. No video_id implies a channel discussion comment." - } - } - }, - "ContentRating": { - "id": "ContentRating", - "type": "object", - "description": "Ratings schemes. The country-specific ratings are mostly for movies and shows. NEXT_ID: 69", - "properties": { - "acbRating": { - "type": "string", - "description": "The video's Australian Classification Board (ACB) or Australian Communications and Media Authority (ACMA) rating. 
ACMA ratings are used to classify children's television programming.", - "enum": [ - "acbC", - "acbE", - "acbG", - "acbM", - "acbMa15plus", - "acbP", - "acbPg", - "acbR18plus", - "acbUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "agcomRating": { - "type": "string", - "description": "The video's rating from Italy's Autorità per le Garanzie nelle Comunicazioni (AGCOM).", - "enum": [ - "agcomT", - "agcomUnrated", - "agcomVm14", - "agcomVm18" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - }, - "anatelRating": { - "type": "string", - "description": "The video's Anatel (Asociación Nacional de Televisión) rating for Chilean television.", - "enum": [ - "anatelA", - "anatelF", - "anatelI", - "anatelI10", - "anatelI12", - "anatelI7", - "anatelR", - "anatelUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "bbfcRating": { - "type": "string", - "description": "The video's British Board of Film Classification (BBFC) rating.", - "enum": [ - "bbfc12", - "bbfc12a", - "bbfc15", - "bbfc18", - "bbfcPg", - "bbfcR18", - "bbfcU", - "bbfcUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "bfvcRating": { - "type": "string", - "description": "The video's rating from Thailand's Board of Film and Video Censors.", - "enum": [ - "bfvc13", - "bfvc15", - "bfvc18", - "bfvc20", - "bfvcB", - "bfvcE", - "bfvcG", - "bfvcUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "bmukkRating": { - "type": "string", - "description": "The video's rating from the Austrian Board of Media Classification (Bundesministerium für Unterricht, Kunst und Kultur).", - "enum": [ - "bmukk10", - "bmukk12", - "bmukk14", - "bmukk16", - "bmukk6", - "bmukk8", - "bmukkAa", - "bmukkUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "catvRating": { - "type": "string", - "description": "Rating system for Canadian TV - Canadian TV Classification System The video's rating from the Canadian Radio-Television and Telecommunications Commission (CRTC) for Canadian English-language broadcasts. For more information, see the Canadian Broadcast Standards Council website.", - "enum": [ - "catv14plus", - "catv18plus", - "catvC", - "catvC8", - "catvG", - "catvPg", - "catvUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "catvfrRating": { - "type": "string", - "description": "The video's rating from the Canadian Radio-Television and Telecommunications Commission (CRTC) for Canadian French-language broadcasts. 
For more information, see the Canadian Broadcast Standards Council website.", - "enum": [ - "catvfr13plus", - "catvfr16plus", - "catvfr18plus", - "catvfr8plus", - "catvfrG", - "catvfrUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "cbfcRating": { - "type": "string", - "description": "The video's Central Board of Film Certification (CBFC - India) rating.", - "enum": [ - "cbfcA", - "cbfcS", - "cbfcU", - "cbfcUA", - "cbfcUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - }, - "cccRating": { - "type": "string", - "description": "The video's Consejo de Calificación Cinematográfica (Chile) rating.", - "enum": [ - "ccc14", - "ccc18", - "ccc18s", - "ccc18v", - "ccc6", - "cccTe", - "cccUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "cceRating": { - "type": "string", - "description": "The video's rating from Portugal's Comissão de Classificação de Espect´culos.", - "enum": [ - "cceM12", - "cceM14", - "cceM16", - "cceM18", - "cceM4", - "cceM6", - "cceUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "chfilmRating": { - "type": "string", - "description": "The video's rating in Switzerland.", - "enum": [ - "chfilm0", - "chfilm12", - "chfilm16", - "chfilm18", - "chfilm6", - "chfilmUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "chvrsRating": { - "type": "string", - "description": "The video's Canadian Home Video Rating System (CHVRS) rating.", - "enum": [ - "chvrs14a", - "chvrs18a", - "chvrsE", - "chvrsG", - "chvrsPg", - "chvrsR", - "chvrsUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "cicfRating": { - "type": "string", - "description": "The video's rating from the Commission de Contrôle des Films (Belgium).", - "enum": [ - "cicfE", - "cicfKntEna", - "cicfKtEa", - "cicfUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - }, - "cnaRating": { - "type": "string", - "description": "The video's rating from Romania's CONSILIUL NATIONAL AL AUDIOVIZUALULUI (CNA).", - "enum": [ - "cna12", - "cna15", - "cna18", - "cna18plus", - "cnaAp", - "cnaUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "cncRating": { - "type": "string", - "description": "Rating system in France - Commission de classification cinematographique", - "enum": [ - "cnc10", - "cnc12", - "cnc16", - "cnc18", - "cncE", - "cncT", - "cncUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "csaRating": { - "type": "string", - "description": "The video's rating from France's Conseil supérieur de l?audiovisuel, which rates broadcast content.", - "enum": [ - "csa10", - "csa12", - "csa16", - "csa18", - "csaInterdiction", - "csaT", - "csaUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "cscfRating": { - "type": "string", - "description": "The video's rating from Luxembourg's Commission de surveillance de la classification des films (CSCF).", - "enum": [ - "cscf12", - "cscf16", - "cscf18", - "cscf6", - "cscf9", - "cscfA", - "cscfAl", - "cscfUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "czfilmRating": { - "type": "string", - "description": "The video's rating in the Czech Republic.", - "enum": [ - "czfilm12", - "czfilm14", - "czfilm18", - "czfilmU", - "czfilmUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - }, - "djctqRating": { - "type": 
"string", - "description": "The video's Departamento de Justiça, Classificação, Qualificação e Títulos (DJCQT - Brazil) rating.", - "enum": [ - "djctq10", - "djctq12", - "djctq14", - "djctq16", - "djctq18", - "djctqL", - "djctqUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "djctqRatingReasons": { - "type": "array", - "description": "Reasons that explain why the video received its DJCQT (Brazil) rating.", - "items": { - "type": "string", - "enum": [ - "djctqCriminalActs", - "djctqDrugs", - "djctqExplicitSex", - "djctqExtremeViolence", - "djctqIllegalDrugs", - "djctqImpactingContent", - "djctqInappropriateLanguage", - "djctqLegalDrugs", - "djctqNudity", - "djctqSex", - "djctqSexualContent", - "djctqViolence" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - } - }, - "ecbmctRating": { - "type": "string", - "description": "Rating system in Turkey - Evaluation and Classification Board of the Ministry of Culture and Tourism", - "enum": [ - "ecbmct13a", - "ecbmct13plus", - "ecbmct15a", - "ecbmct15plus", - "ecbmct18plus", - "ecbmct7a", - "ecbmct7plus", - "ecbmctG", - "ecbmctUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "eefilmRating": { - "type": "string", - "description": "The video's rating in Estonia.", - "enum": [ - "eefilmK12", - "eefilmK14", - "eefilmK16", - "eefilmK6", - "eefilmL", - "eefilmMs12", - "eefilmMs6", - "eefilmPere", - "eefilmUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "egfilmRating": { - "type": "string", - "description": "The video's rating in Egypt.", - "enum": [ - "egfilm18", - "egfilmBn", - "egfilmGn", - "egfilmUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - }, - "eirinRating": { - "type": "string", - "description": "The video's Eirin (映倫) rating. Eirin is the Japanese rating system.", - "enum": [ - "eirinG", - "eirinPg12", - "eirinR15plus", - "eirinR18plus", - "eirinUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - }, - "fcbmRating": { - "type": "string", - "description": "The video's rating from Malaysia's Film Censorship Board.", - "enum": [ - "fcbm18", - "fcbm18pa", - "fcbm18pl", - "fcbm18sg", - "fcbm18sx", - "fcbmP13", - "fcbmPg13", - "fcbmU", - "fcbmUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "fcoRating": { - "type": "string", - "description": "The video's rating from Hong Kong's Office for Film, Newspaper and Article Administration.", - "enum": [ - "fcoI", - "fcoIi", - "fcoIia", - "fcoIib", - "fcoIii", - "fcoUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "fmocRating": { - "type": "string", - "description": "This property has been deprecated. 
Use the contentDetails.contentRating.cncRating instead.", - "enum": [ - "fmoc10", - "fmoc12", - "fmoc16", - "fmoc18", - "fmocE", - "fmocU", - "fmocUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "fpbRating": { - "type": "string", - "description": "The video's rating from South Africa's Film and Publication Board.", - "enum": [ - "fpb10", - "fpb1012Pg", - "fpb13", - "fpb16", - "fpb18", - "fpb79Pg", - "fpbA", - "fpbPg", - "fpbUnrated", - "fpbX18", - "fpbXx" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "fpbRatingReasons": { - "type": "array", - "description": "Reasons that explain why the video received its FPB (South Africa) rating.", - "items": { - "type": "string", - "enum": [ - "fpbBlasphemy", - "fpbCriminalTechniques", - "fpbDrugs", - "fpbHorror", - "fpbImitativeActsTechniques", - "fpbLanguage", - "fpbNudity", - "fpbPrejudice", - "fpbSex", - "fpbSexualViolence", - "fpbViolence" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - } - }, - "fskRating": { - "type": "string", - "description": "The video's Freiwillige Selbstkontrolle der Filmwirtschaft (FSK - Germany) rating.", - "enum": [ - "fsk0", - "fsk12", - "fsk16", - "fsk18", - "fsk6", - "fskUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "grfilmRating": { - "type": "string", - "description": "The video's rating in Greece.", - "enum": [ - "grfilmE", - "grfilmK", - "grfilmK12", - "grfilmK13", - "grfilmK15", - "grfilmK17", - "grfilmK18", - "grfilmUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "icaaRating": { - "type": "string", - "description": "The video's Instituto de la Cinematografía y de las Artes Audiovisuales (ICAA - Spain) rating.", - "enum": [ - "icaa12", - "icaa13", - "icaa16", - "icaa18", - "icaa7", - "icaaApta", - "icaaUnrated", - "icaaX" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "ifcoRating": { - "type": "string", - "description": "The video's Irish Film Classification Office (IFCO - Ireland) rating. 
See the IFCO website for more information.", - "enum": [ - "ifco12", - "ifco12a", - "ifco15", - "ifco15a", - "ifco16", - "ifco18", - "ifcoG", - "ifcoPg", - "ifcoUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "ilfilmRating": { - "type": "string", - "description": "The video's rating in Israel.", - "enum": [ - "ilfilm12", - "ilfilm16", - "ilfilm18", - "ilfilmAa", - "ilfilmUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - }, - "incaaRating": { - "type": "string", - "description": "The video's INCAA (Instituto Nacional de Cine y Artes Audiovisuales - Argentina) rating.", - "enum": [ - "incaaAtp", - "incaaC", - "incaaSam13", - "incaaSam16", - "incaaSam18", - "incaaUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "kfcbRating": { - "type": "string", - "description": "The video's rating from the Kenya Film Classification Board.", - "enum": [ - "kfcb16plus", - "kfcbG", - "kfcbPg", - "kfcbR", - "kfcbUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - }, - "kijkwijzerRating": { - "type": "string", - "description": "voor de Classificatie van Audiovisuele Media (Netherlands).", - "enum": [ - "kijkwijzer12", - "kijkwijzer16", - "kijkwijzer18", - "kijkwijzer6", - "kijkwijzer9", - "kijkwijzerAl", - "kijkwijzerUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "kmrbRating": { - "type": "string", - "description": "The video's Korea Media Rating Board (영상물등급위원회) rating. The KMRB rates videos in South Korea.", - "enum": [ - "kmrb12plus", - "kmrb15plus", - "kmrbAll", - "kmrbR", - "kmrbTeenr", - "kmrbUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "lsfRating": { - "type": "string", - "description": "The video's rating from Indonesia's Lembaga Sensor Film.", - "enum": [ - "lsf13", - "lsf17", - "lsf21", - "lsfA", - "lsfBo", - "lsfD", - "lsfR", - "lsfSu", - "lsfUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "mccaaRating": { - "type": "string", - "description": "The video's rating from Malta's Film Age-Classification Board.", - "enum": [ - "mccaa12", - "mccaa12a", - "mccaa14", - "mccaa15", - "mccaa16", - "mccaa18", - "mccaaPg", - "mccaaU", - "mccaaUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "mccypRating": { - "type": "string", - "description": "The video's rating from the Danish Film Institute's (Det Danske Filminstitut) Media Council for Children and Young People.", - "enum": [ - "mccyp11", - "mccyp15", - "mccyp7", - "mccypA", - "mccypUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - }, - "mcstRating": { - "type": "string", - "description": "The video's rating system for Vietnam - MCST", - "enum": [ - "mcst0", - "mcst16plus", - "mcstC13", - "mcstC16", - "mcstC18", - "mcstGPg", - "mcstP", - "mcstUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "mdaRating": { - "type": "string", - "description": "The video's rating from Singapore's Media Development Authority (MDA) and, specifically, it's Board of Film Censors (BFC).", - "enum": [ - "mdaG", - "mdaM18", - "mdaNc16", - "mdaPg", - "mdaPg13", - "mdaR21", - "mdaUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "medietilsynetRating": { - "type": "string", - "description": "The video's rating from Medietilsynet, the Norwegian Media Authority.", - 
"enum": [ - "medietilsynet11", - "medietilsynet12", - "medietilsynet15", - "medietilsynet18", - "medietilsynet6", - "medietilsynet7", - "medietilsynet9", - "medietilsynetA", - "medietilsynetUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "mekuRating": { - "type": "string", - "description": "The video's rating from Finland's Kansallinen Audiovisuaalinen Instituutti (National Audiovisual Institute).", - "enum": [ - "meku12", - "meku16", - "meku18", - "meku7", - "mekuS", - "mekuUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "mibacRating": { - "type": "string", - "description": "The video's rating from the Ministero dei Beni e delle Attività Culturali e del Turismo (Italy).", - "enum": [ - "mibacT", - "mibacUnrated", - "mibacVap", - "mibacVm12", - "mibacVm14", - "mibacVm18" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "mocRating": { - "type": "string", - "description": "The video's Ministerio de Cultura (Colombia) rating.", - "enum": [ - "moc12", - "moc15", - "moc18", - "moc7", - "mocBanned", - "mocE", - "mocT", - "mocUnrated", - "mocX" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "moctwRating": { - "type": "string", - "description": "The video's rating from Taiwan's Ministry of Culture (文化部).", - "enum": [ - "moctwG", - "moctwP", - "moctwPg", - "moctwR", - "moctwR12", - "moctwR15", - "moctwUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "mpaaRating": { - "type": "string", - "description": "The video's Motion Picture Association of America (MPAA) rating.", - "enum": [ - "mpaaG", - "mpaaNc17", - "mpaaPg", - "mpaaPg13", - "mpaaR", - "mpaaUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "mtrcbRating": { - "type": "string", - "description": "The video's rating from the Movie and Television Review and Classification Board (Philippines).", - "enum": [ - "mtrcbG", - "mtrcbPg", - "mtrcbR13", - "mtrcbR16", - "mtrcbR18", - "mtrcbUnrated", - "mtrcbX" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "nbcRating": { - "type": "string", - "description": "The video's rating from the Maldives National Bureau of Classification.", - "enum": [ - "nbc12plus", - "nbc15plus", - "nbc18plus", - "nbc18plusr", - "nbcG", - "nbcPg", - "nbcPu", - "nbcUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "nbcplRating": { - "type": "string", - "description": "The video's rating in Poland.", - "enum": [ - "nbcpl18plus", - "nbcplI", - "nbcplIi", - "nbcplIii", - "nbcplIv", - "nbcplUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "nfrcRating": { - "type": "string", - "description": "The video's rating from the Bulgarian National Film Center.", - "enum": [ - "nfrcA", - "nfrcB", - "nfrcC", - "nfrcD", - "nfrcUnrated", - "nfrcX" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "nfvcbRating": { - "type": "string", - "description": "The video's rating from Nigeria's National Film and Video Censors Board.", - "enum": [ - "nfvcb12", - "nfvcb12a", - "nfvcb15", - "nfvcb18", - "nfvcbG", - "nfvcbPg", - "nfvcbRe", - "nfvcbUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "nkclvRating": { - "type": "string", - "description": "The video's rating from the Nacionãlais Kino centrs (National Film Centre of Latvia).", - "enum": [ 
- "nkclv12plus", - "nkclv18plus", - "nkclv7plus", - "nkclvU", - "nkclvUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - }, - "oflcRating": { - "type": "string", - "description": "The video's Office of Film and Literature Classification (OFLC - New Zealand) rating.", - "enum": [ - "oflcG", - "oflcM", - "oflcPg", - "oflcR13", - "oflcR15", - "oflcR16", - "oflcR18", - "oflcRp13", - "oflcRp16", - "oflcUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "pefilmRating": { - "type": "string", - "description": "The video's rating in Peru.", - "enum": [ - "pefilm14", - "pefilm18", - "pefilmPg", - "pefilmPt", - "pefilmUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - }, - "rcnofRating": { - "type": "string", - "description": "The video's rating from the Hungarian Nemzeti Filmiroda, the Rating Committee of the National Office of Film.", - "enum": [ - "rcnofI", - "rcnofIi", - "rcnofIii", - "rcnofIv", - "rcnofUnrated", - "rcnofV", - "rcnofVi" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "resorteviolenciaRating": { - "type": "string", - "description": "The video's rating in Venezuela.", - "enum": [ - "resorteviolenciaA", - "resorteviolenciaB", - "resorteviolenciaC", - "resorteviolenciaD", - "resorteviolenciaE", - "resorteviolenciaUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "rtcRating": { - "type": "string", - "description": "The video's General Directorate of Radio, Television and Cinematography (Mexico) rating.", - "enum": [ - "rtcA", - "rtcAa", - "rtcB", - "rtcB15", - "rtcC", - "rtcD", - "rtcUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "rteRating": { - "type": "string", - "description": "The video's rating from Ireland's Raidió Teilifís Éireann.", - "enum": [ - "rteCh", - "rteGa", - "rteMa", - "rtePs", - "rteUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - }, - "russiaRating": { - "type": "string", - "description": "The video's National Film Registry of the Russian Federation (MKRF - Russia) rating.", - "enum": [ - "russia0", - "russia12", - "russia16", - "russia18", - "russia6", - "russiaUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "skfilmRating": { - "type": "string", - "description": "The video's rating in Slovakia.", - "enum": [ - "skfilmG", - "skfilmP2", - "skfilmP5", - "skfilmP8", - "skfilmUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - }, - "smaisRating": { - "type": "string", - "description": "The video's rating in Iceland.", - "enum": [ - "smais12", - "smais14", - "smais16", - "smais18", - "smais7", - "smaisL", - "smaisUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "smsaRating": { - "type": "string", - "description": "The video's rating from Statens medieråd (Sweden's National Media Council).", - "enum": [ - "smsa11", - "smsa15", - "smsa7", - "smsaA", - "smsaUnrated" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - }, - "tvpgRating": { - "type": "string", - "description": "The video's TV Parental Guidelines (TVPG) rating.", - "enum": [ - "pg14", - "tvpgG", - "tvpgMa", - "tvpgPg", - "tvpgUnrated", - "tvpgY", - "tvpgY7", - "tvpgY7Fv" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "ytRating": { - "type": "string", - "description": "A rating that YouTube uses to identify age-restricted content.", 
- "enum": [ - "ytAgeRestricted" - ], - "enumDescriptions": [ - "" - ] - } - } - }, - "FanFundingEvent": { - "id": "FanFundingEvent", - "type": "object", - "description": "A fanFundingEvent resource represents a fan funding event on a YouTube channel. Fan funding events occur when a user gives one-time monetary support to the channel owner.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube assigns to uniquely identify the fan funding event." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#fanFundingEvent\".", - "default": "youtube#fanFundingEvent" - }, - "snippet": { - "$ref": "FanFundingEventSnippet", - "description": "The snippet object contains basic details about the fan funding event." - } - } - }, - "FanFundingEventListResponse": { - "id": "FanFundingEventListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of fan funding events that match the request criteria.", - "items": { - "$ref": "FanFundingEvent" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#fanFundingEventListResponse\".", - "default": "youtube#fanFundingEventListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "FanFundingEventSnippet": { - "id": "FanFundingEventSnippet", - "type": "object", - "properties": { - "amountMicros": { - "type": "string", - "description": "The amount of funding in micros of fund_currency. e.g., 1 is represented", - "format": "uint64" - }, - "channelId": { - "type": "string", - "description": "Channel id where the funding event occurred." - }, - "commentText": { - "type": "string", - "description": "The text contents of the comment left by the user." - }, - "createdAt": { - "type": "string", - "description": "The date and time when the funding occurred. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "currency": { - "type": "string", - "description": "The currency in which the fund was made. ISO 4217." - }, - "displayString": { - "type": "string", - "description": "A rendered string that displays the fund amount and currency (e.g., \"$1.00\"). The string is rendered for the given language." - }, - "supporterDetails": { - "$ref": "ChannelProfileDetails", - "description": "Details about the supporter. Only filled if the event was made public by the user." 
- } - } - }, - "GeoPoint": { - "id": "GeoPoint", - "type": "object", - "description": "Geographical coordinates of a point, in WGS84.", - "properties": { - "altitude": { - "type": "number", - "description": "Altitude above the reference ellipsoid, in meters.", - "format": "double" - }, - "latitude": { - "type": "number", - "description": "Latitude in degrees.", - "format": "double" - }, - "longitude": { - "type": "number", - "description": "Longitude in degrees.", - "format": "double" - } - } - }, - "GuideCategory": { - "id": "GuideCategory", - "type": "object", - "description": "A guideCategory resource identifies a category that YouTube algorithmically assigns based on a channel's content or other indicators, such as the channel's popularity. The list is similar to video categories, with the difference being that a video's uploader can assign a video category but only YouTube can assign a channel category.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the guide category." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#guideCategory\".", - "default": "youtube#guideCategory" - }, - "snippet": { - "$ref": "GuideCategorySnippet", - "description": "The snippet object contains basic details about the category, such as its title." - } - } - }, - "GuideCategoryListResponse": { - "id": "GuideCategoryListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of categories that can be associated with YouTube channels. In this map, the category ID is the map key, and its value is the corresponding guideCategory resource.", - "items": { - "$ref": "GuideCategory" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#guideCategoryListResponse\".", - "default": "youtube#guideCategoryListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "prevPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set." - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "GuideCategorySnippet": { - "id": "GuideCategorySnippet", - "type": "object", - "description": "Basic details about a guide category.", - "properties": { - "channelId": { - "type": "string", - "default": "UCBR8-60-B28hp2BmDPdntcQ" - }, - "title": { - "type": "string", - "description": "Description of the guide category." - } - } - }, - "I18nLanguage": { - "id": "I18nLanguage", - "type": "object", - "description": "An i18nLanguage resource identifies a UI language currently supported by YouTube.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." 
- }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the i18n language." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#i18nLanguage\".", - "default": "youtube#i18nLanguage" - }, - "snippet": { - "$ref": "I18nLanguageSnippet", - "description": "The snippet object contains basic details about the i18n language, such as language code and human-readable name." - } - } - }, - "I18nLanguageListResponse": { - "id": "I18nLanguageListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of supported i18n languages. In this map, the i18n language ID is the map key, and its value is the corresponding i18nLanguage resource.", - "items": { - "$ref": "I18nLanguage" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#i18nLanguageListResponse\".", - "default": "youtube#i18nLanguageListResponse" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "I18nLanguageSnippet": { - "id": "I18nLanguageSnippet", - "type": "object", - "description": "Basic details about an i18n language, such as language code and human-readable name.", - "properties": { - "hl": { - "type": "string", - "description": "A short BCP-47 code that uniquely identifies a language." - }, - "name": { - "type": "string", - "description": "The human-readable name of the language in the language itself." - } - } - }, - "I18nRegion": { - "id": "I18nRegion", - "type": "object", - "description": "A i18nRegion resource identifies a region where YouTube is available.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the i18n region." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#i18nRegion\".", - "default": "youtube#i18nRegion" - }, - "snippet": { - "$ref": "I18nRegionSnippet", - "description": "The snippet object contains basic details about the i18n region, such as region code and human-readable name." - } - } - }, - "I18nRegionListResponse": { - "id": "I18nRegionListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of regions where YouTube is available. In this map, the i18n region ID is the map key, and its value is the corresponding i18nRegion resource.", - "items": { - "$ref": "I18nRegion" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#i18nRegionListResponse\".", - "default": "youtube#i18nRegionListResponse" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." 
- } - } - }, - "I18nRegionSnippet": { - "id": "I18nRegionSnippet", - "type": "object", - "description": "Basic details about an i18n region, such as region code and human-readable name.", - "properties": { - "gl": { - "type": "string", - "description": "The region code as a 2-letter ISO country code." - }, - "name": { - "type": "string", - "description": "The human-readable name of the region." - } - } - }, - "ImageSettings": { - "id": "ImageSettings", - "type": "object", - "description": "Branding properties for images associated with the channel.", - "properties": { - "backgroundImageUrl": { - "$ref": "LocalizedProperty", - "description": "The URL for the background image shown on the video watch page. The image should be 1200px by 615px, with a maximum file size of 128k." - }, - "bannerExternalUrl": { - "type": "string", - "description": "This is used only in update requests; if it's set, we use this URL to generate all of the above banner URLs." - }, - "bannerImageUrl": { - "type": "string", - "description": "Banner image. Desktop size (1060x175)." - }, - "bannerMobileExtraHdImageUrl": { - "type": "string", - "description": "Banner image. Mobile size high resolution (1440x395)." - }, - "bannerMobileHdImageUrl": { - "type": "string", - "description": "Banner image. Mobile size high resolution (1280x360)." - }, - "bannerMobileImageUrl": { - "type": "string", - "description": "Banner image. Mobile size (640x175)." - }, - "bannerMobileLowImageUrl": { - "type": "string", - "description": "Banner image. Mobile size low resolution (320x88)." - }, - "bannerMobileMediumHdImageUrl": { - "type": "string", - "description": "Banner image. Mobile size medium/high resolution (960x263)." - }, - "bannerTabletExtraHdImageUrl": { - "type": "string", - "description": "Banner image. Tablet size extra high resolution (2560x424)." - }, - "bannerTabletHdImageUrl": { - "type": "string", - "description": "Banner image. Tablet size high resolution (2276x377)." - }, - "bannerTabletImageUrl": { - "type": "string", - "description": "Banner image. Tablet size (1707x283)." - }, - "bannerTabletLowImageUrl": { - "type": "string", - "description": "Banner image. Tablet size low resolution (1138x188)." - }, - "bannerTvHighImageUrl": { - "type": "string", - "description": "Banner image. TV size high resolution (1920x1080)." - }, - "bannerTvImageUrl": { - "type": "string", - "description": "Banner image. TV size extra high resolution (2120x1192)." - }, - "bannerTvLowImageUrl": { - "type": "string", - "description": "Banner image. TV size low resolution (854x480)." - }, - "bannerTvMediumImageUrl": { - "type": "string", - "description": "Banner image. TV size medium resolution (1280x720)." - }, - "largeBrandedBannerImageImapScript": { - "$ref": "LocalizedProperty", - "description": "The image map script for the large banner image." - }, - "largeBrandedBannerImageUrl": { - "$ref": "LocalizedProperty", - "description": "The URL for the 854px by 70px image that appears below the video player in the expanded video view of the video watch page." - }, - "smallBrandedBannerImageImapScript": { - "$ref": "LocalizedProperty", - "description": "The image map script for the small banner image." - }, - "smallBrandedBannerImageUrl": { - "$ref": "LocalizedProperty", - "description": "The URL for the 640px by 70px banner image that appears below the video player in the default view of the video watch page." 
- }, - "trackingImageUrl": { - "type": "string", - "description": "The URL for a 1px by 1px tracking pixel that can be used to collect statistics for views of the channel or video pages." - }, - "watchIconImageUrl": { - "type": "string", - "description": "The URL for the image that appears above the top-left corner of the video player. This is a 25-pixel-high image with a flexible width that cannot exceed 170 pixels." - } - } - }, - "IngestionInfo": { - "id": "IngestionInfo", - "type": "object", - "description": "Describes information necessary for ingesting an RTMP or an HTTP stream.", - "properties": { - "backupIngestionAddress": { - "type": "string", - "description": "The backup ingestion URL that you should use to stream video to YouTube. You have the option of simultaneously streaming the content that you are sending to the ingestionAddress to this URL." - }, - "ingestionAddress": { - "type": "string", - "description": "The primary ingestion URL that you should use to stream video to YouTube. You must stream video to this URL.\n\nDepending on which application or tool you use to encode your video stream, you may need to enter the stream URL and stream name separately or you may need to concatenate them in the following format:\n\nSTREAM_URL/STREAM_NAME" - }, - "streamName": { - "type": "string", - "description": "The HTTP or RTMP stream name that YouTube assigns to the video stream." - } - } - }, - "InvideoBranding": { - "id": "InvideoBranding", - "type": "object", - "properties": { - "imageBytes": { - "type": "string", - "format": "byte" - }, - "imageUrl": { - "type": "string" - }, - "position": { - "$ref": "InvideoPosition" - }, - "targetChannelId": { - "type": "string" - }, - "timing": { - "$ref": "InvideoTiming" - } - } - }, - "InvideoPosition": { - "id": "InvideoPosition", - "type": "object", - "description": "Describes the spatial position of a visual widget inside a video. It is a union of various position types, out of which only will be set one.", - "properties": { - "cornerPosition": { - "type": "string", - "description": "Describes in which corner of the video the visual widget will appear.", - "enum": [ - "bottomLeft", - "bottomRight", - "topLeft", - "topRight" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - }, - "type": { - "type": "string", - "description": "Defines the position type.", - "enum": [ - "corner" - ], - "enumDescriptions": [ - "" - ] - } - } - }, - "InvideoPromotion": { - "id": "InvideoPromotion", - "type": "object", - "description": "Describes an invideo promotion campaign consisting of multiple promoted items. A campaign belongs to a single channel_id.", - "properties": { - "defaultTiming": { - "$ref": "InvideoTiming", - "description": "The default temporal position within the video where the promoted item will be displayed. Can be overriden by more specific timing in the item." - }, - "items": { - "type": "array", - "description": "List of promoted items in decreasing priority.", - "items": { - "$ref": "PromotedItem" - } - }, - "position": { - "$ref": "InvideoPosition", - "description": "The spatial position within the video where the promoted item will be displayed." - }, - "useSmartTiming": { - "type": "boolean", - "description": "Indicates whether the channel's promotional campaign uses \"smart timing.\" This feature attempts to show promotions at a point in the video when they are more likely to be clicked and less likely to disrupt the viewing experience. This feature also picks up a single promotion to show on each video." 
- } - } - }, - "InvideoTiming": { - "id": "InvideoTiming", - "type": "object", - "description": "Describes a temporal position of a visual widget inside a video.", - "properties": { - "durationMs": { - "type": "string", - "description": "Defines the duration in milliseconds for which the promotion should be displayed. If missing, the client should use the default.", - "format": "uint64" - }, - "offsetMs": { - "type": "string", - "description": "Defines the time at which the promotion will appear. Depending on the value of type the value of the offsetMs field will represent a time offset from the start or from the end of the video, expressed in milliseconds.", - "format": "uint64" - }, - "type": { - "type": "string", - "description": "Describes a timing type. If the value is offsetFromStart, then the offsetMs field represents an offset from the start of the video. If the value is offsetFromEnd, then the offsetMs field represents an offset from the end of the video.", - "enum": [ - "offsetFromEnd", - "offsetFromStart" - ], - "enumDescriptions": [ - "", - "" - ] - } - } - }, - "LanguageTag": { - "id": "LanguageTag", - "type": "object", - "properties": { - "value": { - "type": "string" - } - } - }, - "LiveBroadcast": { - "id": "LiveBroadcast", - "type": "object", - "description": "A liveBroadcast resource represents an event that will be streamed, via live video, on YouTube.", - "properties": { - "contentDetails": { - "$ref": "LiveBroadcastContentDetails", - "description": "The contentDetails object contains information about the event's video content, such as whether the content can be shown in an embedded video player or if it will be archived and therefore available for viewing after the event has concluded." - }, - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube assigns to uniquely identify the broadcast.", - "annotations": { - "required": [ - "youtube.liveBroadcasts.update" - ] - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#liveBroadcast\".", - "default": "youtube#liveBroadcast" - }, - "snippet": { - "$ref": "LiveBroadcastSnippet", - "description": "The snippet object contains basic details about the event, including its title, description, start time, and end time." - }, - "statistics": { - "$ref": "LiveBroadcastStatistics", - "description": "The statistics object contains info about the event's current stats. These include concurrent viewers and total chat count. Statistics can change (in either direction) during the lifetime of an event. Statistics are only returned while the event is live." - }, - "status": { - "$ref": "LiveBroadcastStatus", - "description": "The status object contains information about the event's status." - }, - "topicDetails": { - "$ref": "LiveBroadcastTopicDetails" - } - } - }, - "LiveBroadcastContentDetails": { - "id": "LiveBroadcastContentDetails", - "type": "object", - "description": "Detailed settings of a broadcast.", - "properties": { - "boundStreamId": { - "type": "string", - "description": "This value uniquely identifies the live stream bound to the broadcast." 
- }, - "boundStreamLastUpdateTimeMs": { - "type": "string", - "description": "The date and time that the live stream referenced by boundStreamId was last updated.", - "format": "date-time" - }, - "closedCaptionsType": { - "type": "string", - "enum": [ - "closedCaptionsDisabled", - "closedCaptionsEmbedded", - "closedCaptionsHttpPost" - ], - "enumDescriptions": [ - "", - "", - "" - ] - }, - "enableClosedCaptions": { - "type": "boolean", - "description": "This setting indicates whether HTTP POST closed captioning is enabled for this broadcast. The ingestion URL of the closed captions is returned through the liveStreams API. This is mutually exclusive with using the closed_captions_type property, and is equivalent to setting closed_captions_type to CLOSED_CAPTIONS_HTTP_POST." - }, - "enableContentEncryption": { - "type": "boolean", - "description": "This setting indicates whether YouTube should enable content encryption for the broadcast.", - "annotations": { - "required": [ - "youtube.liveBroadcasts.update" - ] - } - }, - "enableDvr": { - "type": "boolean", - "description": "This setting determines whether viewers can access DVR controls while watching the video. DVR controls enable the viewer to control the video playback experience by pausing, rewinding, or fast forwarding content. The default value for this property is true.\n\n\n\nImportant: You must set the value to true and also set the enableArchive property's value to true if you want to make playback available immediately after the broadcast ends.", - "annotations": { - "required": [ - "youtube.liveBroadcasts.update" - ] - } - }, - "enableEmbed": { - "type": "boolean", - "description": "This setting indicates whether the broadcast video can be played in an embedded player. If you choose to archive the video (using the enableArchive property), this setting will also apply to the archived video.", - "annotations": { - "required": [ - "youtube.liveBroadcasts.update" - ] - } - }, - "enableLowLatency": { - "type": "boolean", - "description": "Indicates whether this broadcast has low latency enabled." - }, - "monitorStream": { - "$ref": "MonitorStreamInfo", - "description": "The monitorStream object contains information about the monitor stream, which the broadcaster can use to review the event content before the broadcast stream is shown publicly." - }, - "projection": { - "type": "string", - "description": "The projection format of this broadcast. This defaults to rectangular.", - "enum": [ - "360", - "rectangular" - ], - "enumDescriptions": [ - "", - "" - ] - }, - "recordFromStart": { - "type": "boolean", - "description": "Automatically start recording after the event goes live. The default value for this property is true.\n\n\n\nImportant: You must also set the enableDvr property's value to true if you want the playback to be available immediately after the broadcast ends. If you set this property's value to true but do not also set the enableDvr property to true, there may be a delay of around one day before the archived video will be available for playback.", - "annotations": { - "required": [ - "youtube.liveBroadcasts.update" - ] - } - }, - "startWithSlate": { - "type": "boolean", - "description": "This setting indicates whether the broadcast should automatically begin with an in-stream slate when you update the broadcast's status to live. 
After updating the status, you then need to send a liveCuepoints.insert request that sets the cuepoint's eventState to end to remove the in-stream slate and make your broadcast stream visible to viewers.", - "annotations": { - "required": [ - "youtube.liveBroadcasts.update" - ] - } - } - } - }, - "LiveBroadcastListResponse": { - "id": "LiveBroadcastListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of broadcasts that match the request criteria.", - "items": { - "$ref": "LiveBroadcast" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#liveBroadcastListResponse\".", - "default": "youtube#liveBroadcastListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "prevPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set." - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "LiveBroadcastSnippet": { - "id": "LiveBroadcastSnippet", - "type": "object", - "properties": { - "actualEndTime": { - "type": "string", - "description": "The date and time that the broadcast actually ended. This information is only available once the broadcast's state is complete. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "actualStartTime": { - "type": "string", - "description": "The date and time that the broadcast actually started. This information is only available once the broadcast's state is live. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "channelId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the channel that is publishing the broadcast." - }, - "description": { - "type": "string", - "description": "The broadcast's description. As with the title, you can set this field by modifying the broadcast resource or by setting the description field of the corresponding video resource." - }, - "isDefaultBroadcast": { - "type": "boolean" - }, - "liveChatId": { - "type": "string", - "description": "The id of the live chat for this broadcast." - }, - "publishedAt": { - "type": "string", - "description": "The date and time that the broadcast was added to YouTube's live broadcast schedule. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "scheduledEndTime": { - "type": "string", - "description": "The date and time that the broadcast is scheduled to end. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "scheduledStartTime": { - "type": "string", - "description": "The date and time that the broadcast is scheduled to start. 
The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time", - "annotations": { - "required": [ - "youtube.liveBroadcasts.insert", - "youtube.liveBroadcasts.update" - ] - } - }, - "thumbnails": { - "$ref": "ThumbnailDetails", - "description": "A map of thumbnail images associated with the broadcast. For each nested object in this object, the key is the name of the thumbnail image, and the value is an object that contains other information about the thumbnail." - }, - "title": { - "type": "string", - "description": "The broadcast's title. Note that the broadcast represents exactly one YouTube video. You can set this field by modifying the broadcast resource or by setting the title field of the corresponding video resource.", - "annotations": { - "required": [ - "youtube.liveBroadcasts.insert", - "youtube.liveBroadcasts.update" - ] - } - } - } - }, - "LiveBroadcastStatistics": { - "id": "LiveBroadcastStatistics", - "type": "object", - "description": "Statistics about the live broadcast. These represent a snapshot of the values at the time of the request. Statistics are only returned for live broadcasts.", - "properties": { - "concurrentViewers": { - "type": "string", - "description": "The number of viewers currently watching the broadcast. The property and its value will be present if the broadcast has current viewers and the broadcast owner has not hidden the viewcount for the video. Note that YouTube stops tracking the number of concurrent viewers for a broadcast when the broadcast ends. So, this property would not identify the number of viewers watching an archived video of a live broadcast that already ended.", - "format": "uint64" - }, - "totalChatCount": { - "type": "string", - "description": "The total number of live chat messages currently on the broadcast. The property and its value will be present if the broadcast is public, has the live chat feature enabled, and has at least one message. Note that this field will not be filled after the broadcast ends. So this property would not identify the number of chat messages for an archived video of a completed live broadcast.", - "format": "uint64" - } - } - }, - "LiveBroadcastStatus": { - "id": "LiveBroadcastStatus", - "type": "object", - "properties": { - "lifeCycleStatus": { - "type": "string", - "description": "The broadcast's status. The status can be updated using the API's liveBroadcasts.transition method.", - "enum": [ - "abandoned", - "complete", - "completeStarting", - "created", - "live", - "liveStarting", - "ready", - "reclaimed", - "revoked", - "testStarting", - "testing" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "liveBroadcastPriority": { - "type": "string", - "description": "Priority of the live broadcast event (internal state).", - "enum": [ - "high", - "low", - "normal" - ], - "enumDescriptions": [ - "", - "", - "" - ] - }, - "privacyStatus": { - "type": "string", - "description": "The broadcast's privacy status. Note that the broadcast represents exactly one YouTube video, so the privacy settings are identical to those supported for videos. 
In addition, you can set this field by modifying the broadcast resource or by setting the privacyStatus field of the corresponding video resource.", - "enum": [ - "private", - "public", - "unlisted" - ], - "enumDescriptions": [ - "", - "", - "" - ], - "annotations": { - "required": [ - "youtube.liveBroadcasts.insert", - "youtube.liveBroadcasts.update" - ] - } - }, - "recordingStatus": { - "type": "string", - "description": "The broadcast's recording status.", - "enum": [ - "notRecording", - "recorded", - "recording" - ], - "enumDescriptions": [ - "", - "", - "" - ] - } - } - }, - "LiveBroadcastTopic": { - "id": "LiveBroadcastTopic", - "type": "object", - "properties": { - "snippet": { - "$ref": "LiveBroadcastTopicSnippet", - "description": "Information about the topic matched." - }, - "type": { - "type": "string", - "description": "The type of the topic.", - "enum": [ - "videoGame" - ], - "enumDescriptions": [ - "" - ] - }, - "unmatched": { - "type": "boolean", - "description": "If this flag is set it means that we have not been able to match the topic title and type provided to a known entity." - } - } - }, - "LiveBroadcastTopicDetails": { - "id": "LiveBroadcastTopicDetails", - "type": "object", - "properties": { - "topics": { - "type": "array", - "items": { - "$ref": "LiveBroadcastTopic" - } - } - } - }, - "LiveBroadcastTopicSnippet": { - "id": "LiveBroadcastTopicSnippet", - "type": "object", - "properties": { - "name": { - "type": "string", - "description": "The name of the topic." - }, - "releaseDate": { - "type": "string", - "description": "The date at which the topic was released. Filled for types: videoGame" - } - } - }, - "LiveChatBan": { - "id": "LiveChatBan", - "type": "object", - "description": "A liveChatBan resource represents a ban for a YouTube live chat.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube assigns to uniquely identify the ban." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#liveChatBan\".", - "default": "youtube#liveChatBan" - }, - "snippet": { - "$ref": "LiveChatBanSnippet", - "description": "The snippet object contains basic details about the ban." - } - } - }, - "LiveChatBanSnippet": { - "id": "LiveChatBanSnippet", - "type": "object", - "properties": { - "banDurationSeconds": { - "type": "string", - "description": "The duration of a ban, only filled if the ban has type TEMPORARY.", - "format": "uint64" - }, - "bannedUserDetails": { - "$ref": "ChannelProfileDetails" - }, - "liveChatId": { - "type": "string", - "description": "The chat this ban is pertinent to." - }, - "type": { - "type": "string", - "description": "The type of ban.", - "enum": [ - "permanent", - "temporary" - ], - "enumDescriptions": [ - "", - "" - ] - } - } - }, - "LiveChatFanFundingEventDetails": { - "id": "LiveChatFanFundingEventDetails", - "type": "object", - "properties": { - "amountDisplayString": { - "type": "string", - "description": "A rendered string that displays the fund amount and currency to the user." - }, - "amountMicros": { - "type": "string", - "description": "The amount of the fund.", - "format": "uint64" - }, - "currency": { - "type": "string", - "description": "The currency in which the fund was made." - }, - "userComment": { - "type": "string", - "description": "The comment added by the user to this fan funding event." 
- } - } - }, - "LiveChatMessage": { - "id": "LiveChatMessage", - "type": "object", - "description": "A liveChatMessage resource represents a chat message in a YouTube Live Chat.", - "properties": { - "authorDetails": { - "$ref": "LiveChatMessageAuthorDetails", - "description": "The authorDetails object contains basic details about the user that posted this message." - }, - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube assigns to uniquely identify the message." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#liveChatMessage\".", - "default": "youtube#liveChatMessage" - }, - "snippet": { - "$ref": "LiveChatMessageSnippet", - "description": "The snippet object contains basic details about the message." - } - } - }, - "LiveChatMessageAuthorDetails": { - "id": "LiveChatMessageAuthorDetails", - "type": "object", - "properties": { - "channelId": { - "type": "string", - "description": "The YouTube channel ID." - }, - "channelUrl": { - "type": "string", - "description": "The channel's URL." - }, - "displayName": { - "type": "string", - "description": "The channel's display name." - }, - "isChatModerator": { - "type": "boolean", - "description": "Whether the author is a moderator of the live chat." - }, - "isChatOwner": { - "type": "boolean", - "description": "Whether the author is the owner of the live chat." - }, - "isChatSponsor": { - "type": "boolean", - "description": "Whether the author is a sponsor of the live chat." - }, - "isVerified": { - "type": "boolean", - "description": "Whether the author's identity has been verified by YouTube." - }, - "profileImageUrl": { - "type": "string", - "description": "The channels's avatar URL." - } - } - }, - "LiveChatMessageDeletedDetails": { - "id": "LiveChatMessageDeletedDetails", - "type": "object", - "properties": { - "deletedMessageId": { - "type": "string" - } - } - }, - "LiveChatMessageListResponse": { - "id": "LiveChatMessageListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of live chat messages.", - "items": { - "$ref": "LiveChatMessage" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#liveChatMessageListResponse\".", - "default": "youtube#liveChatMessageListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "offlineAt": { - "type": "string", - "description": "The date and time when the underlying stream went offline. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "pollingIntervalMillis": { - "type": "integer", - "description": "The amount of time the client should wait before polling again.", - "format": "uint32" - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." 
- } - } - }, - "LiveChatMessageRetractedDetails": { - "id": "LiveChatMessageRetractedDetails", - "type": "object", - "properties": { - "retractedMessageId": { - "type": "string" - } - } - }, - "LiveChatMessageSnippet": { - "id": "LiveChatMessageSnippet", - "type": "object", - "properties": { - "authorChannelId": { - "type": "string", - "description": "The ID of the user that authored this message, this field is not always filled. textMessageEvent - the user that wrote the message fanFundingEvent - the user that funded the broadcast newSponsorEvent - the user that just became a sponsor messageDeletedEvent - the moderator that took the action messageRetractedEvent - the author that retracted their message userBannedEvent - the moderator that took the action superChatEvent - the user that made the purchase" - }, - "displayMessage": { - "type": "string", - "description": "Contains a string that can be displayed to the user. If this field is not present the message is silent, at the moment only messages of type TOMBSTONE and CHAT_ENDED_EVENT are silent." - }, - "fanFundingEventDetails": { - "$ref": "LiveChatFanFundingEventDetails", - "description": "Details about the funding event, this is only set if the type is 'fanFundingEvent'." - }, - "hasDisplayContent": { - "type": "boolean", - "description": "Whether the message has display content that should be displayed to users." - }, - "liveChatId": { - "type": "string" - }, - "messageDeletedDetails": { - "$ref": "LiveChatMessageDeletedDetails" - }, - "messageRetractedDetails": { - "$ref": "LiveChatMessageRetractedDetails" - }, - "pollClosedDetails": { - "$ref": "LiveChatPollClosedDetails" - }, - "pollEditedDetails": { - "$ref": "LiveChatPollEditedDetails" - }, - "pollOpenedDetails": { - "$ref": "LiveChatPollOpenedDetails" - }, - "pollVotedDetails": { - "$ref": "LiveChatPollVotedDetails" - }, - "publishedAt": { - "type": "string", - "description": "The date and time when the message was orignally published. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "superChatDetails": { - "$ref": "LiveChatSuperChatDetails", - "description": "Details about the Super Chat event, this is only set if the type is 'superChatEvent'." - }, - "textMessageDetails": { - "$ref": "LiveChatTextMessageDetails", - "description": "Details about the text message, this is only set if the type is 'textMessageEvent'." - }, - "type": { - "type": "string", - "description": "The type of message, this will always be present, it determines the contents of the message as well as which fields will be present.", - "enum": [ - "chatEndedEvent", - "fanFundingEvent", - "messageDeletedEvent", - "messageRetractedEvent", - "newSponsorEvent", - "pollClosedEvent", - "pollEditedEvent", - "pollOpenedEvent", - "pollVotedEvent", - "sponsorOnlyModeEndedEvent", - "sponsorOnlyModeStartedEvent", - "superChatEvent", - "textMessageEvent", - "tombstone", - "userBannedEvent" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "userBannedDetails": { - "$ref": "LiveChatUserBannedMessageDetails" - } - } - }, - "LiveChatModerator": { - "id": "LiveChatModerator", - "type": "object", - "description": "A liveChatModerator resource represents a moderator for a YouTube live chat. A chat moderator has the ability to ban/unban users from a chat, remove message, etc.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." 
- }, - "id": { - "type": "string", - "description": "The ID that YouTube assigns to uniquely identify the moderator." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#liveChatModerator\".", - "default": "youtube#liveChatModerator" - }, - "snippet": { - "$ref": "LiveChatModeratorSnippet", - "description": "The snippet object contains basic details about the moderator." - } - } - }, - "LiveChatModeratorListResponse": { - "id": "LiveChatModeratorListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of moderators that match the request criteria.", - "items": { - "$ref": "LiveChatModerator" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#liveChatModeratorListResponse\".", - "default": "youtube#liveChatModeratorListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "prevPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set." - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "LiveChatModeratorSnippet": { - "id": "LiveChatModeratorSnippet", - "type": "object", - "properties": { - "liveChatId": { - "type": "string", - "description": "The ID of the live chat this moderator can act on." - }, - "moderatorDetails": { - "$ref": "ChannelProfileDetails", - "description": "Details about the moderator." - } - } - }, - "LiveChatPollClosedDetails": { - "id": "LiveChatPollClosedDetails", - "type": "object", - "properties": { - "pollId": { - "type": "string", - "description": "The id of the poll that was closed." - } - } - }, - "LiveChatPollEditedDetails": { - "id": "LiveChatPollEditedDetails", - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "items": { - "type": "array", - "items": { - "$ref": "LiveChatPollItem" - } - }, - "prompt": { - "type": "string" - } - } - }, - "LiveChatPollItem": { - "id": "LiveChatPollItem", - "type": "object", - "properties": { - "description": { - "type": "string", - "description": "Plain text description of the item." - }, - "itemId": { - "type": "string" - } - } - }, - "LiveChatPollOpenedDetails": { - "id": "LiveChatPollOpenedDetails", - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "items": { - "type": "array", - "items": { - "$ref": "LiveChatPollItem" - } - }, - "prompt": { - "type": "string" - } - } - }, - "LiveChatPollVotedDetails": { - "id": "LiveChatPollVotedDetails", - "type": "object", - "properties": { - "itemId": { - "type": "string", - "description": "The poll item the user chose." - }, - "pollId": { - "type": "string", - "description": "The poll the user voted on." 
- } - } - }, - "LiveChatSuperChatDetails": { - "id": "LiveChatSuperChatDetails", - "type": "object", - "properties": { - "amountDisplayString": { - "type": "string", - "description": "A rendered string that displays the fund amount and currency to the user." - }, - "amountMicros": { - "type": "string", - "description": "The amount purchased by the user, in micros (1,750,000 micros = 1.75).", - "format": "uint64" - }, - "currency": { - "type": "string", - "description": "The currency in which the purchase was made." - }, - "tier": { - "type": "integer", - "description": "The tier in which the amount belongs to. Lower amounts belong to lower tiers. Starts at 1.", - "format": "uint32" - }, - "userComment": { - "type": "string", - "description": "The comment added by the user to this Super Chat event." - } - } - }, - "LiveChatTextMessageDetails": { - "id": "LiveChatTextMessageDetails", - "type": "object", - "properties": { - "messageText": { - "type": "string", - "description": "The user's message." - } - } - }, - "LiveChatUserBannedMessageDetails": { - "id": "LiveChatUserBannedMessageDetails", - "type": "object", - "properties": { - "banDurationSeconds": { - "type": "string", - "description": "The duration of the ban. This property is only present if the banType is temporary.", - "format": "uint64" - }, - "banType": { - "type": "string", - "description": "The type of ban.", - "enum": [ - "permanent", - "temporary" - ], - "enumDescriptions": [ - "", - "" - ] - }, - "bannedUserDetails": { - "$ref": "ChannelProfileDetails", - "description": "The details of the user that was banned." - } - } - }, - "LiveStream": { - "id": "LiveStream", - "type": "object", - "description": "A live stream describes a live ingestion point.", - "properties": { - "cdn": { - "$ref": "CdnSettings", - "description": "The cdn object defines the live stream's content delivery network (CDN) settings. These settings provide details about the manner in which you stream your content to YouTube." - }, - "contentDetails": { - "$ref": "LiveStreamContentDetails", - "description": "The content_details object contains information about the stream, including the closed captions ingestion URL." - }, - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube assigns to uniquely identify the stream.", - "annotations": { - "required": [ - "youtube.liveStreams.update" - ] - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#liveStream\".", - "default": "youtube#liveStream" - }, - "snippet": { - "$ref": "LiveStreamSnippet", - "description": "The snippet object contains basic details about the stream, including its channel, title, and description." - }, - "status": { - "$ref": "LiveStreamStatus", - "description": "The status object contains information about live stream's status." - } - } - }, - "LiveStreamConfigurationIssue": { - "id": "LiveStreamConfigurationIssue", - "type": "object", - "properties": { - "description": { - "type": "string", - "description": "The long-form description of the issue and how to resolve it." - }, - "reason": { - "type": "string", - "description": "The short-form reason for this issue." 
- }, - "severity": { - "type": "string", - "description": "How severe this issue is to the stream.", - "enum": [ - "error", - "info", - "warning" - ], - "enumDescriptions": [ - "", - "", - "" - ] - }, - "type": { - "type": "string", - "description": "The kind of error happening.", - "enum": [ - "audioBitrateHigh", - "audioBitrateLow", - "audioBitrateMismatch", - "audioCodec", - "audioCodecMismatch", - "audioSampleRate", - "audioSampleRateMismatch", - "audioStereoMismatch", - "audioTooManyChannels", - "badContainer", - "bitrateHigh", - "bitrateLow", - "frameRateHigh", - "framerateMismatch", - "gopMismatch", - "gopSizeLong", - "gopSizeOver", - "gopSizeShort", - "interlacedVideo", - "multipleAudioStreams", - "multipleVideoStreams", - "noAudioStream", - "noVideoStream", - "openGop", - "resolutionMismatch", - "videoBitrateMismatch", - "videoCodec", - "videoCodecMismatch", - "videoIngestionStarved", - "videoInterlaceMismatch", - "videoProfileMismatch", - "videoResolutionSuboptimal", - "videoResolutionUnsupported" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - } - } - }, - "LiveStreamContentDetails": { - "id": "LiveStreamContentDetails", - "type": "object", - "description": "Detailed settings of a stream.", - "properties": { - "closedCaptionsIngestionUrl": { - "type": "string", - "description": "The ingestion URL where the closed captions of this stream are sent." - }, - "isReusable": { - "type": "boolean", - "description": "Indicates whether the stream is reusable, which means that it can be bound to multiple broadcasts. It is common for broadcasters to reuse the same stream for many different broadcasts if those broadcasts occur at different times.\n\nIf you set this value to false, then the stream will not be reusable, which means that it can only be bound to one broadcast. Non-reusable streams differ from reusable streams in the following ways: \n- A non-reusable stream can only be bound to one broadcast. \n- A non-reusable stream might be deleted by an automated process after the broadcast ends. \n- The liveStreams.list method does not list non-reusable streams if you call the method and set the mine parameter to true. The only way to use that method to retrieve the resource for a non-reusable stream is to use the id parameter to identify the stream." - } - } - }, - "LiveStreamHealthStatus": { - "id": "LiveStreamHealthStatus", - "type": "object", - "properties": { - "configurationIssues": { - "type": "array", - "description": "The configurations issues on this stream", - "items": { - "$ref": "LiveStreamConfigurationIssue" - } - }, - "lastUpdateTimeSeconds": { - "type": "string", - "description": "The last time this status was updated (in seconds)", - "format": "uint64" - }, - "status": { - "type": "string", - "description": "The status code of this stream", - "enum": [ - "bad", - "good", - "noData", - "ok", - "revoked" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - } - } - }, - "LiveStreamListResponse": { - "id": "LiveStreamListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." 
- }, - "items": { - "type": "array", - "description": "A list of live streams that match the request criteria.", - "items": { - "$ref": "LiveStream" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#liveStreamListResponse\".", - "default": "youtube#liveStreamListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "prevPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set." - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "LiveStreamSnippet": { - "id": "LiveStreamSnippet", - "type": "object", - "properties": { - "channelId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the channel that is transmitting the stream." - }, - "description": { - "type": "string", - "description": "The stream's description. The value cannot be longer than 10000 characters." - }, - "isDefaultStream": { - "type": "boolean" - }, - "publishedAt": { - "type": "string", - "description": "The date and time that the stream was created. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "title": { - "type": "string", - "description": "The stream's title. The value must be between 1 and 128 characters long.", - "annotations": { - "required": [ - "youtube.liveStreams.insert", - "youtube.liveStreams.update" - ] - } - } - } - }, - "LiveStreamStatus": { - "id": "LiveStreamStatus", - "type": "object", - "description": "Brief description of the live stream status.", - "properties": { - "healthStatus": { - "$ref": "LiveStreamHealthStatus", - "description": "The health status of the stream." - }, - "streamStatus": { - "type": "string", - "enum": [ - "active", - "created", - "error", - "inactive", - "ready" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - } - } - }, - "LocalizedProperty": { - "id": "LocalizedProperty", - "type": "object", - "properties": { - "default": { - "type": "string" - }, - "defaultLanguage": { - "$ref": "LanguageTag", - "description": "The language of the default property." - }, - "localized": { - "type": "array", - "items": { - "$ref": "LocalizedString" - } - } - } - }, - "LocalizedString": { - "id": "LocalizedString", - "type": "object", - "properties": { - "language": { - "type": "string" - }, - "value": { - "type": "string" - } - } - }, - "MonitorStreamInfo": { - "id": "MonitorStreamInfo", - "type": "object", - "description": "Settings and Info of the monitor stream", - "properties": { - "broadcastStreamDelayMs": { - "type": "integer", - "description": "If you have set the enableMonitorStream property to true, then this property determines the length of the live broadcast delay.", - "format": "uint32", - "annotations": { - "required": [ - "youtube.liveBroadcasts.update" - ] - } - }, - "embedHtml": { - "type": "string", - "description": "HTML code that embeds a player that plays the monitor stream." - }, - "enableMonitorStream": { - "type": "boolean", - "description": "This value determines whether the monitor stream is enabled for the broadcast. 
If the monitor stream is enabled, then YouTube will broadcast the event content on a special stream intended only for the broadcaster's consumption. The broadcaster can use the stream to review the event content and also to identify the optimal times to insert cuepoints.\n\nYou need to set this value to true if you intend to have a broadcast delay for your event.\n\nNote: This property cannot be updated once the broadcast is in the testing or live state.", - "annotations": { - "required": [ - "youtube.liveBroadcasts.update" - ] - } - } - } - }, - "PageInfo": { - "id": "PageInfo", - "type": "object", - "description": "Paging details for lists of resources, including total number of items available and number of resources returned in a single page.", - "properties": { - "resultsPerPage": { - "type": "integer", - "description": "The number of results included in the API response.", - "format": "int32" - }, - "totalResults": { - "type": "integer", - "description": "The total number of results in the result set.", - "format": "int32" - } - } - }, - "Playlist": { - "id": "Playlist", - "type": "object", - "description": "A playlist resource represents a YouTube playlist. A playlist is a collection of videos that can be viewed sequentially and shared with other users. A playlist can contain up to 200 videos, and YouTube does not limit the number of playlists that each user creates. By default, playlists are publicly visible to other users, but playlists can be public or private.\n\nYouTube also uses playlists to identify special collections of videos for a channel, such as: \n- uploaded videos \n- favorite videos \n- positively rated (liked) videos \n- watch history \n- watch later To be more specific, these lists are associated with a channel, which is a collection of a person, group, or company's videos, playlists, and other YouTube information. You can retrieve the playlist IDs for each of these lists from the channel resource for a given channel.\n\nYou can then use the playlistItems.list method to retrieve any of those lists. You can also add or remove items from those lists by calling the playlistItems.insert and playlistItems.delete methods.", - "properties": { - "contentDetails": { - "$ref": "PlaylistContentDetails", - "description": "The contentDetails object contains information like video count." - }, - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the playlist." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#playlist\".", - "default": "youtube#playlist" - }, - "localizations": { - "type": "object", - "description": "Localizations for different languages", - "additionalProperties": { - "$ref": "PlaylistLocalization", - "description": "The language tag, using string since map_key require simple types." - } - }, - "player": { - "$ref": "PlaylistPlayer", - "description": "The player object contains information that you would use to play the playlist in an embedded player." - }, - "snippet": { - "$ref": "PlaylistSnippet", - "description": "The snippet object contains basic details about the playlist, such as its title and description." - }, - "status": { - "$ref": "PlaylistStatus", - "description": "The status object contains status information for the playlist." 
- } - } - }, - "PlaylistContentDetails": { - "id": "PlaylistContentDetails", - "type": "object", - "properties": { - "itemCount": { - "type": "integer", - "description": "The number of videos in the playlist.", - "format": "uint32" - } - } - }, - "PlaylistItem": { - "id": "PlaylistItem", - "type": "object", - "description": "A playlistItem resource identifies another resource, such as a video, that is included in a playlist. In addition, the playlistItem resource contains details about the included resource that pertain specifically to how that resource is used in that playlist.\n\nYouTube uses playlists to identify special collections of videos for a channel, such as: \n- uploaded videos \n- favorite videos \n- positively rated (liked) videos \n- watch history \n- watch later To be more specific, these lists are associated with a channel, which is a collection of a person, group, or company's videos, playlists, and other YouTube information.\n\nYou can retrieve the playlist IDs for each of these lists from the channel resource for a given channel. You can then use the playlistItems.list method to retrieve any of those lists. You can also add or remove items from those lists by calling the playlistItems.insert and playlistItems.delete methods. For example, if a user gives a positive rating to a video, you would insert that video into the liked videos playlist for that user's channel.", - "properties": { - "contentDetails": { - "$ref": "PlaylistItemContentDetails", - "description": "The contentDetails object is included in the resource if the included item is a YouTube video. The object contains additional information about the video." - }, - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the playlist item." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#playlistItem\".", - "default": "youtube#playlistItem" - }, - "snippet": { - "$ref": "PlaylistItemSnippet", - "description": "The snippet object contains basic details about the playlist item, such as its title and position in the playlist." - }, - "status": { - "$ref": "PlaylistItemStatus", - "description": "The status object contains information about the playlist item's privacy status." - } - } - }, - "PlaylistItemContentDetails": { - "id": "PlaylistItemContentDetails", - "type": "object", - "properties": { - "endAt": { - "type": "string", - "description": "The time, measured in seconds from the start of the video, when the video should stop playing. (The playlist owner can specify the times when the video should start and stop playing when the video is played in the context of the playlist.) By default, assume that the video.endTime is the end of the video." - }, - "note": { - "type": "string", - "description": "A user-generated note for this item." - }, - "startAt": { - "type": "string", - "description": "The time, measured in seconds from the start of the video, when the video should start playing. (The playlist owner can specify the times when the video should start and stop playing when the video is played in the context of the playlist.) The default value is 0." - }, - "videoId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify a video. To retrieve the video resource, set the id query parameter to this value in your API request." 
- }, - "videoPublishedAt": { - "type": "string", - "description": "The date and time that the video was published to YouTube. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - } - } - }, - "PlaylistItemListResponse": { - "id": "PlaylistItemListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of playlist items that match the request criteria.", - "items": { - "$ref": "PlaylistItem" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#playlistItemListResponse\".", - "default": "youtube#playlistItemListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "prevPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set." - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "PlaylistItemSnippet": { - "id": "PlaylistItemSnippet", - "type": "object", - "description": "Basic details about a playlist, including title, description and thumbnails.", - "properties": { - "channelId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the user that added the item to the playlist." - }, - "channelTitle": { - "type": "string", - "description": "Channel title for the channel that the playlist item belongs to." - }, - "description": { - "type": "string", - "description": "The item's description." - }, - "playlistId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the playlist that the playlist item is in.", - "annotations": { - "required": [ - "youtube.playlistItems.insert", - "youtube.playlistItems.update" - ] - } - }, - "position": { - "type": "integer", - "description": "The order in which the item appears in the playlist. The value uses a zero-based index, so the first item has a position of 0, the second item has a position of 1, and so forth.", - "format": "uint32" - }, - "publishedAt": { - "type": "string", - "description": "The date and time that the item was added to the playlist. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "resourceId": { - "$ref": "ResourceId", - "description": "The id object contains information that can be used to uniquely identify the resource that is included in the playlist as the playlist item.", - "annotations": { - "required": [ - "youtube.playlistItems.insert", - "youtube.playlistItems.update" - ] - } - }, - "thumbnails": { - "$ref": "ThumbnailDetails", - "description": "A map of thumbnail images associated with the playlist item. For each object in the map, the key is the name of the thumbnail image, and the value is an object that contains other information about the thumbnail." - }, - "title": { - "type": "string", - "description": "The item's title." 
- } - } - }, - "PlaylistItemStatus": { - "id": "PlaylistItemStatus", - "type": "object", - "description": "Information about the playlist item's privacy status.", - "properties": { - "privacyStatus": { - "type": "string", - "description": "This resource's privacy status.", - "enum": [ - "private", - "public", - "unlisted" - ], - "enumDescriptions": [ - "", - "", - "" - ] - } - } - }, - "PlaylistListResponse": { - "id": "PlaylistListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of playlists that match the request criteria.", - "items": { - "$ref": "Playlist" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#playlistListResponse\".", - "default": "youtube#playlistListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "prevPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set." - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "PlaylistLocalization": { - "id": "PlaylistLocalization", - "type": "object", - "description": "Playlist localization setting", - "properties": { - "description": { - "type": "string", - "description": "The localized strings for playlist's description." - }, - "title": { - "type": "string", - "description": "The localized strings for playlist's title." - } - } - }, - "PlaylistPlayer": { - "id": "PlaylistPlayer", - "type": "object", - "properties": { - "embedHtml": { - "type": "string", - "description": "An \u003ciframe\u003e tag that embeds a player that will play the playlist." - } - } - }, - "PlaylistSnippet": { - "id": "PlaylistSnippet", - "type": "object", - "description": "Basic details about a playlist, including title, description and thumbnails.", - "properties": { - "channelId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the channel that published the playlist." - }, - "channelTitle": { - "type": "string", - "description": "The channel title of the channel that the video belongs to." - }, - "defaultLanguage": { - "type": "string", - "description": "The language of the playlist's default title and description." - }, - "description": { - "type": "string", - "description": "The playlist's description." - }, - "localized": { - "$ref": "PlaylistLocalization", - "description": "Localized title and description, read-only." - }, - "publishedAt": { - "type": "string", - "description": "The date and time that the playlist was created. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "tags": { - "type": "array", - "description": "Keyword tags associated with the playlist.", - "items": { - "type": "string" - } - }, - "thumbnails": { - "$ref": "ThumbnailDetails", - "description": "A map of thumbnail images associated with the playlist. 
For each object in the map, the key is the name of the thumbnail image, and the value is an object that contains other information about the thumbnail." - }, - "title": { - "type": "string", - "description": "The playlist's title.", - "annotations": { - "required": [ - "youtube.playlists.insert", - "youtube.playlists.update" - ] - } - } - } - }, - "PlaylistStatus": { - "id": "PlaylistStatus", - "type": "object", - "properties": { - "privacyStatus": { - "type": "string", - "description": "The playlist's privacy status.", - "enum": [ - "private", - "public", - "unlisted" - ], - "enumDescriptions": [ - "", - "", - "" - ] - } - } - }, - "PromotedItem": { - "id": "PromotedItem", - "type": "object", - "description": "Describes a single promoted item.", - "properties": { - "customMessage": { - "type": "string", - "description": "A custom message to display for this promotion. This field is currently ignored unless the promoted item is a website." - }, - "id": { - "$ref": "PromotedItemId", - "description": "Identifies the promoted item." - }, - "promotedByContentOwner": { - "type": "boolean", - "description": "If true, the content owner's name will be used when displaying the promotion. This field can only be set when the update is made on behalf of the content owner." - }, - "timing": { - "$ref": "InvideoTiming", - "description": "The temporal position within the video where the promoted item will be displayed. If present, it overrides the default timing." - } - } - }, - "PromotedItemId": { - "id": "PromotedItemId", - "type": "object", - "description": "Describes a single promoted item id. It is a union of various possible types.", - "properties": { - "recentlyUploadedBy": { - "type": "string", - "description": "If type is recentUpload, this field identifies the channel from which to take the recent upload. If missing, the channel is assumed to be the same channel for which the invideoPromotion is set." - }, - "type": { - "type": "string", - "description": "Describes the type of the promoted item.", - "enum": [ - "recentUpload", - "video", - "website" - ], - "enumDescriptions": [ - "", - "", - "" - ] - }, - "videoId": { - "type": "string", - "description": "If the promoted item represents a video, this field represents the unique YouTube ID identifying it. This field will be present only if type has the value video." - }, - "websiteUrl": { - "type": "string", - "description": "If the promoted item represents a website, this field represents the url pointing to the website. This field will be present only if type has the value website." - } - } - }, - "PropertyValue": { - "id": "PropertyValue", - "type": "object", - "description": "A pair Property / Value.", - "properties": { - "property": { - "type": "string", - "description": "A property." - }, - "value": { - "type": "string", - "description": "The property's value." - } - } - }, - "ResourceId": { - "id": "ResourceId", - "type": "object", - "description": "A resource id is a generic reference that points to another YouTube resource.", - "properties": { - "channelId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the referred resource, if that resource is a channel. This property is only present if the resourceId.kind value is youtube#channel." - }, - "kind": { - "type": "string", - "description": "The type of the API resource." - }, - "playlistId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the referred resource, if that resource is a playlist. 
This property is only present if the resourceId.kind value is youtube#playlist." - }, - "videoId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the referred resource, if that resource is a video. This property is only present if the resourceId.kind value is youtube#video." - } - } - }, - "SearchListResponse": { - "id": "SearchListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of results that match the search criteria.", - "items": { - "$ref": "SearchResult" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#searchListResponse\".", - "default": "youtube#searchListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "prevPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set." - }, - "regionCode": { - "type": "string" - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "SearchResult": { - "id": "SearchResult", - "type": "object", - "description": "A search result contains information about a YouTube video, channel, or playlist that matches the search parameters specified in an API request. While a search result points to a uniquely identifiable resource, like a video, it does not have its own persistent data.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "$ref": "ResourceId", - "description": "The id object contains information that can be used to uniquely identify the resource that matches the search request." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#searchResult\".", - "default": "youtube#searchResult" - }, - "snippet": { - "$ref": "SearchResultSnippet", - "description": "The snippet object contains basic details about a search result, such as its title or description. For example, if the search result is a video, then the title will be the video's title and the description will be the video's description." - } - } - }, - "SearchResultSnippet": { - "id": "SearchResultSnippet", - "type": "object", - "description": "Basic details about a search result, including title, description and thumbnails of the item referenced by the search result.", - "properties": { - "channelId": { - "type": "string", - "description": "The value that YouTube uses to uniquely identify the channel that published the resource that the search result identifies." - }, - "channelTitle": { - "type": "string", - "description": "The title of the channel that published the resource that the search result identifies." - }, - "description": { - "type": "string", - "description": "A description of the search result." - }, - "liveBroadcastContent": { - "type": "string", - "description": "It indicates if the resource (video or channel) has upcoming/active live broadcast content. 
Or it's \"none\" if there is not any upcoming/active live broadcasts.", - "enum": [ - "live", - "none", - "upcoming" - ], - "enumDescriptions": [ - "", - "", - "" - ] - }, - "publishedAt": { - "type": "string", - "description": "The creation date and time of the resource that the search result identifies. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "thumbnails": { - "$ref": "ThumbnailDetails", - "description": "A map of thumbnail images associated with the search result. For each object in the map, the key is the name of the thumbnail image, and the value is an object that contains other information about the thumbnail." - }, - "title": { - "type": "string", - "description": "The title of the search result." - } - } - }, - "Sponsor": { - "id": "Sponsor", - "type": "object", - "description": "A sponsor resource represents a sponsor for a YouTube channel. A sponsor provides recurring monetary support to a creator and receives special benefits.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube assigns to uniquely identify the sponsor." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#sponsor\".", - "default": "youtube#sponsor" - }, - "snippet": { - "$ref": "SponsorSnippet", - "description": "The snippet object contains basic details about the sponsor." - } - } - }, - "SponsorListResponse": { - "id": "SponsorListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of sponsors that match the request criteria.", - "items": { - "$ref": "Sponsor" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#sponsorListResponse\".", - "default": "youtube#sponsorListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "SponsorSnippet": { - "id": "SponsorSnippet", - "type": "object", - "properties": { - "channelId": { - "type": "string", - "description": "The id of the channel being sponsored." - }, - "sponsorDetails": { - "$ref": "ChannelProfileDetails", - "description": "Details about the sponsor." - }, - "sponsorSince": { - "type": "string", - "description": "The date and time when the user became a sponsor. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - } - } - }, - "Subscription": { - "id": "Subscription", - "type": "object", - "description": "A subscription resource contains information about a YouTube user subscription. 
A subscription notifies a user when new videos are added to a channel or when another user takes one of several actions on YouTube, such as uploading a video, rating a video, or commenting on a video.", - "properties": { - "contentDetails": { - "$ref": "SubscriptionContentDetails", - "description": "The contentDetails object contains basic statistics about the subscription." - }, - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the subscription." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#subscription\".", - "default": "youtube#subscription" - }, - "snippet": { - "$ref": "SubscriptionSnippet", - "description": "The snippet object contains basic details about the subscription, including its title and the channel that the user subscribed to." - }, - "subscriberSnippet": { - "$ref": "SubscriptionSubscriberSnippet", - "description": "The subscriberSnippet object contains basic details about the sbuscriber." - } - } - }, - "SubscriptionContentDetails": { - "id": "SubscriptionContentDetails", - "type": "object", - "description": "Details about the content to witch a subscription refers.", - "properties": { - "activityType": { - "type": "string", - "description": "The type of activity this subscription is for (only uploads, everything).", - "enum": [ - "all", - "uploads" - ], - "enumDescriptions": [ - "", - "" - ] - }, - "newItemCount": { - "type": "integer", - "description": "The number of new items in the subscription since its content was last read.", - "format": "uint32" - }, - "totalItemCount": { - "type": "integer", - "description": "The approximate number of items that the subscription points to.", - "format": "uint32" - } - } - }, - "SubscriptionListResponse": { - "id": "SubscriptionListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of subscriptions that match the request criteria.", - "items": { - "$ref": "Subscription" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#subscriptionListResponse\".", - "default": "youtube#subscriptionListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "prevPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set." - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "SubscriptionSnippet": { - "id": "SubscriptionSnippet", - "type": "object", - "description": "Basic details about a subscription, including title, description and thumbnails of the subscribed item.", - "properties": { - "channelId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the subscriber's channel." 
- }, - "channelTitle": { - "type": "string", - "description": "Channel title for the channel that the subscription belongs to." - }, - "description": { - "type": "string", - "description": "The subscription's details." - }, - "publishedAt": { - "type": "string", - "description": "The date and time that the subscription was created. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "resourceId": { - "$ref": "ResourceId", - "description": "The id object contains information about the channel that the user subscribed to.", - "annotations": { - "required": [ - "youtube.subscriptions.insert" - ] - } - }, - "thumbnails": { - "$ref": "ThumbnailDetails", - "description": "A map of thumbnail images associated with the video. For each object in the map, the key is the name of the thumbnail image, and the value is an object that contains other information about the thumbnail." - }, - "title": { - "type": "string", - "description": "The subscription's title." - } - } - }, - "SubscriptionSubscriberSnippet": { - "id": "SubscriptionSubscriberSnippet", - "type": "object", - "description": "Basic details about a subscription's subscriber including title, description, channel ID and thumbnails.", - "properties": { - "channelId": { - "type": "string", - "description": "The channel ID of the subscriber." - }, - "description": { - "type": "string", - "description": "The description of the subscriber." - }, - "thumbnails": { - "$ref": "ThumbnailDetails", - "description": "Thumbnails for this subscriber." - }, - "title": { - "type": "string", - "description": "The title of the subscriber." - } - } - }, - "SuperChatEvent": { - "id": "SuperChatEvent", - "type": "object", - "description": "A superChatEvent resource represents a Super Chat purchase on a YouTube channel.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube assigns to uniquely identify the Super Chat event." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#superChatEvent\".", - "default": "youtube#superChatEvent" - }, - "snippet": { - "$ref": "SuperChatEventSnippet", - "description": "The snippet object contains basic details about the Super Chat event." - } - } - }, - "SuperChatEventListResponse": { - "id": "SuperChatEventListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of Super Chat purchases that match the request criteria.", - "items": { - "$ref": "SuperChatEvent" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#superChatEventListResponse\".", - "default": "youtube#superChatEventListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." 
- } - } - }, - "SuperChatEventSnippet": { - "id": "SuperChatEventSnippet", - "type": "object", - "properties": { - "amountMicros": { - "type": "string", - "description": "The purchase amount, in micros of the purchase currency. e.g., 1 is represented as 1000000.", - "format": "uint64" - }, - "channelId": { - "type": "string", - "description": "Channel id where the event occurred." - }, - "commentText": { - "type": "string", - "description": "The text contents of the comment left by the user." - }, - "createdAt": { - "type": "string", - "description": "The date and time when the event occurred. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "currency": { - "type": "string", - "description": "The currency in which the purchase was made. ISO 4217." - }, - "displayString": { - "type": "string", - "description": "A rendered string that displays the purchase amount and currency (e.g., \"$1.00\"). The string is rendered for the given language." - }, - "messageType": { - "type": "integer", - "description": "The tier for the paid message, which is based on the amount of money spent to purchase the message.", - "format": "uint32" - }, - "supporterDetails": { - "$ref": "ChannelProfileDetails", - "description": "Details about the supporter." - } - } - }, - "Thumbnail": { - "id": "Thumbnail", - "type": "object", - "description": "A thumbnail is an image representing a YouTube resource.", - "properties": { - "height": { - "type": "integer", - "description": "(Optional) Height of the thumbnail image.", - "format": "uint32" - }, - "url": { - "type": "string", - "description": "The thumbnail image's URL." - }, - "width": { - "type": "integer", - "description": "(Optional) Width of the thumbnail image.", - "format": "uint32" - } - } - }, - "ThumbnailDetails": { - "id": "ThumbnailDetails", - "type": "object", - "description": "Internal representation of thumbnails for a YouTube resource.", - "properties": { - "default": { - "$ref": "Thumbnail", - "description": "The default image for this resource." - }, - "high": { - "$ref": "Thumbnail", - "description": "The high quality image for this resource." - }, - "maxres": { - "$ref": "Thumbnail", - "description": "The maximum resolution quality image for this resource." - }, - "medium": { - "$ref": "Thumbnail", - "description": "The medium quality image for this resource." - }, - "standard": { - "$ref": "Thumbnail", - "description": "The standard quality image for this resource." - } - } - }, - "ThumbnailSetResponse": { - "id": "ThumbnailSetResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of thumbnails.", - "items": { - "$ref": "ThumbnailDetails" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#thumbnailSetResponse\".", - "default": "youtube#thumbnailSetResponse" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "TokenPagination": { - "id": "TokenPagination", - "type": "object", - "description": "Stub token pagination template to suppress results." 
- }, - "Video": { - "id": "Video", - "type": "object", - "description": "A video resource represents a YouTube video.", - "properties": { - "ageGating": { - "$ref": "VideoAgeGating", - "description": "Age restriction details related to a video. This data can only be retrieved by the video owner." - }, - "contentDetails": { - "$ref": "VideoContentDetails", - "description": "The contentDetails object contains information about the video content, including the length of the video and its aspect ratio." - }, - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "fileDetails": { - "$ref": "VideoFileDetails", - "description": "The fileDetails object encapsulates information about the video file that was uploaded to YouTube, including the file's resolution, duration, audio and video codecs, stream bitrates, and more. This data can only be retrieved by the video owner." - }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the video.", - "annotations": { - "required": [ - "youtube.videos.update" - ] - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#video\".", - "default": "youtube#video" - }, - "liveStreamingDetails": { - "$ref": "VideoLiveStreamingDetails", - "description": "The liveStreamingDetails object contains metadata about a live video broadcast. The object will only be present in a video resource if the video is an upcoming, live, or completed live broadcast." - }, - "localizations": { - "type": "object", - "description": "List with all localizations.", - "additionalProperties": { - "$ref": "VideoLocalization", - "description": "The language tag, using string since map_key require simple types." - } - }, - "monetizationDetails": { - "$ref": "VideoMonetizationDetails", - "description": "The monetizationDetails object encapsulates information about the monetization status of the video." - }, - "player": { - "$ref": "VideoPlayer", - "description": "The player object contains information that you would use to play the video in an embedded player." - }, - "processingDetails": { - "$ref": "VideoProcessingDetails", - "description": "The processingProgress object encapsulates information about YouTube's progress in processing the uploaded video file. The properties in the object identify the current processing status and an estimate of the time remaining until YouTube finishes processing the video. This part also indicates whether different types of data or content, such as file details or thumbnail images, are available for the video.\n\nThe processingProgress object is designed to be polled so that the video uploaded can track the progress that YouTube has made in processing the uploaded video file. This data can only be retrieved by the video owner." - }, - "projectDetails": { - "$ref": "VideoProjectDetails", - "description": "The projectDetails object contains information about the project specific video metadata." - }, - "recordingDetails": { - "$ref": "VideoRecordingDetails", - "description": "The recordingDetails object encapsulates information about the location, date and address where the video was recorded." - }, - "snippet": { - "$ref": "VideoSnippet", - "description": "The snippet object contains basic details about the video, such as its title, description, and category." - }, - "statistics": { - "$ref": "VideoStatistics", - "description": "The statistics object contains statistics about the video." 
- }, - "status": { - "$ref": "VideoStatus", - "description": "The status object contains information about the video's uploading, processing, and privacy statuses." - }, - "suggestions": { - "$ref": "VideoSuggestions", - "description": "The suggestions object encapsulates suggestions that identify opportunities to improve the video quality or the metadata for the uploaded video. This data can only be retrieved by the video owner." - }, - "topicDetails": { - "$ref": "VideoTopicDetails", - "description": "The topicDetails object encapsulates information about Freebase topics associated with the video." - } - } - }, - "VideoAbuseReport": { - "id": "VideoAbuseReport", - "type": "object", - "properties": { - "comments": { - "type": "string", - "description": "Additional comments regarding the abuse report." - }, - "language": { - "type": "string", - "description": "The language that the content was viewed in." - }, - "reasonId": { - "type": "string", - "description": "The high-level, or primary, reason that the content is abusive. The value is an abuse report reason ID." - }, - "secondaryReasonId": { - "type": "string", - "description": "The specific, or secondary, reason that this content is abusive (if available). The value is an abuse report reason ID that is a valid secondary reason for the primary reason." - }, - "videoId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the video." - } - } - }, - "VideoAbuseReportReason": { - "id": "VideoAbuseReportReason", - "type": "object", - "description": "A videoAbuseReportReason resource identifies a reason that a video could be reported as abusive. Video abuse report reasons are used with video.ReportAbuse.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID of this abuse report reason." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#videoAbuseReportReason\".", - "default": "youtube#videoAbuseReportReason" - }, - "snippet": { - "$ref": "VideoAbuseReportReasonSnippet", - "description": "The snippet object contains basic details about the abuse report reason." - } - } - }, - "VideoAbuseReportReasonListResponse": { - "id": "VideoAbuseReportReasonListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of valid abuse reasons that are used with video.ReportAbuse.", - "items": { - "$ref": "VideoAbuseReportReason" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#videoAbuseReportReasonListResponse\".", - "default": "youtube#videoAbuseReportReasonListResponse" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "VideoAbuseReportReasonSnippet": { - "id": "VideoAbuseReportReasonSnippet", - "type": "object", - "description": "Basic details about a video category, such as its localized title.", - "properties": { - "label": { - "type": "string", - "description": "The localized label belonging to this abuse report reason." 
- }, - "secondaryReasons": { - "type": "array", - "description": "The secondary reasons associated with this reason, if any are available. (There might be 0 or more.)", - "items": { - "$ref": "VideoAbuseReportSecondaryReason" - } - } - } - }, - "VideoAbuseReportSecondaryReason": { - "id": "VideoAbuseReportSecondaryReason", - "type": "object", - "properties": { - "id": { - "type": "string", - "description": "The ID of this abuse report secondary reason." - }, - "label": { - "type": "string", - "description": "The localized label for this abuse report secondary reason." - } - } - }, - "VideoAgeGating": { - "id": "VideoAgeGating", - "type": "object", - "properties": { - "alcoholContent": { - "type": "boolean", - "description": "Indicates whether or not the video has alcoholic beverage content. Only users of legal purchasing age in a particular country, as identified by ICAP, can view the content." - }, - "restricted": { - "type": "boolean", - "description": "Age-restricted trailers. For redband trailers and adult-rated video-games. Only users aged 18+ can view the content. The the field is true the content is restricted to viewers aged 18+. Otherwise The field won't be present." - }, - "videoGameRating": { - "type": "string", - "description": "Video game rating, if any.", - "enum": [ - "anyone", - "m15Plus", - "m16Plus", - "m17Plus" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - } - } - }, - "VideoCategory": { - "id": "VideoCategory", - "type": "object", - "description": "A videoCategory resource identifies a category that has been or could be associated with uploaded videos.", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "id": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the video category." - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#videoCategory\".", - "default": "youtube#videoCategory" - }, - "snippet": { - "$ref": "VideoCategorySnippet", - "description": "The snippet object contains basic details about the video category, including its title." - } - } - }, - "VideoCategoryListResponse": { - "id": "VideoCategoryListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of video categories that can be associated with YouTube videos. In this map, the video category ID is the map key, and its value is the corresponding videoCategory resource.", - "items": { - "$ref": "VideoCategory" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#videoCategoryListResponse\".", - "default": "youtube#videoCategoryListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "prevPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set." - }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." 
- } - } - }, - "VideoCategorySnippet": { - "id": "VideoCategorySnippet", - "type": "object", - "description": "Basic details about a video category, such as its localized title.", - "properties": { - "assignable": { - "type": "boolean" - }, - "channelId": { - "type": "string", - "description": "The YouTube channel that created the video category.", - "default": "UCBR8-60-B28hp2BmDPdntcQ" - }, - "title": { - "type": "string", - "description": "The video category's title." - } - } - }, - "VideoContentDetails": { - "id": "VideoContentDetails", - "type": "object", - "description": "Details about the content of a YouTube Video.", - "properties": { - "caption": { - "type": "string", - "description": "The value of captions indicates whether the video has captions or not.", - "enum": [ - "false", - "true" - ], - "enumDescriptions": [ - "", - "" - ] - }, - "contentRating": { - "$ref": "ContentRating", - "description": "Specifies the ratings that the video received under various rating schemes." - }, - "countryRestriction": { - "$ref": "AccessPolicy", - "description": "The countryRestriction object contains information about the countries where a video is (or is not) viewable." - }, - "definition": { - "type": "string", - "description": "The value of definition indicates whether the video is available in high definition or only in standard definition.", - "enum": [ - "hd", - "sd" - ], - "enumDescriptions": [ - "", - "" - ] - }, - "dimension": { - "type": "string", - "description": "The value of dimension indicates whether the video is available in 3D or in 2D." - }, - "duration": { - "type": "string", - "description": "The length of the video. The tag value is an ISO 8601 duration in the format PT#M#S, in which the letters PT indicate that the value specifies a period of time, and the letters M and S refer to length in minutes and seconds, respectively. The # characters preceding the M and S letters are both integers that specify the number of minutes (or seconds) of the video. For example, a value of PT15M51S indicates that the video is 15 minutes and 51 seconds long." - }, - "hasCustomThumbnail": { - "type": "boolean", - "description": "Indicates whether the video uploader has provided a custom thumbnail image for the video. This property is only visible to the video uploader." - }, - "licensedContent": { - "type": "boolean", - "description": "The value of is_license_content indicates whether the video is licensed content." - }, - "projection": { - "type": "string", - "description": "Specifies the projection format of the video.", - "enum": [ - "360", - "rectangular" - ], - "enumDescriptions": [ - "", - "" - ] - }, - "regionRestriction": { - "$ref": "VideoContentDetailsRegionRestriction", - "description": "The regionRestriction object contains information about the countries where a video is (or is not) viewable. The object will contain either the contentDetails.regionRestriction.allowed property or the contentDetails.regionRestriction.blocked property." - } - } - }, - "VideoContentDetailsRegionRestriction": { - "id": "VideoContentDetailsRegionRestriction", - "type": "object", - "description": "DEPRECATED Region restriction of the video.", - "properties": { - "allowed": { - "type": "array", - "description": "A list of region codes that identify countries where the video is viewable. If this property is present and a country is not listed in its value, then the video is blocked from appearing in that country. 
If this property is present and contains an empty list, the video is blocked in all countries.", - "items": { - "type": "string" - } - }, - "blocked": { - "type": "array", - "description": "A list of region codes that identify countries where the video is blocked. If this property is present and a country is not listed in its value, then the video is viewable in that country. If this property is present and contains an empty list, the video is viewable in all countries.", - "items": { - "type": "string" - } - } - } - }, - "VideoFileDetails": { - "id": "VideoFileDetails", - "type": "object", - "description": "Describes original video file properties, including technical details about audio and video streams, but also metadata information like content length, digitization time, or geotagging information.", - "properties": { - "audioStreams": { - "type": "array", - "description": "A list of audio streams contained in the uploaded video file. Each item in the list contains detailed metadata about an audio stream.", - "items": { - "$ref": "VideoFileDetailsAudioStream" - } - }, - "bitrateBps": { - "type": "string", - "description": "The uploaded video file's combined (video and audio) bitrate in bits per second.", - "format": "uint64" - }, - "container": { - "type": "string", - "description": "The uploaded video file's container format." - }, - "creationTime": { - "type": "string", - "description": "The date and time when the uploaded video file was created. The value is specified in ISO 8601 format. Currently, the following ISO 8601 formats are supported: \n- Date only: YYYY-MM-DD \n- Naive time: YYYY-MM-DDTHH:MM:SS \n- Time with timezone: YYYY-MM-DDTHH:MM:SS+HH:MM" - }, - "durationMs": { - "type": "string", - "description": "The length of the uploaded video in milliseconds.", - "format": "uint64" - }, - "fileName": { - "type": "string", - "description": "The uploaded file's name. This field is present whether a video file or another type of file was uploaded." - }, - "fileSize": { - "type": "string", - "description": "The uploaded file's size in bytes. This field is present whether a video file or another type of file was uploaded.", - "format": "uint64" - }, - "fileType": { - "type": "string", - "description": "The uploaded file's type as detected by YouTube's video processing engine. Currently, YouTube only processes video files, but this field is present whether a video file or another type of file was uploaded.", - "enum": [ - "archive", - "audio", - "document", - "image", - "other", - "project", - "video" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - }, - "videoStreams": { - "type": "array", - "description": "A list of video streams contained in the uploaded video file. Each item in the list contains detailed metadata about a video stream.", - "items": { - "$ref": "VideoFileDetailsVideoStream" - } - } - } - }, - "VideoFileDetailsAudioStream": { - "id": "VideoFileDetailsAudioStream", - "type": "object", - "description": "Information about an audio stream.", - "properties": { - "bitrateBps": { - "type": "string", - "description": "The audio stream's bitrate, in bits per second.", - "format": "uint64" - }, - "channelCount": { - "type": "integer", - "description": "The number of audio channels that the stream contains.", - "format": "uint32" - }, - "codec": { - "type": "string", - "description": "The audio codec that the stream uses." - }, - "vendor": { - "type": "string", - "description": "A value that uniquely identifies a video vendor. 
Typically, the value is a four-letter vendor code." - } - } - }, - "VideoFileDetailsVideoStream": { - "id": "VideoFileDetailsVideoStream", - "type": "object", - "description": "Information about a video stream.", - "properties": { - "aspectRatio": { - "type": "number", - "description": "The video content's display aspect ratio, which specifies the aspect ratio in which the video should be displayed.", - "format": "double" - }, - "bitrateBps": { - "type": "string", - "description": "The video stream's bitrate, in bits per second.", - "format": "uint64" - }, - "codec": { - "type": "string", - "description": "The video codec that the stream uses." - }, - "frameRateFps": { - "type": "number", - "description": "The video stream's frame rate, in frames per second.", - "format": "double" - }, - "heightPixels": { - "type": "integer", - "description": "The encoded video content's height in pixels.", - "format": "uint32" - }, - "rotation": { - "type": "string", - "description": "The amount that YouTube needs to rotate the original source content to properly display the video.", - "enum": [ - "clockwise", - "counterClockwise", - "none", - "other", - "upsideDown" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - }, - "vendor": { - "type": "string", - "description": "A value that uniquely identifies a video vendor. Typically, the value is a four-letter vendor code." - }, - "widthPixels": { - "type": "integer", - "description": "The encoded video content's width in pixels. You can calculate the video's encoding aspect ratio as width_pixels / height_pixels.", - "format": "uint32" - } - } - }, - "VideoGetRatingResponse": { - "id": "VideoGetRatingResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of ratings that match the request criteria.", - "items": { - "$ref": "VideoRating" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#videoGetRatingResponse\".", - "default": "youtube#videoGetRatingResponse" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "VideoListResponse": { - "id": "VideoListResponse", - "type": "object", - "properties": { - "etag": { - "type": "string", - "description": "Etag of this resource." - }, - "eventId": { - "type": "string", - "description": "Serialized EventId of the request which produced this response." - }, - "items": { - "type": "array", - "description": "A list of videos that match the request criteria.", - "items": { - "$ref": "Video" - } - }, - "kind": { - "type": "string", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#videoListResponse\".", - "default": "youtube#videoListResponse" - }, - "nextPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set." - }, - "pageInfo": { - "$ref": "PageInfo" - }, - "prevPageToken": { - "type": "string", - "description": "The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set." 
- }, - "tokenPagination": { - "$ref": "TokenPagination" - }, - "visitorId": { - "type": "string", - "description": "The visitorId identifies the visitor." - } - } - }, - "VideoLiveStreamingDetails": { - "id": "VideoLiveStreamingDetails", - "type": "object", - "description": "Details about the live streaming metadata.", - "properties": { - "activeLiveChatId": { - "type": "string", - "description": "The ID of the currently active live chat attached to this video. This field is filled only if the video is a currently live broadcast that has live chat. Once the broadcast transitions to complete this field will be removed and the live chat closed down. For persistent broadcasts that live chat id will no longer be tied to this video but rather to the new video being displayed at the persistent page." - }, - "actualEndTime": { - "type": "string", - "description": "The time that the broadcast actually ended. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format. This value will not be available until the broadcast is over.", - "format": "date-time" - }, - "actualStartTime": { - "type": "string", - "description": "The time that the broadcast actually started. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format. This value will not be available until the broadcast begins.", - "format": "date-time" - }, - "concurrentViewers": { - "type": "string", - "description": "The number of viewers currently watching the broadcast. The property and its value will be present if the broadcast has current viewers and the broadcast owner has not hidden the viewcount for the video. Note that YouTube stops tracking the number of concurrent viewers for a broadcast when the broadcast ends. So, this property would not identify the number of viewers watching an archived video of a live broadcast that already ended.", - "format": "uint64" - }, - "scheduledEndTime": { - "type": "string", - "description": "The time that the broadcast is scheduled to end. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format. If the value is empty or the property is not present, then the broadcast is scheduled to continue indefinitely.", - "format": "date-time" - }, - "scheduledStartTime": { - "type": "string", - "description": "The time that the broadcast is scheduled to begin. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - } - } - }, - "VideoLocalization": { - "id": "VideoLocalization", - "type": "object", - "description": "Localized versions of certain video properties (e.g. title).", - "properties": { - "description": { - "type": "string", - "description": "Localized version of the video's description." - }, - "title": { - "type": "string", - "description": "Localized version of the video's title." - } - } - }, - "VideoMonetizationDetails": { - "id": "VideoMonetizationDetails", - "type": "object", - "description": "Details about monetization of a YouTube Video.", - "properties": { - "access": { - "$ref": "AccessPolicy", - "description": "The value of access indicates whether the video can be monetized or not." - } - } - }, - "VideoPlayer": { - "id": "VideoPlayer", - "type": "object", - "description": "Player to be used for a video playback.", - "properties": { - "embedHeight": { - "type": "string", - "format": "int64" - }, - "embedHtml": { - "type": "string", - "description": "An \u003ciframe\u003e tag that embeds a player that will play the video." 
- }, - "embedWidth": { - "type": "string", - "description": "The embed width", - "format": "int64" - } - } - }, - "VideoProcessingDetails": { - "id": "VideoProcessingDetails", - "type": "object", - "description": "Describes processing status and progress and availability of some other Video resource parts.", - "properties": { - "editorSuggestionsAvailability": { - "type": "string", - "description": "This value indicates whether video editing suggestions, which might improve video quality or the playback experience, are available for the video. You can retrieve these suggestions by requesting the suggestions part in your videos.list() request." - }, - "fileDetailsAvailability": { - "type": "string", - "description": "This value indicates whether file details are available for the uploaded video. You can retrieve a video's file details by requesting the fileDetails part in your videos.list() request." - }, - "processingFailureReason": { - "type": "string", - "description": "The reason that YouTube failed to process the video. This property will only have a value if the processingStatus property's value is failed.", - "enum": [ - "other", - "streamingFailed", - "transcodeFailed", - "uploadFailed" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - }, - "processingIssuesAvailability": { - "type": "string", - "description": "This value indicates whether the video processing engine has generated suggestions that might improve YouTube's ability to process the the video, warnings that explain video processing problems, or errors that cause video processing problems. You can retrieve these suggestions by requesting the suggestions part in your videos.list() request." - }, - "processingProgress": { - "$ref": "VideoProcessingDetailsProcessingProgress", - "description": "The processingProgress object contains information about the progress YouTube has made in processing the video. The values are really only relevant if the video's processing status is processing." - }, - "processingStatus": { - "type": "string", - "description": "The video's processing status. This value indicates whether YouTube was able to process the video or if the video is still being processed.", - "enum": [ - "failed", - "processing", - "succeeded", - "terminated" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - }, - "tagSuggestionsAvailability": { - "type": "string", - "description": "This value indicates whether keyword (tag) suggestions are available for the video. Tags can be added to a video's metadata to make it easier for other users to find the video. You can retrieve these suggestions by requesting the suggestions part in your videos.list() request." - }, - "thumbnailsAvailability": { - "type": "string", - "description": "This value indicates whether thumbnail images have been generated for the video." - } - } - }, - "VideoProcessingDetailsProcessingProgress": { - "id": "VideoProcessingDetailsProcessingProgress", - "type": "object", - "description": "Video processing progress and completion time estimate.", - "properties": { - "partsProcessed": { - "type": "string", - "description": "The number of parts of the video that YouTube has already processed. 
You can estimate the percentage of the video that YouTube has already processed by calculating:\n100 * parts_processed / parts_total\n\nNote that since the estimated number of parts could increase without a corresponding increase in the number of parts that have already been processed, it is possible that the calculated progress could periodically decrease while YouTube processes a video.", - "format": "uint64" - }, - "partsTotal": { - "type": "string", - "description": "An estimate of the total number of parts that need to be processed for the video. The number may be updated with more precise estimates while YouTube processes the video.", - "format": "uint64" - }, - "timeLeftMs": { - "type": "string", - "description": "An estimate of the amount of time, in millseconds, that YouTube needs to finish processing the video.", - "format": "uint64" - } - } - }, - "VideoProjectDetails": { - "id": "VideoProjectDetails", - "type": "object", - "description": "Project specific details about the content of a YouTube Video.", - "properties": { - "tags": { - "type": "array", - "description": "A list of project tags associated with the video during the upload.", - "items": { - "type": "string" - } - } - } - }, - "VideoRating": { - "id": "VideoRating", - "type": "object", - "properties": { - "rating": { - "type": "string", - "enum": [ - "dislike", - "like", - "none", - "unspecified" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - }, - "videoId": { - "type": "string" - } - } - }, - "VideoRecordingDetails": { - "id": "VideoRecordingDetails", - "type": "object", - "description": "Recording information associated with the video.", - "properties": { - "location": { - "$ref": "GeoPoint", - "description": "The geolocation information associated with the video." - }, - "locationDescription": { - "type": "string", - "description": "The text description of the location where the video was recorded." - }, - "recordingDate": { - "type": "string", - "description": "The date and time when the video was recorded. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sssZ) format.", - "format": "date-time" - } - } - }, - "VideoSnippet": { - "id": "VideoSnippet", - "type": "object", - "description": "Basic details about a video, including title, description, uploader, thumbnails and category.", - "properties": { - "categoryId": { - "type": "string", - "description": "The YouTube video category associated with the video." - }, - "channelId": { - "type": "string", - "description": "The ID that YouTube uses to uniquely identify the channel that the video was uploaded to." - }, - "channelTitle": { - "type": "string", - "description": "Channel title for the channel that the video belongs to." - }, - "defaultAudioLanguage": { - "type": "string", - "description": "The default_audio_language property specifies the language spoken in the video's default audio track." - }, - "defaultLanguage": { - "type": "string", - "description": "The language of the videos's default snippet." - }, - "description": { - "type": "string", - "description": "The video's description." - }, - "liveBroadcastContent": { - "type": "string", - "description": "Indicates if the video is an upcoming/active live broadcast. Or it's \"none\" if the video is not an upcoming/active live broadcast.", - "enum": [ - "live", - "none", - "upcoming" - ], - "enumDescriptions": [ - "", - "", - "" - ] - }, - "localized": { - "$ref": "VideoLocalization", - "description": "Localized snippet selected with the hl parameter. 
If no such localization exists, this field is populated with the default snippet. (Read-only)" - }, - "publishedAt": { - "type": "string", - "description": "The date and time that the video was uploaded. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "tags": { - "type": "array", - "description": "A list of keyword tags associated with the video. Tags may contain spaces.", - "items": { - "type": "string" - } - }, - "thumbnails": { - "$ref": "ThumbnailDetails", - "description": "A map of thumbnail images associated with the video. For each object in the map, the key is the name of the thumbnail image, and the value is an object that contains other information about the thumbnail." - }, - "title": { - "type": "string", - "description": "The video's title." - } - } - }, - "VideoStatistics": { - "id": "VideoStatistics", - "type": "object", - "description": "Statistics about the video, such as the number of times the video was viewed or liked.", - "properties": { - "commentCount": { - "type": "string", - "description": "The number of comments for the video.", - "format": "uint64" - }, - "dislikeCount": { - "type": "string", - "description": "The number of users who have indicated that they disliked the video by giving it a negative rating.", - "format": "uint64" - }, - "favoriteCount": { - "type": "string", - "description": "The number of users who currently have the video marked as a favorite video.", - "format": "uint64" - }, - "likeCount": { - "type": "string", - "description": "The number of users who have indicated that they liked the video by giving it a positive rating.", - "format": "uint64" - }, - "viewCount": { - "type": "string", - "description": "The number of times the video has been viewed.", - "format": "uint64" - } - } - }, - "VideoStatus": { - "id": "VideoStatus", - "type": "object", - "description": "Basic details about a video category, such as its localized title.", - "properties": { - "embeddable": { - "type": "boolean", - "description": "This value indicates if the video can be embedded on another website." - }, - "failureReason": { - "type": "string", - "description": "This value explains why a video failed to upload. This property is only present if the uploadStatus property indicates that the upload failed.", - "enum": [ - "codec", - "conversion", - "emptyFile", - "invalidFile", - "tooSmall", - "uploadAborted" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "" - ] - }, - "license": { - "type": "string", - "description": "The video's license.", - "enum": [ - "creativeCommon", - "youtube" - ], - "enumDescriptions": [ - "", - "" - ] - }, - "privacyStatus": { - "type": "string", - "description": "The video's privacy status.", - "enum": [ - "private", - "public", - "unlisted" - ], - "enumDescriptions": [ - "", - "", - "" - ] - }, - "publicStatsViewable": { - "type": "boolean", - "description": "This value indicates if the extended video statistics on the watch page can be viewed by everyone. Note that the view count, likes, etc will still be visible if this is disabled." - }, - "publishAt": { - "type": "string", - "description": "The date and time when the video is scheduled to publish. It can be set only if the privacy status of the video is private. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time" - }, - "rejectionReason": { - "type": "string", - "description": "This value explains why YouTube rejected an uploaded video. 
This property is only present if the uploadStatus property indicates that the upload was rejected.", - "enum": [ - "claim", - "copyright", - "duplicate", - "inappropriate", - "legal", - "length", - "termsOfUse", - "trademark", - "uploaderAccountClosed", - "uploaderAccountSuspended" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - }, - "uploadStatus": { - "type": "string", - "description": "The status of the uploaded video.", - "enum": [ - "deleted", - "failed", - "processed", - "rejected", - "uploaded" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - } - } - }, - "VideoSuggestions": { - "id": "VideoSuggestions", - "type": "object", - "description": "Specifies suggestions on how to improve video content, including encoding hints, tag suggestions, and editor suggestions.", - "properties": { - "editorSuggestions": { - "type": "array", - "description": "A list of video editing operations that might improve the video quality or playback experience of the uploaded video.", - "items": { - "type": "string", - "enum": [ - "audioQuietAudioSwap", - "videoAutoLevels", - "videoCrop", - "videoStabilize" - ], - "enumDescriptions": [ - "", - "", - "", - "" - ] - } - }, - "processingErrors": { - "type": "array", - "description": "A list of errors that will prevent YouTube from successfully processing the uploaded video video. These errors indicate that, regardless of the video's current processing status, eventually, that status will almost certainly be failed.", - "items": { - "type": "string", - "enum": [ - "archiveFile", - "audioFile", - "docFile", - "imageFile", - "notAVideoFile", - "projectFile", - "unsupportedSpatialAudioLayout" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "" - ] - } - }, - "processingHints": { - "type": "array", - "description": "A list of suggestions that may improve YouTube's ability to process the video.", - "items": { - "type": "string", - "enum": [ - "nonStreamableMov", - "sendBestQualityVideo", - "spatialAudio", - "sphericalVideo", - "vrVideo" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "" - ] - } - }, - "processingWarnings": { - "type": "array", - "description": "A list of reasons why YouTube may have difficulty transcoding the uploaded video or that might result in an erroneous transcoding. These warnings are generated before YouTube actually processes the uploaded video file. 
In addition, they identify issues that are unlikely to cause the video processing to fail but that might cause problems such as sync issues, video artifacts, or a missing audio track.", - "items": { - "type": "string", - "enum": [ - "hasEditlist", - "inconsistentResolution", - "problematicAudioCodec", - "problematicVideoCodec", - "unknownAudioCodec", - "unknownContainer", - "unknownVideoCodec", - "unsupportedSphericalProjectionType", - "unsupportedVrStereoMode" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "" - ] - } - }, - "tagSuggestions": { - "type": "array", - "description": "A list of keyword tags that could be added to the video's metadata to increase the likelihood that users will locate your video when searching or browsing on YouTube.", - "items": { - "$ref": "VideoSuggestionsTagSuggestion" - } - } - } - }, - "VideoSuggestionsTagSuggestion": { - "id": "VideoSuggestionsTagSuggestion", - "type": "object", - "description": "A single tag suggestion with it's relevance information.", - "properties": { - "categoryRestricts": { - "type": "array", - "description": "A set of video categories for which the tag is relevant. You can use this information to display appropriate tag suggestions based on the video category that the video uploader associates with the video. By default, tag suggestions are relevant for all categories if there are no restricts defined for the keyword.", - "items": { - "type": "string" - } - }, - "tag": { - "type": "string", - "description": "The keyword tag suggested for the video." - } - } - }, - "VideoTopicDetails": { - "id": "VideoTopicDetails", - "type": "object", - "description": "Freebase topic information related to the video.", - "properties": { - "relevantTopicIds": { - "type": "array", - "description": "Similar to topic_id, except that these topics are merely relevant to the video. These are topics that may be mentioned in, or appear in the video. You can retrieve information about each topic using Freebase Topic API.", - "items": { - "type": "string" - } - }, - "topicCategories": { - "type": "array", - "description": "A list of Wikipedia URLs that provide a high-level description of the video's content.", - "items": { - "type": "string" - } - }, - "topicIds": { - "type": "array", - "description": "A list of Freebase topic IDs that are centrally associated with the video. These are topics that are centrally featured in the video, and it can be said that the video is mainly about each of these. You can retrieve information about each topic using the Freebase Topic API.", - "items": { - "type": "string" - } - } - } - }, - "WatchSettings": { - "id": "WatchSettings", - "type": "object", - "description": "Branding properties for the watch. All deprecated.", - "properties": { - "backgroundColor": { - "type": "string", - "description": "The text color for the video watch page's branded area." - }, - "featuredPlaylistId": { - "type": "string", - "description": "An ID that uniquely identifies a playlist that displays next to the video player." - }, - "textColor": { - "type": "string", - "description": "The background color for the video watch page's branded area." - } - } - } - }, - "resources": { - "activities": { - "methods": { - "insert": { - "id": "youtube.activities.insert", - "path": "activities", - "httpMethod": "POST", - "description": "Posts a bulletin for a specific channel. 
(The user submitting the request must be authorized to act on the channel's behalf.)\n\nNote: Even though an activity resource can contain information about actions like a user rating a video or marking a video as a favorite, you need to use other API methods to generate those activity resources. For example, you would use the API's videos.rate() method to rate a video and the playlistItems.insert() method to mark a video as a favorite.", - "parameters": { - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. It identifies the properties that the write operation will set as well as the properties that the API response will include.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "Activity" - }, - "response": { - "$ref": "Activity" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "list": { - "id": "youtube.activities.list", - "path": "activities", - "httpMethod": "GET", - "description": "Returns a list of channel activity events that match the request criteria. For example, you can retrieve events associated with a particular channel, events associated with the user's subscriptions and Google+ friends, or the YouTube home page feed, which is customized for each user.", - "parameters": { - "channelId": { - "type": "string", - "description": "The channelId parameter specifies a unique YouTube channel ID. The API will then return a list of that channel's activities.", - "location": "query" - }, - "home": { - "type": "boolean", - "description": "Set this parameter's value to true to retrieve the activity feed that displays on the YouTube home page for the currently authenticated user.", - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.", - "default": "5", - "format": "uint32", - "minimum": "0", - "maximum": "50", - "location": "query" - }, - "mine": { - "type": "boolean", - "description": "Set this parameter's value to true to retrieve a feed of the authenticated user's activities.", - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken and prevPageToken properties identify other pages that could be retrieved.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more activity resource properties that the API response will include.\n\nIf the parameter identifies a property that contains child properties, the child properties will be included in the response. For example, in an activity resource, the snippet property contains other properties that identify the type of activity, a display title for the activity, and so forth. If you set part=snippet, the API response will also contain all of those nested properties.", - "required": true, - "location": "query" - }, - "publishedAfter": { - "type": "string", - "description": "The publishedAfter parameter specifies the earliest date and time that an activity could have occurred for that activity to be included in the API response. If the parameter value specifies a day, but not a time, then any activities that occurred that day will be included in the result set. 
The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time", - "location": "query" - }, - "publishedBefore": { - "type": "string", - "description": "The publishedBefore parameter specifies the date and time before which an activity must have occurred for that activity to be included in the API response. If the parameter value specifies a day, but not a time, then any activities that occurred that day will be excluded from the result set. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.", - "format": "date-time", - "location": "query" - }, - "regionCode": { - "type": "string", - "description": "The regionCode parameter instructs the API to return results for the specified country. The parameter value is an ISO 3166-1 alpha-2 country code. YouTube uses this value when the authorized user's previous activity on YouTube does not provide enough information to generate the activity feed.", - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "ActivityListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly" - ] - } - } - }, - "captions": { - "methods": { - "delete": { - "id": "youtube.captions.delete", - "path": "captions", - "httpMethod": "DELETE", - "description": "Deletes a specified caption track.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter identifies the caption track that is being deleted. The value is a caption track ID as identified by the id property in a caption resource.", - "required": true, - "location": "query" - }, - "onBehalfOf": { - "type": "string", - "description": "ID of the Google+ Page for the channel that the request is be on behalf of", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The actual CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "download": { - "id": "youtube.captions.download", - "path": "captions/{id}", - "httpMethod": "GET", - "description": "Downloads a caption track. The caption track is returned in its original format unless the request specifies a value for the tfmt parameter and in its original language unless the request specifies a value for the tlang parameter.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter identifies the caption track that is being retrieved. 
The value is a caption track ID as identified by the id property in a caption resource.", - "required": true, - "location": "path" - }, - "onBehalfOf": { - "type": "string", - "description": "ID of the Google+ Page for the channel that the request is be on behalf of", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The actual CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "tfmt": { - "type": "string", - "description": "The tfmt parameter specifies that the caption track should be returned in a specific format. If the parameter is not included in the request, the track is returned in its original format.", - "enum": [ - "sbv", - "scc", - "srt", - "ttml", - "vtt" - ], - "enumDescriptions": [ - "SubViewer subtitle.", - "Scenarist Closed Caption format.", - "SubRip subtitle.", - "Timed Text Markup Language caption.", - "Web Video Text Tracks caption." - ], - "location": "query" - }, - "tlang": { - "type": "string", - "description": "The tlang parameter specifies that the API response should return a translation of the specified caption track. The parameter value is an ISO 639-1 two-letter language code that identifies the desired caption language. The translation is generated by using machine translation, such as Google Translate.", - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ], - "supportsMediaDownload": true - }, - "insert": { - "id": "youtube.captions.insert", - "path": "captions", - "httpMethod": "POST", - "description": "Uploads a caption track.", - "parameters": { - "onBehalfOf": { - "type": "string", - "description": "ID of the Google+ Page for the channel that the request is be on behalf of", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The actual CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies the caption resource parts that the API response will include. 
Set the parameter value to snippet.", - "required": true, - "location": "query" - }, - "sync": { - "type": "boolean", - "description": "The sync parameter indicates whether YouTube should automatically synchronize the caption file with the audio track of the video. If you set the value to true, YouTube will disregard any time codes that are in the uploaded caption file and generate new time codes for the captions.\n\nYou should set the sync parameter to true if you are uploading a transcript, which has no time codes, or if you suspect the time codes in your file are incorrect and want YouTube to try to fix them.", - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "Caption" - }, - "response": { - "$ref": "Caption" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ], - "supportsMediaUpload": true, - "mediaUpload": { - "accept": [ - "*/*", - "application/octet-stream", - "text/xml" - ], - "maxSize": "100MB", - "protocols": { - "simple": { - "multipart": true, - "path": "/upload/youtube/v3/captions" - }, - "resumable": { - "multipart": true, - "path": "/resumable/upload/youtube/v3/captions" - } - } - } - }, - "list": { - "id": "youtube.captions.list", - "path": "captions", - "httpMethod": "GET", - "description": "Returns a list of caption tracks that are associated with a specified video. Note that the API response does not contain the actual captions and that the captions.download method provides the ability to retrieve a caption track.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of IDs that identify the caption resources that should be retrieved. Each ID must identify a caption track associated with the specified video.", - "location": "query" - }, - "onBehalfOf": { - "type": "string", - "description": "ID of the Google+ Page for the channel that the request is on behalf of.", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The actual CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more caption resource parts that the API response will include. 
The part names that you can include in the parameter value are id and snippet.", - "required": true, - "location": "query" - }, - "videoId": { - "type": "string", - "description": "The videoId parameter specifies the YouTube video ID of the video for which the API should return caption tracks.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part", - "videoId" - ], - "response": { - "$ref": "CaptionListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "update": { - "id": "youtube.captions.update", - "path": "captions", - "httpMethod": "PUT", - "description": "Updates a caption track. When updating a caption track, you can change the track's draft status, upload a new caption file for the track, or both.", - "parameters": { - "onBehalfOf": { - "type": "string", - "description": "ID of the Google+ Page for the channel that the request is be on behalf of", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The actual CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. It identifies the properties that the write operation will set as well as the properties that the API response will include. Set the property value to snippet if you are updating the track's draft status. Otherwise, set the property value to id.", - "required": true, - "location": "query" - }, - "sync": { - "type": "boolean", - "description": "Note: The API server only processes the parameter value if the request contains an updated caption file.\n\nThe sync parameter indicates whether YouTube should automatically synchronize the caption file with the audio track of the video. If you set the value to true, YouTube will automatically synchronize the caption track with the audio track.", - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "Caption" - }, - "response": { - "$ref": "Caption" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ], - "supportsMediaUpload": true, - "mediaUpload": { - "accept": [ - "*/*", - "application/octet-stream", - "text/xml" - ], - "maxSize": "100MB", - "protocols": { - "simple": { - "multipart": true, - "path": "/upload/youtube/v3/captions" - }, - "resumable": { - "multipart": true, - "path": "/resumable/upload/youtube/v3/captions" - } - } - } - } - } - }, - "channelBanners": { - "methods": { - "insert": { - "id": "youtube.channelBanners.insert", - "path": "channelBanners/insert", - "httpMethod": "POST", - "description": "Uploads a channel banner image to YouTube. 
This method represents the first two steps in a three-step process to update the banner image for a channel:\n\n- Call the channelBanners.insert method to upload the binary image data to YouTube. The image must have a 16:9 aspect ratio and be at least 2120x1192 pixels.\n- Extract the url property's value from the response that the API returns for step 1.\n- Call the channels.update method to update the channel's branding settings. Set the brandingSettings.image.bannerExternalUrl property's value to the URL obtained in step 2.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - } - }, - "request": { - "$ref": "ChannelBannerResource" - }, - "response": { - "$ref": "ChannelBannerResource" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.upload" - ], - "supportsMediaUpload": true, - "mediaUpload": { - "accept": [ - "application/octet-stream", - "image/jpeg", - "image/png" - ], - "maxSize": "6MB", - "protocols": { - "simple": { - "multipart": true, - "path": "/upload/youtube/v3/channelBanners/insert" - }, - "resumable": { - "multipart": true, - "path": "/resumable/upload/youtube/v3/channelBanners/insert" - } - } - } - } - } - }, - "channelSections": { - "methods": { - "delete": { - "id": "youtube.channelSections.delete", - "path": "channelSections", - "httpMethod": "DELETE", - "description": "Deletes a channelSection.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies the YouTube channelSection ID for the resource that is being deleted. In a channelSection resource, the id property specifies the YouTube channelSection ID.", - "required": true, - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. 
The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "insert": { - "id": "youtube.channelSections.insert", - "path": "channelSections", - "httpMethod": "POST", - "description": "Adds a channelSection for the authenticated user's channel.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. It identifies the properties that the write operation will set as well as the properties that the API response will include.\n\nThe part names that you can include in the parameter value are snippet and contentDetails.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "ChannelSection" - }, - "response": { - "$ref": "ChannelSection" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "list": { - "id": "youtube.channelSections.list", - "path": "channelSections", - "httpMethod": "GET", - "description": "Returns channelSection resources that match the API request criteria.", - "parameters": { - "channelId": { - "type": "string", - "description": "The channelId parameter specifies a YouTube channel ID. 
The API will only return that channel's channelSections.", - "location": "query" - }, - "hl": { - "type": "string", - "description": "The hl parameter indicates that the snippet.localized property values in the returned channelSection resources should be in the specified language if localized values for that language are available. For example, if the API request specifies hl=de, the snippet.localized properties in the API response will contain German titles if German titles are available. Channel owners can provide localized channel section titles using either the channelSections.insert or channelSections.update method.", - "location": "query" - }, - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of the YouTube channelSection ID(s) for the resource(s) that are being retrieved. In a channelSection resource, the id property specifies the YouTube channelSection ID.", - "location": "query" - }, - "mine": { - "type": "boolean", - "description": "Set this parameter's value to true to retrieve a feed of the authenticated user's channelSections.", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more channelSection resource properties that the API response will include. The part names that you can include in the parameter value are id, snippet, and contentDetails.\n\nIf the parameter identifies a property that contains child properties, the child properties will be included in the response. For example, in a channelSection resource, the snippet property contains other properties, such as a display title for the channelSection. If you set part=snippet, the API response will also contain all of those nested properties.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "ChannelSectionListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "update": { - "id": "youtube.channelSections.update", - "path": "channelSections", - "httpMethod": "PUT", - "description": "Update a channelSection.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. 
It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. It identifies the properties that the write operation will set as well as the properties that the API response will include.\n\nThe part names that you can include in the parameter value are snippet and contentDetails.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "ChannelSection" - }, - "response": { - "$ref": "ChannelSection" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - } - } - }, - "channels": { - "methods": { - "list": { - "id": "youtube.channels.list", - "path": "channels", - "httpMethod": "GET", - "description": "Returns a collection of zero or more channel resources that match the request criteria.", - "parameters": { - "categoryId": { - "type": "string", - "description": "The categoryId parameter specifies a YouTube guide category, thereby requesting YouTube channels associated with that category.", - "location": "query" - }, - "forUsername": { - "type": "string", - "description": "The forUsername parameter specifies a YouTube username, thereby requesting the channel associated with that username.", - "location": "query" - }, - "hl": { - "type": "string", - "description": "The hl parameter should be used for filter out the properties that are not in the given language. Used for the brandingSettings part.", - "location": "query" - }, - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of the YouTube channel ID(s) for the resource(s) that are being retrieved. In a channel resource, the id property specifies the channel's YouTube channel ID.", - "location": "query" - }, - "managedByMe": { - "type": "boolean", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nSet this parameter's value to true to instruct the API to only return channels managed by the content owner that the onBehalfOfContentOwner parameter specifies. 
The user must be authenticated as a CMS account linked to the specified content owner and onBehalfOfContentOwner must be provided.", - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.", - "default": "5", - "format": "uint32", - "minimum": "0", - "maximum": "50", - "location": "query" - }, - "mine": { - "type": "boolean", - "description": "Set this parameter's value to true to instruct the API to only return channels owned by the authenticated user.", - "location": "query" - }, - "mySubscribers": { - "type": "boolean", - "description": "Use the subscriptions.list method and its mySubscribers parameter to retrieve a list of subscribers to the authenticated user's channel.", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken and prevPageToken properties identify other pages that could be retrieved.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more channel resource properties that the API response will include.\n\nIf the parameter identifies a property that contains child properties, the child properties will be included in the response. For example, in a channel resource, the contentDetails property contains other properties, such as the uploads properties. As such, if you set part=contentDetails, the API response will also contain all of those nested properties.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "ChannelListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly", - "https://www.googleapis.com/auth/youtubepartner", - "https://www.googleapis.com/auth/youtubepartner-channel-audit" - ] - }, - "update": { - "id": "youtube.channels.update", - "path": "channels", - "httpMethod": "PUT", - "description": "Updates a channel's metadata. Note that this method currently only supports updates to the channel resource's brandingSettings and invideoPromotion objects and their child properties.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "The onBehalfOfContentOwner parameter indicates that the authenticated user is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. 
It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The actual CMS account that the user authenticates with needs to be linked to the specified YouTube content owner.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. It identifies the properties that the write operation will set as well as the properties that the API response will include.\n\nThe API currently only allows the parameter value to be set to either brandingSettings or invideoPromotion. (You cannot update both of those parts with a single request.)\n\nNote that this method overrides the existing values for all of the mutable properties that are contained in any parts that the parameter value specifies.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "Channel" - }, - "response": { - "$ref": "Channel" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - } - } - }, - "commentThreads": { - "methods": { - "insert": { - "id": "youtube.commentThreads.insert", - "path": "commentThreads", - "httpMethod": "POST", - "description": "Creates a new top-level comment. To add a reply to an existing comment, use the comments.insert method instead.", - "parameters": { - "part": { - "type": "string", - "description": "The part parameter identifies the properties that the API response will include. Set the parameter value to snippet. The snippet part has a quota cost of 2 units.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "CommentThread" - }, - "response": { - "$ref": "CommentThread" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "list": { - "id": "youtube.commentThreads.list", - "path": "commentThreads", - "httpMethod": "GET", - "description": "Returns a list of comment threads that match the API request parameters.", - "parameters": { - "allThreadsRelatedToChannelId": { - "type": "string", - "description": "The allThreadsRelatedToChannelId parameter instructs the API to return all comment threads associated with the specified channel. The response can include comments about the channel or about the channel's videos.", - "location": "query" - }, - "channelId": { - "type": "string", - "description": "The channelId parameter instructs the API to return comment threads containing comments about the specified channel. 
(The response will not include comments left on videos that the channel uploaded.)", - "location": "query" - }, - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of comment thread IDs for the resources that should be retrieved.", - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.\n\nNote: This parameter is not supported for use in conjunction with the id parameter.", - "default": "20", - "format": "uint32", - "minimum": "1", - "maximum": "100", - "location": "query" - }, - "moderationStatus": { - "type": "string", - "description": "Set this parameter to limit the returned comment threads to a particular moderation state.\n\nNote: This parameter is not supported for use in conjunction with the id parameter.", - "default": "MODERATION_STATUS_PUBLISHED", - "enum": [ - "heldForReview", - "likelySpam", - "published" - ], - "enumDescriptions": [ - "Retrieve comment threads that are awaiting review by a moderator. A comment thread can be included in the response if the top-level comment or at least one of the replies to that comment are awaiting review.", - "Retrieve comment threads classified as likely to be spam. A comment thread can be included in the response if the top-level comment or at least one of the replies to that comment is considered likely to be spam.", - "Retrieve threads of published comments. This is the default value. A comment thread can be included in the response if its top-level comment has been published." - ], - "location": "query" - }, - "order": { - "type": "string", - "description": "The order parameter specifies the order in which the API response should list comment threads. Valid values are: \n- time - Comment threads are ordered by time. This is the default behavior.\n- relevance - Comment threads are ordered by relevance.Note: This parameter is not supported for use in conjunction with the id parameter.", - "default": "true", - "enum": [ - "relevance", - "time" - ], - "enumDescriptions": [ - "Order by relevance.", - "Order by time." - ], - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken property identifies the next page of the result that can be retrieved.\n\nNote: This parameter is not supported for use in conjunction with the id parameter.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more commentThread resource properties that the API response will include.", - "required": true, - "location": "query" - }, - "searchTerms": { - "type": "string", - "description": "The searchTerms parameter instructs the API to limit the API response to only contain comments that contain the specified search terms.\n\nNote: This parameter is not supported for use in conjunction with the id parameter.", - "location": "query" - }, - "textFormat": { - "type": "string", - "description": "Set this parameter's value to html or plainText to instruct the API to return the comments left by users in html formatted or in plain text.", - "default": "FORMAT_HTML", - "enum": [ - "html", - "plainText" - ], - "enumDescriptions": [ - "Returns the comments in HTML format. This is the default value.", - "Returns the comments in plain text format." 
- ], - "location": "query" - }, - "videoId": { - "type": "string", - "description": "The videoId parameter instructs the API to return comment threads associated with the specified video ID.", - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "CommentThreadListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "update": { - "id": "youtube.commentThreads.update", - "path": "commentThreads", - "httpMethod": "PUT", - "description": "Modifies the top-level comment in a comment thread.", - "parameters": { - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of commentThread resource properties that the API response will include. You must at least include the snippet part in the parameter value since that part contains all of the properties that the API request can update.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "CommentThread" - }, - "response": { - "$ref": "CommentThread" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - } - } - }, - "comments": { - "methods": { - "delete": { - "id": "youtube.comments.delete", - "path": "comments", - "httpMethod": "DELETE", - "description": "Deletes a comment.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies the comment ID for the resource that is being deleted.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "insert": { - "id": "youtube.comments.insert", - "path": "comments", - "httpMethod": "POST", - "description": "Creates a reply to an existing comment. Note: To create a top-level comment, use the commentThreads.insert method.", - "parameters": { - "part": { - "type": "string", - "description": "The part parameter identifies the properties that the API response will include. Set the parameter value to snippet. The snippet part has a quota cost of 2 units.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "Comment" - }, - "response": { - "$ref": "Comment" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "list": { - "id": "youtube.comments.list", - "path": "comments", - "httpMethod": "GET", - "description": "Returns a list of comments that match the API request parameters.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of comment IDs for the resources that are being retrieved. In a comment resource, the id property specifies the comment's ID.", - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.\n\nNote: This parameter is not supported for use in conjunction with the id parameter.", - "default": "20", - "format": "uint32", - "minimum": "1", - "maximum": "100", - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. 
In an API response, the nextPageToken property identifies the next page of the result that can be retrieved.\n\nNote: This parameter is not supported for use in conjunction with the id parameter.", - "location": "query" - }, - "parentId": { - "type": "string", - "description": "The parentId parameter specifies the ID of the comment for which replies should be retrieved.\n\nNote: YouTube currently supports replies only for top-level comments. However, replies to replies may be supported in the future.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more comment resource properties that the API response will include.", - "required": true, - "location": "query" - }, - "textFormat": { - "type": "string", - "description": "This parameter indicates whether the API should return comments formatted as HTML or as plain text.", - "default": "FORMAT_HTML", - "enum": [ - "html", - "plainText" - ], - "enumDescriptions": [ - "Returns the comments in HTML format. This is the default value.", - "Returns the comments in plain text format." - ], - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "CommentListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "markAsSpam": { - "id": "youtube.comments.markAsSpam", - "path": "comments/markAsSpam", - "httpMethod": "POST", - "description": "Expresses the caller's opinion that one or more comments should be flagged as spam.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of IDs of comments that the caller believes should be classified as spam.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "setModerationStatus": { - "id": "youtube.comments.setModerationStatus", - "path": "comments/setModerationStatus", - "httpMethod": "POST", - "description": "Sets the moderation status of one or more comments. The API request must be authorized by the owner of the channel or video associated with the comments.", - "parameters": { - "banAuthor": { - "type": "boolean", - "description": "The banAuthor parameter lets you indicate that you want to automatically reject any additional comments written by the comment's author. Set the parameter value to true to ban the author.\n\nNote: This parameter is only valid if the moderationStatus parameter is also set to rejected.", - "default": "false", - "location": "query" - }, - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of IDs that identify the comments for which you are updating the moderation status.", - "required": true, - "location": "query" - }, - "moderationStatus": { - "type": "string", - "description": "Identifies the new moderation status of the specified comments.", - "required": true, - "enum": [ - "heldForReview", - "published", - "rejected" - ], - "enumDescriptions": [ - "Marks a comment as awaiting review by a moderator.", - "Clears a comment for public display.", - "Rejects a comment as being unfit for display. This action also effectively hides all replies to the rejected comment.\n\nNote: The API does not currently provide a way to list or otherwise discover rejected comments. However, you can change the moderation status of a rejected comment if you still know its ID. 
If you were to change the moderation status of a rejected comment, the comment replies would subsequently be discoverable again as well." - ], - "location": "query" - } - }, - "parameterOrder": [ - "id", - "moderationStatus" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "update": { - "id": "youtube.comments.update", - "path": "comments", - "httpMethod": "PUT", - "description": "Modifies a comment.", - "parameters": { - "part": { - "type": "string", - "description": "The part parameter identifies the properties that the API response will include. You must at least include the snippet part in the parameter value since that part contains all of the properties that the API request can update.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "Comment" - }, - "response": { - "$ref": "Comment" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - } - } - }, - "fanFundingEvents": { - "methods": { - "list": { - "id": "youtube.fanFundingEvents.list", - "path": "fanFundingEvents", - "httpMethod": "GET", - "description": "Lists fan funding events for a channel.", - "parameters": { - "hl": { - "type": "string", - "description": "The hl parameter instructs the API to retrieve localized resource metadata for a specific application language that the YouTube website supports. The parameter value must be a language code included in the list returned by the i18nLanguages.list method.\n\nIf localized resource details are available in that language, the resource's snippet.localized object will contain the localized values. However, if localized details are not available, the snippet.localized object will contain resource details in the resource's default language.", - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.", - "default": "5", - "format": "uint32", - "minimum": "0", - "maximum": "50", - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken and prevPageToken properties identify other pages that could be retrieved.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies the fanFundingEvent resource parts that the API response will include. Supported values are id and snippet.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "FanFundingEventListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly" - ] - } - } - }, - "guideCategories": { - "methods": { - "list": { - "id": "youtube.guideCategories.list", - "path": "guideCategories", - "httpMethod": "GET", - "description": "Returns a list of categories that can be associated with YouTube channels.", - "parameters": { - "hl": { - "type": "string", - "description": "The hl parameter specifies the language that will be used for text values in the API response.", - "default": "en-US", - "location": "query" - }, - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of the YouTube channel category ID(s) for the resource(s) that are being retrieved. 
In a guideCategory resource, the id property specifies the YouTube channel category ID.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies the guideCategory resource properties that the API response will include. Set the parameter value to snippet.", - "required": true, - "location": "query" - }, - "regionCode": { - "type": "string", - "description": "The regionCode parameter instructs the API to return the list of guide categories available in the specified country. The parameter value is an ISO 3166-1 alpha-2 country code.", - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "GuideCategoryListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly", - "https://www.googleapis.com/auth/youtubepartner" - ] - } - } - }, - "i18nLanguages": { - "methods": { - "list": { - "id": "youtube.i18nLanguages.list", - "path": "i18nLanguages", - "httpMethod": "GET", - "description": "Returns a list of application languages that the YouTube website supports.", - "parameters": { - "hl": { - "type": "string", - "description": "The hl parameter specifies the language that should be used for text values in the API response.", - "default": "en_US", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies the i18nLanguage resource properties that the API response will include. Set the parameter value to snippet.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "I18nLanguageListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly", - "https://www.googleapis.com/auth/youtubepartner" - ] - } - } - }, - "i18nRegions": { - "methods": { - "list": { - "id": "youtube.i18nRegions.list", - "path": "i18nRegions", - "httpMethod": "GET", - "description": "Returns a list of content regions that the YouTube website supports.", - "parameters": { - "hl": { - "type": "string", - "description": "The hl parameter specifies the language that should be used for text values in the API response.", - "default": "en_US", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies the i18nRegion resource properties that the API response will include. Set the parameter value to snippet.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "I18nRegionListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly", - "https://www.googleapis.com/auth/youtubepartner" - ] - } - } - }, - "liveBroadcasts": { - "methods": { - "bind": { - "id": "youtube.liveBroadcasts.bind", - "path": "liveBroadcasts/bind", - "httpMethod": "POST", - "description": "Binds a YouTube broadcast to a stream or removes an existing binding between a broadcast and a stream. 
A broadcast can only be bound to one video stream, though a video stream may be bound to more than one broadcast.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies the unique ID of the broadcast that is being bound to a video stream.", - "required": true, - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more liveBroadcast resource properties that the API response will include. The part names that you can include in the parameter value are id, snippet, contentDetails, and status.", - "required": true, - "location": "query" - }, - "streamId": { - "type": "string", - "description": "The streamId parameter specifies the unique ID of the video stream that is being bound to a broadcast. 
If this parameter is omitted, the API will remove any existing binding between the broadcast and a video stream.", - "location": "query" - } - }, - "parameterOrder": [ - "id", - "part" - ], - "response": { - "$ref": "LiveBroadcast" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "control": { - "id": "youtube.liveBroadcasts.control", - "path": "liveBroadcasts/control", - "httpMethod": "POST", - "description": "Controls the settings for a slate that can be displayed in the broadcast stream.", - "parameters": { - "displaySlate": { - "type": "boolean", - "description": "The displaySlate parameter specifies whether the slate is being enabled or disabled.", - "location": "query" - }, - "id": { - "type": "string", - "description": "The id parameter specifies the YouTube live broadcast ID that uniquely identifies the broadcast in which the slate is being updated.", - "required": true, - "location": "query" - }, - "offsetTimeMs": { - "type": "string", - "description": "The offsetTimeMs parameter specifies a positive time offset when the specified slate change will occur. The value is measured in milliseconds from the beginning of the broadcast's monitor stream, which is the time that the testing phase for the broadcast began. Even though it is specified in milliseconds, the value is actually an approximation, and YouTube completes the requested action as closely as possible to that time.\n\nIf you do not specify a value for this parameter, then YouTube performs the action as soon as possible. See the Getting started guide for more details.\n\nImportant: You should only specify a value for this parameter if your broadcast stream is delayed.", - "format": "uint64", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. 
It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more liveBroadcast resource properties that the API response will include. The part names that you can include in the parameter value are id, snippet, contentDetails, and status.", - "required": true, - "location": "query" - }, - "walltime": { - "type": "string", - "description": "The walltime parameter specifies the wall clock time at which the specified slate change will occur. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sssZ) format.", - "format": "date-time", - "location": "query" - } - }, - "parameterOrder": [ - "id", - "part" - ], - "response": { - "$ref": "LiveBroadcast" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "delete": { - "id": "youtube.liveBroadcasts.delete", - "path": "liveBroadcasts", - "httpMethod": "DELETE", - "description": "Deletes a broadcast.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies the YouTube live broadcast ID for the resource that is being deleted.", - "required": true, - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. 
It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "insert": { - "id": "youtube.liveBroadcasts.insert", - "path": "liveBroadcasts", - "httpMethod": "POST", - "description": "Creates a broadcast.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. 
It identifies the properties that the write operation will set as well as the properties that the API response will include.\n\nThe part properties that you can include in the parameter value are id, snippet, contentDetails, and status.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "LiveBroadcast" - }, - "response": { - "$ref": "LiveBroadcast" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "list": { - "id": "youtube.liveBroadcasts.list", - "path": "liveBroadcasts", - "httpMethod": "GET", - "description": "Returns a list of YouTube broadcasts that match the API request parameters.", - "parameters": { - "broadcastStatus": { - "type": "string", - "description": "The broadcastStatus parameter filters the API response to only include broadcasts with the specified status.", - "enum": [ - "active", - "all", - "completed", - "upcoming" - ], - "enumDescriptions": [ - "Return current live broadcasts.", - "Return all broadcasts.", - "Return broadcasts that have already ended.", - "Return broadcasts that have not yet started." - ], - "location": "query" - }, - "broadcastType": { - "type": "string", - "description": "The broadcastType parameter filters the API response to only include broadcasts with the specified type. This is only compatible with the mine filter for now.", - "default": "BROADCAST_TYPE_FILTER_EVENT", - "enum": [ - "all", - "event", - "persistent" - ], - "enumDescriptions": [ - "Return all broadcasts.", - "Return only scheduled event broadcasts.", - "Return only persistent broadcasts." - ], - "location": "query" - }, - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of YouTube broadcast IDs that identify the broadcasts being retrieved. In a liveBroadcast resource, the id property specifies the broadcast's ID.", - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.", - "default": "5", - "format": "uint32", - "minimum": "0", - "maximum": "50", - "location": "query" - }, - "mine": { - "type": "boolean", - "description": "The mine parameter can be used to instruct the API to only return broadcasts owned by the authenticated user. Set the parameter value to true to only retrieve your own broadcasts.", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. 
Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken and prevPageToken properties identify other pages that could be retrieved.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more liveBroadcast resource properties that the API response will include. The part names that you can include in the parameter value are id, snippet, contentDetails, and status.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "LiveBroadcastListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly" - ] - }, - "transition": { - "id": "youtube.liveBroadcasts.transition", - "path": "liveBroadcasts/transition", - "httpMethod": "POST", - "description": "Changes the status of a YouTube live broadcast and initiates any processes associated with the new status. For example, when you transition a broadcast's status to testing, YouTube starts to transmit video to that broadcast's monitor stream. Before calling this method, you should confirm that the value of the status.streamStatus property for the stream bound to your broadcast is active.", - "parameters": { - "broadcastStatus": { - "type": "string", - "description": "The broadcastStatus parameter identifies the state to which the broadcast is changing. Note that to transition a broadcast to either the testing or live state, the status.streamStatus must be active for the stream that the broadcast is bound to.", - "required": true, - "enum": [ - "complete", - "live", - "testing" - ], - "enumDescriptions": [ - "The broadcast is over. YouTube stops transmitting video.", - "The broadcast is visible to its audience. YouTube transmits video to the broadcast's monitor stream and its broadcast stream.", - "Start testing the broadcast. YouTube transmits video to the broadcast's monitor stream. Note that you can only transition a broadcast to the testing state if its contentDetails.monitorStream.enableMonitorStream property is set to true." 
- ], - "location": "query" - }, - "id": { - "type": "string", - "description": "The id parameter specifies the unique ID of the broadcast that is transitioning to another status.", - "required": true, - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more liveBroadcast resource properties that the API response will include. The part names that you can include in the parameter value are id, snippet, contentDetails, and status.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "broadcastStatus", - "id", - "part" - ], - "response": { - "$ref": "LiveBroadcast" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "update": { - "id": "youtube.liveBroadcasts.update", - "path": "liveBroadcasts", - "httpMethod": "PUT", - "description": "Updates a broadcast. For example, you could modify the broadcast settings defined in the liveBroadcast resource's contentDetails object.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. 
It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. It identifies the properties that the write operation will set as well as the properties that the API response will include.\n\nThe part properties that you can include in the parameter value are id, snippet, contentDetails, and status.\n\nNote that this method will override the existing values for all of the mutable properties that are contained in any parts that the parameter value specifies. For example, a broadcast's privacy status is defined in the status part. As such, if your request is updating a private or unlisted broadcast, and the request's part parameter value includes the status part, the broadcast's privacy setting will be updated to whatever value the request body specifies. If the request body does not specify a value, the existing privacy setting will be removed and the broadcast will revert to the default privacy setting.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "LiveBroadcast" - }, - "response": { - "$ref": "LiveBroadcast" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - } - } - }, - "liveChatBans": { - "methods": { - "delete": { - "id": "youtube.liveChatBans.delete", - "path": "liveChat/bans", - "httpMethod": "DELETE", - "description": "Removes a chat ban.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter identifies the chat ban to remove. 
The value uniquely identifies both the ban and the chat.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "insert": { - "id": "youtube.liveChatBans.insert", - "path": "liveChat/bans", - "httpMethod": "POST", - "description": "Adds a new ban to the chat.", - "parameters": { - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. It identifies the properties that the write operation will set as well as the properties that the API response returns. Set the parameter value to snippet.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "LiveChatBan" - }, - "response": { - "$ref": "LiveChatBan" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - } - } - }, - "liveChatMessages": { - "methods": { - "delete": { - "id": "youtube.liveChatMessages.delete", - "path": "liveChat/messages", - "httpMethod": "DELETE", - "description": "Deletes a chat message.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies the YouTube chat message ID of the resource that is being deleted.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "insert": { - "id": "youtube.liveChatMessages.insert", - "path": "liveChat/messages", - "httpMethod": "POST", - "description": "Adds a message to a live chat.", - "parameters": { - "part": { - "type": "string", - "description": "The part parameter serves two purposes. It identifies the properties that the write operation will set as well as the properties that the API response will include. Set the parameter value to snippet.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "LiveChatMessage" - }, - "response": { - "$ref": "LiveChatMessage" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "list": { - "id": "youtube.liveChatMessages.list", - "path": "liveChat/messages", - "httpMethod": "GET", - "description": "Lists live chat messages for a specific chat.", - "parameters": { - "hl": { - "type": "string", - "description": "The hl parameter instructs the API to retrieve localized resource metadata for a specific application language that the YouTube website supports. The parameter value must be a language code included in the list returned by the i18nLanguages.list method.\n\nIf localized resource details are available in that language, the resource's snippet.localized object will contain the localized values. 
However, if localized details are not available, the snippet.localized object will contain resource details in the resource's default language.", - "location": "query" - }, - "liveChatId": { - "type": "string", - "description": "The liveChatId parameter specifies the ID of the chat whose messages will be returned.", - "required": true, - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of messages that should be returned in the result set.", - "default": "500", - "format": "uint32", - "minimum": "200", - "maximum": "2000", - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken property identify other pages that could be retrieved.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies the liveChatComment resource parts that the API response will include. Supported values are id and snippet.", - "required": true, - "location": "query" - }, - "profileImageSize": { - "type": "integer", - "description": "The profileImageSize parameter specifies the size of the user profile pictures that should be returned in the result set. Default: 88.", - "format": "uint32", - "minimum": "16", - "maximum": "720", - "location": "query" - } - }, - "parameterOrder": [ - "liveChatId", - "part" - ], - "response": { - "$ref": "LiveChatMessageListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly" - ] - } - } - }, - "liveChatModerators": { - "methods": { - "delete": { - "id": "youtube.liveChatModerators.delete", - "path": "liveChat/moderators", - "httpMethod": "DELETE", - "description": "Removes a chat moderator.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter identifies the chat moderator to remove. The value uniquely identifies both the moderator and the chat.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "insert": { - "id": "youtube.liveChatModerators.insert", - "path": "liveChat/moderators", - "httpMethod": "POST", - "description": "Adds a new moderator for the chat.", - "parameters": { - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. It identifies the properties that the write operation will set as well as the properties that the API response returns. 
Set the parameter value to snippet.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "LiveChatModerator" - }, - "response": { - "$ref": "LiveChatModerator" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "list": { - "id": "youtube.liveChatModerators.list", - "path": "liveChat/moderators", - "httpMethod": "GET", - "description": "Lists moderators for a live chat.", - "parameters": { - "liveChatId": { - "type": "string", - "description": "The liveChatId parameter specifies the YouTube live chat for which the API should return moderators.", - "required": true, - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.", - "default": "5", - "format": "uint32", - "minimum": "0", - "maximum": "50", - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken and prevPageToken properties identify other pages that could be retrieved.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies the liveChatModerator resource parts that the API response will include. Supported values are id and snippet.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "liveChatId", - "part" - ], - "response": { - "$ref": "LiveChatModeratorListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly" - ] - } - } - }, - "liveStreams": { - "methods": { - "delete": { - "id": "youtube.liveStreams.delete", - "path": "liveStreams", - "httpMethod": "DELETE", - "description": "Deletes a video stream.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies the YouTube live stream ID for the resource that is being deleted.", - "required": true, - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. 
In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "insert": { - "id": "youtube.liveStreams.insert", - "path": "liveStreams", - "httpMethod": "POST", - "description": "Creates a video stream. The stream enables you to send your video to YouTube, which can then broadcast the video to your audience.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. 
It identifies the properties that the write operation will set as well as the properties that the API response will include.\n\nThe part properties that you can include in the parameter value are id, snippet, cdn, and status.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "LiveStream" - }, - "response": { - "$ref": "LiveStream" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - }, - "list": { - "id": "youtube.liveStreams.list", - "path": "liveStreams", - "httpMethod": "GET", - "description": "Returns a list of video streams that match the API request parameters.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of YouTube stream IDs that identify the streams being retrieved. In a liveStream resource, the id property specifies the stream's ID.", - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.", - "default": "5", - "format": "uint32", - "minimum": "0", - "maximum": "50", - "location": "query" - }, - "mine": { - "type": "boolean", - "description": "The mine parameter can be used to instruct the API to only return streams owned by the authenticated user. Set the parameter value to true to only retrieve your own streams.", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. 
It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken and prevPageToken properties identify other pages that could be retrieved.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more liveStream resource properties that the API response will include. The part names that you can include in the parameter value are id, snippet, cdn, and status.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "LiveStreamListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly" - ] - }, - "update": { - "id": "youtube.liveStreams.update", - "path": "liveStreams", - "httpMethod": "PUT", - "description": "Updates a video stream. If the properties that you want to change cannot be updated, then you need to create a new stream with the proper settings.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. 
It identifies the properties that the write operation will set as well as the properties that the API response will include.\n\nThe part properties that you can include in the parameter value are id, snippet, cdn, and status.\n\nNote that this method will override the existing values for all of the mutable properties that are contained in any parts that the parameter value specifies. If the request body does not specify a value for a mutable property, the existing value for that property will be removed.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "LiveStream" - }, - "response": { - "$ref": "LiveStream" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl" - ] - } - } - }, - "playlistItems": { - "methods": { - "delete": { - "id": "youtube.playlistItems.delete", - "path": "playlistItems", - "httpMethod": "DELETE", - "description": "Deletes a playlist item.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies the YouTube playlist item ID for the playlist item that is being deleted. In a playlistItem resource, the id property specifies the playlist item's ID.", - "required": true, - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "insert": { - "id": "youtube.playlistItems.insert", - "path": "playlistItems", - "httpMethod": "POST", - "description": "Adds a resource to a playlist.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. 
It identifies the properties that the write operation will set as well as the properties that the API response will include.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "PlaylistItem" - }, - "response": { - "$ref": "PlaylistItem" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "list": { - "id": "youtube.playlistItems.list", - "path": "playlistItems", - "httpMethod": "GET", - "description": "Returns a collection of playlist items that match the API request parameters. You can retrieve all of the playlist items in a specified playlist or retrieve one or more playlist items by their unique IDs.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of one or more unique playlist item IDs.", - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.", - "default": "5", - "format": "uint32", - "minimum": "0", - "maximum": "50", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken and prevPageToken properties identify other pages that could be retrieved.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more playlistItem resource properties that the API response will include.\n\nIf the parameter identifies a property that contains child properties, the child properties will be included in the response. For example, in a playlistItem resource, the snippet property contains numerous fields, including the title, description, position, and resourceId properties. As such, if you set part=snippet, the API response will contain all of those properties.", - "required": true, - "location": "query" - }, - "playlistId": { - "type": "string", - "description": "The playlistId parameter specifies the unique ID of the playlist for which you want to retrieve playlist items. 
Note that even though this is an optional parameter, every request to retrieve playlist items must specify a value for either the id parameter or the playlistId parameter.", - "location": "query" - }, - "videoId": { - "type": "string", - "description": "The videoId parameter specifies that the request should return only the playlist items that contain the specified video.", - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "PlaylistItemListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly", - "https://www.googleapis.com/auth/youtubepartner" - ], - "supportsSubscription": true - }, - "update": { - "id": "youtube.playlistItems.update", - "path": "playlistItems", - "httpMethod": "PUT", - "description": "Modifies a playlist item. For example, you could update the item's position in the playlist.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. It identifies the properties that the write operation will set as well as the properties that the API response will include.\n\nNote that this method will override the existing values for all of the mutable properties that are contained in any parts that the parameter value specifies. For example, a playlist item can specify a start time and end time, which identify the times portion of the video that should play when users watch the video in the playlist. If your request is updating a playlist item that sets these values, and the request's part parameter value includes the contentDetails part, the playlist item's start and end times will be updated to whatever value the request body specifies. If the request body does not specify values, the existing start and end times will be removed and replaced with the default settings.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "PlaylistItem" - }, - "response": { - "$ref": "PlaylistItem" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - } - } - }, - "playlists": { - "methods": { - "delete": { - "id": "youtube.playlists.delete", - "path": "playlists", - "httpMethod": "DELETE", - "description": "Deletes a playlist.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies the YouTube playlist ID for the playlist that is being deleted. 
In a playlist resource, the id property specifies the playlist's ID.", - "required": true, - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "insert": { - "id": "youtube.playlists.insert", - "path": "playlists", - "httpMethod": "POST", - "description": "Creates a playlist.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. 
It identifies the properties that the write operation will set as well as the properties that the API response will include.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "Playlist" - }, - "response": { - "$ref": "Playlist" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "list": { - "id": "youtube.playlists.list", - "path": "playlists", - "httpMethod": "GET", - "description": "Returns a collection of playlists that match the API request parameters. For example, you can retrieve all playlists that the authenticated user owns, or you can retrieve one or more playlists by their unique IDs.", - "parameters": { - "channelId": { - "type": "string", - "description": "This value indicates that the API should only return the specified channel's playlists.", - "location": "query" - }, - "hl": { - "type": "string", - "description": "The hl parameter should be used for filter out the properties that are not in the given language. Used for the snippet part.", - "location": "query" - }, - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of the YouTube playlist ID(s) for the resource(s) that are being retrieved. In a playlist resource, the id property specifies the playlist's YouTube playlist ID.", - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.", - "default": "5", - "format": "uint32", - "minimum": "0", - "maximum": "50", - "location": "query" - }, - "mine": { - "type": "boolean", - "description": "Set this parameter's value to true to instruct the API to only return playlists owned by the authenticated user.", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. 
Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken and prevPageToken properties identify other pages that could be retrieved.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more playlist resource properties that the API response will include.\n\nIf the parameter identifies a property that contains child properties, the child properties will be included in the response. For example, in a playlist resource, the snippet property contains properties like author, title, description, tags, and timeCreated. As such, if you set part=snippet, the API response will contain all of those properties.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "PlaylistListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "update": { - "id": "youtube.playlists.update", - "path": "playlists", - "httpMethod": "PUT", - "description": "Modifies a playlist. For example, you could change a playlist's title, description, or privacy status.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. It identifies the properties that the write operation will set as well as the properties that the API response will include.\n\nNote that this method will override the existing values for mutable properties that are contained in any parts that the request body specifies. For example, a playlist's description is contained in the snippet part, which must be included in the request body. 
If the request does not specify a value for the snippet.description property, the playlist's existing description will be deleted.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "Playlist" - }, - "response": { - "$ref": "Playlist" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - } - } - }, - "search": { - "methods": { - "list": { - "id": "youtube.search.list", - "path": "search", - "httpMethod": "GET", - "description": "Returns a collection of search results that match the query parameters specified in the API request. By default, a search result set identifies matching video, channel, and playlist resources, but you can also configure queries to only retrieve a specific type of resource.", - "parameters": { - "channelId": { - "type": "string", - "description": "The channelId parameter indicates that the API response should only contain resources created by the channel", - "location": "query" - }, - "channelType": { - "type": "string", - "description": "The channelType parameter lets you restrict a search to a particular type of channel.", - "enum": [ - "any", - "show" - ], - "enumDescriptions": [ - "Return all channels.", - "Only retrieve shows." - ], - "location": "query" - }, - "eventType": { - "type": "string", - "description": "The eventType parameter restricts a search to broadcast events. If you specify a value for this parameter, you must also set the type parameter's value to video.", - "enum": [ - "completed", - "live", - "upcoming" - ], - "enumDescriptions": [ - "Only include completed broadcasts.", - "Only include active broadcasts.", - "Only include upcoming broadcasts." - ], - "location": "query" - }, - "forContentOwner": { - "type": "boolean", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe forContentOwner parameter restricts the search to only retrieve resources owned by the content owner specified by the onBehalfOfContentOwner parameter. The user must be authenticated using a CMS account linked to the specified content owner and onBehalfOfContentOwner must be provided.", - "location": "query" - }, - "forDeveloper": { - "type": "boolean", - "description": "The forDeveloper parameter restricts the search to only retrieve videos uploaded via the developer's application or website. The API server uses the request's authorization credentials to identify the developer. Therefore, a developer can restrict results to videos uploaded through the developer's own app or website but not to videos uploaded through other apps or sites.", - "location": "query" - }, - "forMine": { - "type": "boolean", - "description": "The forMine parameter restricts the search to only retrieve videos owned by the authenticated user. If you set this parameter to true, then the type parameter's value must also be set to video.", - "location": "query" - }, - "location": { - "type": "string", - "description": "The location parameter, in conjunction with the locationRadius parameter, defines a circular geographic area and also restricts a search to videos that specify, in their metadata, a geographic location that falls within that area. The parameter value is a string that specifies latitude/longitude coordinates e.g. 
(37.42307,-122.08427).\n\n\n- The location parameter value identifies the point at the center of the area.\n- The locationRadius parameter specifies the maximum distance that the location associated with a video can be from that point for the video to still be included in the search results.The API returns an error if your request specifies a value for the location parameter but does not also specify a value for the locationRadius parameter.", - "location": "query" - }, - "locationRadius": { - "type": "string", - "description": "The locationRadius parameter, in conjunction with the location parameter, defines a circular geographic area.\n\nThe parameter value must be a floating point number followed by a measurement unit. Valid measurement units are m, km, ft, and mi. For example, valid parameter values include 1500m, 5km, 10000ft, and 0.75mi. The API does not support locationRadius parameter values larger than 1000 kilometers.\n\nNote: See the definition of the location parameter for more information.", - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.", - "default": "5", - "format": "uint32", - "minimum": "0", - "maximum": "50", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "order": { - "type": "string", - "description": "The order parameter specifies the method that will be used to order resources in the API response.", - "default": "SEARCH_SORT_RELEVANCE", - "enum": [ - "date", - "rating", - "relevance", - "title", - "videoCount", - "viewCount" - ], - "enumDescriptions": [ - "Resources are sorted in reverse chronological order based on the date they were created.", - "Resources are sorted from highest to lowest rating.", - "Resources are sorted based on their relevance to the search query. This is the default value for this parameter.", - "Resources are sorted alphabetically by title.", - "Channels are sorted in descending order of their number of uploaded videos.", - "Resources are sorted from highest to lowest number of views." - ], - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken and prevPageToken properties identify other pages that could be retrieved.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more search resource properties that the API response will include. 
Set the parameter value to snippet.", - "required": true, - "location": "query" - }, - "publishedAfter": { - "type": "string", - "description": "The publishedAfter parameter indicates that the API response should only contain resources created after the specified time. The value is an RFC 3339 formatted date-time value (1970-01-01T00:00:00Z).", - "format": "date-time", - "location": "query" - }, - "publishedBefore": { - "type": "string", - "description": "The publishedBefore parameter indicates that the API response should only contain resources created before the specified time. The value is an RFC 3339 formatted date-time value (1970-01-01T00:00:00Z).", - "format": "date-time", - "location": "query" - }, - "q": { - "type": "string", - "description": "The q parameter specifies the query term to search for.\n\nYour request can also use the Boolean NOT (-) and OR (|) operators to exclude videos or to find videos that are associated with one of several search terms. For example, to search for videos matching either \"boating\" or \"sailing\", set the q parameter value to boating|sailing. Similarly, to search for videos matching either \"boating\" or \"sailing\" but not \"fishing\", set the q parameter value to boating|sailing -fishing. Note that the pipe character must be URL-escaped when it is sent in your API request. The URL-escaped value for the pipe character is %7C.", - "location": "query" - }, - "regionCode": { - "type": "string", - "description": "The regionCode parameter instructs the API to return search results for the specified country. The parameter value is an ISO 3166-1 alpha-2 country code.", - "location": "query" - }, - "relatedToVideoId": { - "type": "string", - "description": "The relatedToVideoId parameter retrieves a list of videos that are related to the video that the parameter value identifies. The parameter value must be set to a YouTube video ID and, if you are using this parameter, the type parameter must be set to video.", - "location": "query" - }, - "relevanceLanguage": { - "type": "string", - "description": "The relevanceLanguage parameter instructs the API to return search results that are most relevant to the specified language. The parameter value is typically an ISO 639-1 two-letter language code. However, you should use the values zh-Hans for simplified Chinese and zh-Hant for traditional Chinese. Please note that results in other languages will still be returned if they are highly relevant to the search query term.", - "location": "query" - }, - "safeSearch": { - "type": "string", - "description": "The safeSearch parameter indicates whether the search results should include restricted content as well as standard content.", - "enum": [ - "moderate", - "none", - "strict" - ], - "enumDescriptions": [ - "YouTube will filter some content from search results and, at the least, will filter content that is restricted in your locale. Based on their content, search results could be removed from search results or demoted in search results. This is the default parameter value.", - "YouTube will not filter the search result set.", - "YouTube will try to exclude all restricted content from the search result set. Based on their content, search results could be removed from search results or demoted in search results." - ], - "location": "query" - }, - "topicId": { - "type": "string", - "description": "The topicId parameter indicates that the API response should only contain resources associated with the specified topic. 
The value identifies a Freebase topic ID.", - "location": "query" - }, - "type": { - "type": "string", - "description": "The type parameter restricts a search query to only retrieve a particular type of resource. The value is a comma-separated list of resource types.", - "default": "video,channel,playlist", - "location": "query" - }, - "videoCaption": { - "type": "string", - "description": "The videoCaption parameter indicates whether the API should filter video search results based on whether they have captions. If you specify a value for this parameter, you must also set the type parameter's value to video.", - "enum": [ - "any", - "closedCaption", - "none" - ], - "enumDescriptions": [ - "Do not filter results based on caption availability.", - "Only include videos that have captions.", - "Only include videos that do not have captions." - ], - "location": "query" - }, - "videoCategoryId": { - "type": "string", - "description": "The videoCategoryId parameter filters video search results based on their category. If you specify a value for this parameter, you must also set the type parameter's value to video.", - "location": "query" - }, - "videoDefinition": { - "type": "string", - "description": "The videoDefinition parameter lets you restrict a search to only include either high definition (HD) or standard definition (SD) videos. HD videos are available for playback in at least 720p, though higher resolutions, like 1080p, might also be available. If you specify a value for this parameter, you must also set the type parameter's value to video.", - "enum": [ - "any", - "high", - "standard" - ], - "enumDescriptions": [ - "Return all videos, regardless of their resolution.", - "Only retrieve HD videos.", - "Only retrieve videos in standard definition." - ], - "location": "query" - }, - "videoDimension": { - "type": "string", - "description": "The videoDimension parameter lets you restrict a search to only retrieve 2D or 3D videos. If you specify a value for this parameter, you must also set the type parameter's value to video.", - "enum": [ - "2d", - "3d", - "any" - ], - "enumDescriptions": [ - "Restrict search results to exclude 3D videos.", - "Restrict search results to only include 3D videos.", - "Include both 3D and non-3D videos in returned results. This is the default value." - ], - "location": "query" - }, - "videoDuration": { - "type": "string", - "description": "The videoDuration parameter filters video search results based on their duration. If you specify a value for this parameter, you must also set the type parameter's value to video.", - "enum": [ - "any", - "long", - "medium", - "short" - ], - "enumDescriptions": [ - "Do not filter video search results based on their duration. This is the default value.", - "Only include videos longer than 20 minutes.", - "Only include videos that are between four and 20 minutes long (inclusive).", - "Only include videos that are less than four minutes long." - ], - "location": "query" - }, - "videoEmbeddable": { - "type": "string", - "description": "The videoEmbeddable parameter lets you to restrict a search to only videos that can be embedded into a webpage. If you specify a value for this parameter, you must also set the type parameter's value to video.", - "enum": [ - "any", - "true" - ], - "enumDescriptions": [ - "Return all videos, embeddable or not.", - "Only retrieve embeddable videos." 
- ], - "location": "query" - }, - "videoLicense": { - "type": "string", - "description": "The videoLicense parameter filters search results to only include videos with a particular license. YouTube lets video uploaders choose to attach either the Creative Commons license or the standard YouTube license to each of their videos. If you specify a value for this parameter, you must also set the type parameter's value to video.", - "enum": [ - "any", - "creativeCommon", - "youtube" - ], - "enumDescriptions": [ - "Return all videos, regardless of which license they have, that match the query parameters.", - "Only return videos that have a Creative Commons license. Users can reuse videos with this license in other videos that they create. Learn more.", - "Only return videos that have the standard YouTube license." - ], - "location": "query" - }, - "videoSyndicated": { - "type": "string", - "description": "The videoSyndicated parameter lets you to restrict a search to only videos that can be played outside youtube.com. If you specify a value for this parameter, you must also set the type parameter's value to video.", - "enum": [ - "any", - "true" - ], - "enumDescriptions": [ - "Return all videos, syndicated or not.", - "Only retrieve syndicated videos." - ], - "location": "query" - }, - "videoType": { - "type": "string", - "description": "The videoType parameter lets you restrict a search to a particular type of videos. If you specify a value for this parameter, you must also set the type parameter's value to video.", - "enum": [ - "any", - "episode", - "movie" - ], - "enumDescriptions": [ - "Return all videos.", - "Only retrieve episodes of shows.", - "Only retrieve movies." - ], - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "SearchListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly", - "https://www.googleapis.com/auth/youtubepartner" - ] - } - } - }, - "sponsors": { - "methods": { - "list": { - "id": "youtube.sponsors.list", - "path": "sponsors", - "httpMethod": "GET", - "description": "Lists sponsors for a channel.", - "parameters": { - "filter": { - "type": "string", - "description": "The filter parameter specifies which channel sponsors to return.", - "default": "POLL_NEWEST", - "enum": [ - "all", - "newest" - ], - "enumDescriptions": [ - "Return all sponsors, from newest to oldest.", - "Return the most recent sponsors, from newest to oldest." - ], - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.", - "default": "5", - "format": "uint32", - "minimum": "0", - "maximum": "50", - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken and prevPageToken properties identify other pages that could be retrieved.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies the sponsor resource parts that the API response will include. 
Supported values are id and snippet.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "SponsorListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly" - ] - } - } - }, - "subscriptions": { - "methods": { - "delete": { - "id": "youtube.subscriptions.delete", - "path": "subscriptions", - "httpMethod": "DELETE", - "description": "Deletes a subscription.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies the YouTube subscription ID for the resource that is being deleted. In a subscription resource, the id property specifies the YouTube subscription ID.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "insert": { - "id": "youtube.subscriptions.insert", - "path": "subscriptions", - "httpMethod": "POST", - "description": "Adds a subscription for the authenticated user's channel.", - "parameters": { - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. It identifies the properties that the write operation will set as well as the properties that the API response will include.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "Subscription" - }, - "response": { - "$ref": "Subscription" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "list": { - "id": "youtube.subscriptions.list", - "path": "subscriptions", - "httpMethod": "GET", - "description": "Returns subscription resources that match the API request criteria.", - "parameters": { - "channelId": { - "type": "string", - "description": "The channelId parameter specifies a YouTube channel ID. The API will only return that channel's subscriptions.", - "location": "query" - }, - "forChannelId": { - "type": "string", - "description": "The forChannelId parameter specifies a comma-separated list of channel IDs. The API response will then only contain subscriptions matching those channels.", - "location": "query" - }, - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of the YouTube subscription ID(s) for the resource(s) that are being retrieved. 
In a subscription resource, the id property specifies the YouTube subscription ID.", - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.", - "default": "5", - "format": "uint32", - "minimum": "0", - "maximum": "50", - "location": "query" - }, - "mine": { - "type": "boolean", - "description": "Set this parameter's value to true to retrieve a feed of the authenticated user's subscriptions.", - "location": "query" - }, - "myRecentSubscribers": { - "type": "boolean", - "description": "Set this parameter's value to true to retrieve a feed of the subscribers of the authenticated user in reverse chronological order (newest first).", - "location": "query" - }, - "mySubscribers": { - "type": "boolean", - "description": "Set this parameter's value to true to retrieve a feed of the subscribers of the authenticated user in no particular order.", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "order": { - "type": "string", - "description": "The order parameter specifies the method that will be used to sort resources in the API response.", - "default": "SUBSCRIPTION_ORDER_RELEVANCE", - "enum": [ - "alphabetical", - "relevance", - "unread" - ], - "enumDescriptions": [ - "Sort alphabetically.", - "Sort by relevance.", - "Sort by order of activity." - ], - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. 
In an API response, the nextPageToken and prevPageToken properties identify other pages that could be retrieved.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more subscription resource properties that the API response will include.\n\nIf the parameter identifies a property that contains child properties, the child properties will be included in the response. For example, in a subscription resource, the snippet property contains other properties, such as a display title for the subscription. If you set part=snippet, the API response will also contain all of those nested properties.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "SubscriptionListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly", - "https://www.googleapis.com/auth/youtubepartner" - ] - } - } - }, - "superChatEvents": { - "methods": { - "list": { - "id": "youtube.superChatEvents.list", - "path": "superChatEvents", - "httpMethod": "GET", - "description": "Lists Super Chat events for a channel.", - "parameters": { - "hl": { - "type": "string", - "description": "The hl parameter instructs the API to retrieve localized resource metadata for a specific application language that the YouTube website supports. The parameter value must be a language code included in the list returned by the i18nLanguages.list method.\n\nIf localized resource details are available in that language, the resource's snippet.localized object will contain the localized values. However, if localized details are not available, the snippet.localized object will contain resource details in the resource's default language.", - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.", - "default": "5", - "format": "uint32", - "minimum": "0", - "maximum": "50", - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. In an API response, the nextPageToken and prevPageToken properties identify other pages that could be retrieved.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies the superChatEvent resource parts that the API response will include. 
Supported values are id and snippet.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "SuperChatEventListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly" - ] - } - } - }, - "thumbnails": { - "methods": { - "set": { - "id": "youtube.thumbnails.set", - "path": "thumbnails/set", - "httpMethod": "POST", - "description": "Uploads a custom video thumbnail to YouTube and sets it for a video.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The actual CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "videoId": { - "type": "string", - "description": "The videoId parameter specifies a YouTube video ID for which the custom video thumbnail is being provided.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "videoId" - ], - "response": { - "$ref": "ThumbnailSetResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.upload", - "https://www.googleapis.com/auth/youtubepartner" - ], - "supportsMediaUpload": true, - "mediaUpload": { - "accept": [ - "application/octet-stream", - "image/jpeg", - "image/png" - ], - "maxSize": "2MB", - "protocols": { - "simple": { - "multipart": true, - "path": "/upload/youtube/v3/thumbnails/set" - }, - "resumable": { - "multipart": true, - "path": "/resumable/upload/youtube/v3/thumbnails/set" - } - } - } - } - } - }, - "videoAbuseReportReasons": { - "methods": { - "list": { - "id": "youtube.videoAbuseReportReasons.list", - "path": "videoAbuseReportReasons", - "httpMethod": "GET", - "description": "Returns a list of abuse reasons that can be used for reporting abusive videos.", - "parameters": { - "hl": { - "type": "string", - "description": "The hl parameter specifies the language that should be used for text values in the API response.", - "default": "en_US", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies the videoCategory resource parts that the API response will include. 
Supported values are id and snippet.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "VideoAbuseReportReasonListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly" - ] - } - } - }, - "videoCategories": { - "methods": { - "list": { - "id": "youtube.videoCategories.list", - "path": "videoCategories", - "httpMethod": "GET", - "description": "Returns a list of categories that can be associated with YouTube videos.", - "parameters": { - "hl": { - "type": "string", - "description": "The hl parameter specifies the language that should be used for text values in the API response.", - "default": "en_US", - "location": "query" - }, - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of video category IDs for the resources that you are retrieving.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies the videoCategory resource properties that the API response will include. Set the parameter value to snippet.", - "required": true, - "location": "query" - }, - "regionCode": { - "type": "string", - "description": "The regionCode parameter instructs the API to return the list of video categories available in the specified country. The parameter value is an ISO 3166-1 alpha-2 country code.", - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "VideoCategoryListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly", - "https://www.googleapis.com/auth/youtubepartner" - ] - } - } - }, - "videos": { - "methods": { - "delete": { - "id": "youtube.videos.delete", - "path": "videos", - "httpMethod": "DELETE", - "description": "Deletes a YouTube video.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies the YouTube video ID for the resource that is being deleted. In a video resource, the id property specifies the video's ID.", - "required": true, - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. 
The actual CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "getRating": { - "id": "youtube.videos.getRating", - "path": "videos/getRating", - "httpMethod": "GET", - "description": "Retrieves the ratings that the authorized user gave to a list of specified videos.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of the YouTube video ID(s) for the resource(s) for which you are retrieving rating data. In a video resource, the id property specifies the video's ID.", - "required": true, - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - } - }, - "parameterOrder": [ - "id" - ], - "response": { - "$ref": "VideoGetRatingResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "insert": { - "id": "youtube.videos.insert", - "path": "videos", - "httpMethod": "POST", - "description": "Uploads a video to YouTube and optionally sets the video's metadata.", - "parameters": { - "autoLevels": { - "type": "boolean", - "description": "The autoLevels parameter indicates whether YouTube should automatically enhance the video's lighting and color.", - "location": "query" - }, - "notifySubscribers": { - "type": "boolean", - "description": "The notifySubscribers parameter indicates whether YouTube should send a notification about the new video to users who subscribe to the video's channel. A parameter value of True indicates that subscribers will be notified of newly uploaded videos. However, a channel owner who is uploading many videos might prefer to set the value to False to avoid sending a notification about each new video to the channel's subscribers.", - "default": "true", - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. 
The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "onBehalfOfContentOwnerChannel": { - "type": "string", - "description": "This parameter can only be used in a properly authorized request. Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwnerChannel parameter specifies the YouTube channel ID of the channel to which a video is being added. This parameter is required when a request specifies a value for the onBehalfOfContentOwner parameter, and it can only be used in conjunction with that parameter. In addition, the request must be authorized using a CMS account that is linked to the content owner that the onBehalfOfContentOwner parameter specifies. Finally, the channel that the onBehalfOfContentOwnerChannel parameter value specifies must be linked to the content owner that the onBehalfOfContentOwner parameter specifies.\n\nThis parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and perform actions on behalf of the channel specified in the parameter value, without having to provide authentication credentials for each separate channel.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. It identifies the properties that the write operation will set as well as the properties that the API response will include.\n\nNote that not all parts contain properties that can be set when inserting or updating a video. For example, the statistics object encapsulates statistics that YouTube calculates for a video and does not contain values that you can set or modify. If the parameter value specifies a part that does not contain mutable values, that part will still be included in the API response.", - "required": true, - "location": "query" - }, - "stabilize": { - "type": "boolean", - "description": "The stabilize parameter indicates whether YouTube should adjust the video to remove shaky camera motions.", - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "Video" - }, - "response": { - "$ref": "Video" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.upload", - "https://www.googleapis.com/auth/youtubepartner" - ], - "supportsMediaUpload": true, - "mediaUpload": { - "accept": [ - "application/octet-stream", - "video/*" - ], - "maxSize": "64GB", - "protocols": { - "simple": { - "multipart": true, - "path": "/upload/youtube/v3/videos" - }, - "resumable": { - "multipart": true, - "path": "/resumable/upload/youtube/v3/videos" - } - } - } - }, - "list": { - "id": "youtube.videos.list", - "path": "videos", - "httpMethod": "GET", - "description": "Returns a list of videos that match the API request parameters.", - "parameters": { - "chart": { - "type": "string", - "description": "The chart parameter identifies the chart that you want to retrieve.", - "enum": [ - "mostPopular" - ], - "enumDescriptions": [ - "Return the most popular videos for the specified content region and video category." - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "The hl parameter instructs the API to retrieve localized resource metadata for a specific application language that the YouTube website supports. 
The parameter value must be a language code included in the list returned by the i18nLanguages.list method.\n\nIf localized resource details are available in that language, the resource's snippet.localized object will contain the localized values. However, if localized details are not available, the snippet.localized object will contain resource details in the resource's default language.", - "location": "query" - }, - "id": { - "type": "string", - "description": "The id parameter specifies a comma-separated list of the YouTube video ID(s) for the resource(s) that are being retrieved. In a video resource, the id property specifies the video's ID.", - "location": "query" - }, - "locale": { - "type": "string", - "description": "DEPRECATED", - "location": "query" - }, - "maxHeight": { - "type": "integer", - "description": "The maxHeight parameter specifies a maximum height of the embedded player. If maxWidth is provided, maxHeight may not be reached in order to not violate the width request.", - "format": "uint32", - "minimum": "72", - "maximum": "8192", - "location": "query" - }, - "maxResults": { - "type": "integer", - "description": "The maxResults parameter specifies the maximum number of items that should be returned in the result set.\n\nNote: This parameter is supported for use in conjunction with the myRating and chart parameters, but it is not supported for use in conjunction with the id parameter.", - "default": "5", - "format": "uint32", - "minimum": "1", - "maximum": "50", - "location": "query" - }, - "maxWidth": { - "type": "integer", - "description": "The maxWidth parameter specifies a maximum width of the embedded player. If maxHeight is provided, maxWidth may not be reached in order to not violate the height request.", - "format": "uint32", - "minimum": "72", - "maximum": "8192", - "location": "query" - }, - "myRating": { - "type": "string", - "description": "Set this parameter's value to like or dislike to instruct the API to only return videos liked or disliked by the authenticated user.", - "enum": [ - "dislike", - "like" - ], - "enumDescriptions": [ - "Returns only videos disliked by the authenticated user.", - "Returns only video liked by the authenticated user." - ], - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "pageToken": { - "type": "string", - "description": "The pageToken parameter identifies a specific page in the result set that should be returned. 
In an API response, the nextPageToken and prevPageToken properties identify other pages that could be retrieved.\n\nNote: This parameter is supported for use in conjunction with the myRating and chart parameters, but it is not supported for use in conjunction with the id parameter.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter specifies a comma-separated list of one or more video resource properties that the API response will include.\n\nIf the parameter identifies a property that contains child properties, the child properties will be included in the response. For example, in a video resource, the snippet property contains the channelId, title, description, tags, and categoryId properties. As such, if you set part=snippet, the API response will contain all of those properties.", - "required": true, - "location": "query" - }, - "regionCode": { - "type": "string", - "description": "The regionCode parameter instructs the API to select a video chart available in the specified region. This parameter can only be used in conjunction with the chart parameter. The parameter value is an ISO 3166-1 alpha-2 country code.", - "location": "query" - }, - "videoCategoryId": { - "type": "string", - "description": "The videoCategoryId parameter identifies the video category for which the chart should be retrieved. This parameter can only be used in conjunction with the chart parameter. By default, charts are not restricted to a particular category.", - "default": "0", - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "response": { - "$ref": "VideoListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.readonly", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "rate": { - "id": "youtube.videos.rate", - "path": "videos/rate", - "httpMethod": "POST", - "description": "Add a like or dislike rating to a video or remove a rating from a video.", - "parameters": { - "id": { - "type": "string", - "description": "The id parameter specifies the YouTube video ID of the video that is being rated or having its rating removed.", - "required": true, - "location": "query" - }, - "rating": { - "type": "string", - "description": "Specifies the rating to record.", - "required": true, - "enum": [ - "dislike", - "like", - "none" - ], - "enumDescriptions": [ - "Records that the authenticated user disliked the video.", - "Records that the authenticated user liked the video.", - "Removes any rating that the authenticated user had previously set for the video." - ], - "location": "query" - } - }, - "parameterOrder": [ - "id", - "rating" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "reportAbuse": { - "id": "youtube.videos.reportAbuse", - "path": "videos/reportAbuse", - "httpMethod": "POST", - "description": "Report abuse for a video.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. 
It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - } - }, - "request": { - "$ref": "VideoAbuseReport" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - }, - "update": { - "id": "youtube.videos.update", - "path": "videos", - "httpMethod": "PUT", - "description": "Updates a video's metadata.", - "parameters": { - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The actual CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - }, - "part": { - "type": "string", - "description": "The part parameter serves two purposes in this operation. It identifies the properties that the write operation will set as well as the properties that the API response will include.\n\nNote that this method will override the existing values for all of the mutable properties that are contained in any parts that the parameter value specifies. For example, a video's privacy setting is contained in the status part. As such, if your request is updating a private video, and the request's part parameter value includes the status part, the video's privacy setting will be updated to whatever value the request body specifies. If the request body does not specify a value, the existing privacy setting will be removed and the video will revert to the default privacy setting.\n\nIn addition, not all parts contain properties that can be set when inserting or updating a video. For example, the statistics object encapsulates statistics that YouTube calculates for a video and does not contain values that you can set or modify. 
If the parameter value specifies a part that does not contain mutable values, that part will still be included in the API response.", - "required": true, - "location": "query" - } - }, - "parameterOrder": [ - "part" - ], - "request": { - "$ref": "Video" - }, - "response": { - "$ref": "Video" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - } - } - }, - "watermarks": { - "methods": { - "set": { - "id": "youtube.watermarks.set", - "path": "watermarks/set", - "httpMethod": "POST", - "description": "Uploads a watermark image to YouTube and sets it for a channel.", - "parameters": { - "channelId": { - "type": "string", - "description": "The channelId parameter specifies the YouTube channel ID for which the watermark is being provided.", - "required": true, - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - } - }, - "parameterOrder": [ - "channelId" - ], - "request": { - "$ref": "InvideoBranding" - }, - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtube.upload", - "https://www.googleapis.com/auth/youtubepartner" - ], - "supportsMediaUpload": true, - "mediaUpload": { - "accept": [ - "application/octet-stream", - "image/jpeg", - "image/png" - ], - "maxSize": "10MB", - "protocols": { - "simple": { - "multipart": true, - "path": "/upload/youtube/v3/watermarks/set" - }, - "resumable": { - "multipart": true, - "path": "/resumable/upload/youtube/v3/watermarks/set" - } - } - } - }, - "unset": { - "id": "youtube.watermarks.unset", - "path": "watermarks/unset", - "httpMethod": "POST", - "description": "Deletes a channel's watermark image.", - "parameters": { - "channelId": { - "type": "string", - "description": "The channelId parameter specifies the YouTube channel ID for which the watermark is being unset.", - "required": true, - "location": "query" - }, - "onBehalfOfContentOwner": { - "type": "string", - "description": "Note: This parameter is intended exclusively for YouTube content partners.\n\nThe onBehalfOfContentOwner parameter indicates that the request's authorization credentials identify a YouTube CMS user who is acting on behalf of the content owner specified in the parameter value. This parameter is intended for YouTube content partners that own and manage many different YouTube channels. It allows content owners to authenticate once and get access to all their video and channel data, without having to provide authentication credentials for each individual channel. 
The CMS account that the user authenticates with must be linked to the specified YouTube content owner.", - "location": "query" - } - }, - "parameterOrder": [ - "channelId" - ], - "scopes": [ - "https://www.googleapis.com/auth/youtube", - "https://www.googleapis.com/auth/youtube.force-ssl", - "https://www.googleapis.com/auth/youtubepartner" - ] - } - } - } - } -} diff --git a/test/data/youtube-playlistid.json b/test/data/youtube-playlistid.json deleted file mode 100644 index b69205f434..0000000000 --- a/test/data/youtube-playlistid.json +++ /dev/null @@ -1 +0,0 @@ -{"regionCode": "US", "kind": "youtube#searchListResponse", "etag": "\"m2yskBQFythfE4irbTIeOgYYfBU/eoV8llUEbIu5LXnwqBaLOkOK0Hg\"", "pageInfo": {"resultsPerPage": 1, "totalResults": 1}, "items": [{"snippet": {"thumbnails": {"default": {"url": "https://i.ytimg.com/vi/lhYWB5FFkg4/default.jpg", "width": 120, "height": 90}, "high": {"url": "https://i.ytimg.com/vi/lhYWB5FFkg4/hqdefault.jpg", "width": 480, "height": 360}, "medium": {"url": "https://i.ytimg.com/vi/lhYWB5FFkg4/mqdefault.jpg", "width": 320, "height": 180}}, "title": "IETF98", "channelId": "UC8dtK9njBLdFnBahHFp0eZQ", "publishedAt": "2017-03-30T12:41:04.000Z", "liveBroadcastContent": "none", "channelTitle": "IETF - Internet Engineering Task Force", "description": "Videos from the IETF 98 Meeting held in Chicago, Illinois, United States 26-31 March 2017."}, "kind": "youtube#searchResult", "etag": "\"m2yskBQFythfE4irbTIeOgYYfBU/X3dbZGRvgpvedtOP0KLGhZLg5UI\"", "id": {"kind": "youtube#playlist", "playlistId": "PLC86T-test"}}]} diff --git a/test/data/youtube-playlistitems.json b/test/data/youtube-playlistitems.json deleted file mode 100644 index 4b42e3d21b..0000000000 --- a/test/data/youtube-playlistitems.json +++ /dev/null @@ -1 +0,0 @@ -{"items": [{"snippet": {"playlistId": "PLC86T-6ZTP5jo6kIuqdyeYYhsKv9sUwG1", "thumbnails": {"default": {"url": "https://i.ytimg.com/vi/lhYWB5FFkg4/default.jpg", "width": 120, "height": 90}, "high": {"url": "https://i.ytimg.com/vi/lhYWB5FFkg4/hqdefault.jpg", "width": 480, "height": 360}, "medium": {"url": "https://i.ytimg.com/vi/lhYWB5FFkg4/mqdefault.jpg", "width": 320, "height": 180}, "maxres": {"url": "https://i.ytimg.com/vi/lhYWB5FFkg4/maxresdefault.jpg", "width": 1280, "height": 720}, "standard": {"url": "https://i.ytimg.com/vi/lhYWB5FFkg4/sddefault.jpg", "width": 640, "height": 480}}, "title": "IETF98 Wrap Up", "resourceId": {"kind": "youtube#video", "videoId": "lhYWB5FFkg4"}, "channelId": "UC8dtK9njBLdFnBahHFp0eZQ", "publishedAt": "2017-04-06T13:32:39.000Z", "channelTitle": "IETF - Internet Engineering Task Force", "position": 0, "description": "Jari Arkko and Alissa Cooper recap some highlights the IETF 98 meeting held 26-31 March 2017 in Chicago, Illinois, United States"}, "kind": "youtube#playlistItem", "etag": "\"m2yskBQFythfE4irbTIeOgYYfBU/eW_De3gQF2fRzN_rPBbX-kY7oBI\"", "id": "UExDODZULTZaVFA1am82a0l1cWR5ZVlZaHNLdjlzVXdHMS40OTQ5QjlEMDgzN0FBNUIw"}, {"snippet": {"playlistId": "PLC86T-6ZTP5jo6kIuqdyeYYhsKv9sUwG1", "thumbnails": {"default": {"url": "https://i.ytimg.com/vi/lPSTcBITbvs/default.jpg", "width": 120, "height": 90}, "high": {"url": "https://i.ytimg.com/vi/lPSTcBITbvs/hqdefault.jpg", "width": 480, "height": 360}, "medium": {"url": "https://i.ytimg.com/vi/lPSTcBITbvs/mqdefault.jpg", "width": 320, "height": 180}}, "title": "IETF 98 - QUIC Tutorial", "resourceId": {"kind": "youtube#video", "videoId": "lPSTcBITbvs"}, "channelId": "UC8dtK9njBLdFnBahHFp0eZQ", "publishedAt": "2017-03-30T12:41:35.000Z", "channelTitle": "IETF - Internet 
Engineering Task Force", "position": 1, "description": "A tutorial about the new QUIC protocol"}, "kind": "youtube#playlistItem", "etag": "\"m2yskBQFythfE4irbTIeOgYYfBU/GhKVt6zTuEpFavgtf9GWlWuzX9s\"", "id": "UExDODZULTZaVFA1am82a0l1cWR5ZVlZaHNLdjlzVXdHMS41NkI0NEY2RDEwNTU3Q0M2"}], "kind": "youtube#playlistItemListResponse", "etag": "\"m2yskBQFythfE4irbTIeOgYYfBU/jlFue-jZVpFMOuLUXQZH4Y0Lh3Y\"", "pageInfo": {"resultsPerPage": 2, "totalResults": 110}} diff --git a/test/lib/.gitignore b/test/lib/.gitignore deleted file mode 100644 index 330b0c2b64..0000000000 --- a/test/lib/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/django diff --git a/test/lib/README b/test/lib/README deleted file mode 100644 index 2dda507f05..0000000000 --- a/test/lib/README +++ /dev/null @@ -1,9 +0,0 @@ -This directory will be used to set up packages used for testing if they need any -special handling which should not be applied to the system-wide setup. - -For instance, many of the tests to be run on the Django application should be run -with a standard Django environment; but there are some Django test features which -are broken in 0.9.6, and need patching in order to do the testing, like the ability -to create a test database to run unit tests, according to this issue and patch: -http://code.djangoproject.com/changeset/5106 - diff --git a/test/media/floor/.gitignore b/test/media/floor/.gitignore deleted file mode 100644 index 33662f5545..0000000000 --- a/test/media/floor/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/* diff --git a/test/media/photo/.gitignore b/test/media/photo/.gitignore deleted file mode 100644 index 33662f5545..0000000000 --- a/test/media/photo/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/* diff --git a/test/settings_local_test.py b/test/settings_local_test.py index 06d810e4af..7097f76459 100644 --- a/test/settings_local_test.py +++ b/test/settings_local_test.py @@ -5,6 +5,5 @@ SERVER_MODE = 'test' -IPR_DOCUMENT_PATH = '/home/ietf/adm/IPR/' SITE_ID = 1 diff --git a/vite.config.js b/vite.config.js index 41a2cb02e0..bde2b9ed57 100644 --- a/vite.config.js +++ b/vite.config.js @@ -16,7 +16,8 @@ export default defineConfig(({ command, mode }) => { main: 'client/main.js', embedded: 'client/embedded.js' } - } + }, + sourcemap: true }, cacheDir: '.vite', plugins: [ diff --git a/yarn.lock b/yarn.lock index d59df5441b..47d675d6b9 100644 --- a/yarn.lock +++ b/yarn.lock @@ -39,12 +39,12 @@ __metadata: languageName: node linkType: hard -"@babel/parser@npm:^7.20.15, @babel/parser@npm:^7.21.3": - version: 7.22.4 - resolution: "@babel/parser@npm:7.22.4" +"@babel/parser@npm:^7.23.9": + version: 7.23.9 + resolution: "@babel/parser@npm:7.23.9" bin: parser: ./bin/babel-parser.js - checksum: 0ca6d3a2d9aae2504ba1bc494704b64a83140884f7379f609de69bd39b60adb58a4f8ec692fe53fef8657dd82705d01b7e6efb65e18296326bdd66f71d52d9a9 + checksum: e7cd4960ac8671774e13803349da88d512f9292d7baa952173260d3e8f15620a28a3701f14f709d769209022f9e7b79965256b8be204fc550cfe783cdcabe7c7 languageName: node linkType: hard @@ -282,10 +282,10 @@ __metadata: languageName: node linkType: hard -"@eslint-community/regexpp@npm:^4.5.0": - version: 4.5.1 - resolution: "@eslint-community/regexpp@npm:4.5.1" - checksum: 6d901166d64998d591fab4db1c2f872981ccd5f6fe066a1ad0a93d4e11855ecae6bfb76660869a469563e8882d4307228cebd41142adb409d182f2966771e57e +"@eslint-community/regexpp@npm:^4.6.0": + version: 4.10.0 + resolution: "@eslint-community/regexpp@npm:4.10.0" + checksum: 
2a6e345429ea8382aaaf3a61f865cae16ed44d31ca917910033c02dc00d505d939f10b81e079fa14d43b51499c640138e153b7e40743c4c094d9df97d4e56f7b languageName: node linkType: hard @@ -296,9 +296,9 @@ __metadata: languageName: node linkType: hard -"@eslint/eslintrc@npm:^2.1.2": - version: 2.1.2 - resolution: "@eslint/eslintrc@npm:2.1.2" +"@eslint/eslintrc@npm:^2.1.4": + version: 2.1.4 + resolution: "@eslint/eslintrc@npm:2.1.4" dependencies: ajv: ^6.12.4 debug: ^4.3.2 @@ -309,14 +309,14 @@ __metadata: js-yaml: ^4.1.0 minimatch: ^3.1.2 strip-json-comments: ^3.1.1 - checksum: bc742a1e3b361f06fedb4afb6bf32cbd27171292ef7924f61c62f2aed73048367bcc7ac68f98c06d4245cd3fabc43270f844e3c1699936d4734b3ac5398814a7 + checksum: 10957c7592b20ca0089262d8c2a8accbad14b4f6507e35416c32ee6b4dbf9cad67dfb77096bbd405405e9ada2b107f3797fe94362e1c55e0b09d6e90dd149127 languageName: node linkType: hard -"@eslint/js@npm:8.51.0": - version: 8.51.0 - resolution: "@eslint/js@npm:8.51.0" - checksum: 0228bf1e1e0414843e56d9ff362a2a72d579c078f93174666f29315690e9e30a8633ad72c923297f7fd7182381b5a476805ff04dac8debe638953eb1ded3ac73 +"@eslint/js@npm:8.57.0": + version: 8.57.0 + resolution: "@eslint/js@npm:8.57.0" + checksum: 315dc65b0e9893e2bff139bddace7ea601ad77ed47b4550e73da8c9c2d2766c7a575c3cddf17ef85b8fd6a36ff34f91729d0dcca56e73ca887c10df91a41b0bb languageName: node linkType: hard @@ -346,89 +346,89 @@ __metadata: languageName: node linkType: hard -"@fullcalendar/bootstrap5@npm:6.1.9": - version: 6.1.9 - resolution: "@fullcalendar/bootstrap5@npm:6.1.9" +"@fullcalendar/bootstrap5@npm:6.1.11": + version: 6.1.11 + resolution: "@fullcalendar/bootstrap5@npm:6.1.11" peerDependencies: - "@fullcalendar/core": ~6.1.9 - checksum: 1d6168fafcdfa294cba8de978aae3e019bf84bcd1560b6038b9da5ede7653bfbed54f8b7f6b9bdb4b34fc769f3bcc48a8b101dc1a7a8b13a91238647dd2cd331 + "@fullcalendar/core": ~6.1.11 + checksum: a0c3b9434668f0ba9b19765d13ff53bbc536ac530dc4303ed7a0812f1dafd7ed094073328cdfd58608ff00e13fe9e42f38d8314372642a14e17fb582bfb6eb24 languageName: node linkType: hard -"@fullcalendar/core@npm:6.1.9": - version: 6.1.9 - resolution: "@fullcalendar/core@npm:6.1.9" +"@fullcalendar/core@npm:6.1.11": + version: 6.1.11 + resolution: "@fullcalendar/core@npm:6.1.11" dependencies: preact: ~10.12.1 - checksum: 836db3e40cd441faab974665a454e46f8e5288d6a5fcb38193ae95c19ec3dc639c48c944a69d633ec64e73f7b6752d76f92363c3c6ca8c08dbd88d84b160ab77 + checksum: 0078a6f96b06a637de08ba28a317bbcbf7768f53ce7891faa2a656ca2bed0e887e555d6f3203b77d6c271ccb128fa85d592411fcfd87746514a5cec68376ad87 languageName: node linkType: hard -"@fullcalendar/daygrid@npm:6.1.9, @fullcalendar/daygrid@npm:~6.1.9": - version: 6.1.9 - resolution: "@fullcalendar/daygrid@npm:6.1.9" +"@fullcalendar/daygrid@npm:6.1.11, @fullcalendar/daygrid@npm:~6.1.11": + version: 6.1.11 + resolution: "@fullcalendar/daygrid@npm:6.1.11" peerDependencies: - "@fullcalendar/core": ~6.1.9 - checksum: 3db55247c4e1911462c37ff0448fcef8be132c5a0a0eada9ec00aef6c203f2d4fdffb82131566318e6eec2848b9ad6b8174b5e36c307106fa0c48071942096f4 + "@fullcalendar/core": ~6.1.11 + checksum: 6eb5606de58b7a8ec30d96618a6d15b2c0d7108c94593ff94e81a8d87ce8efb1f29f3849c6c3f2b8ae56198ffe6235e2ec0e4a1270993c022dc194016e595685 languageName: node linkType: hard -"@fullcalendar/icalendar@npm:6.1.9": - version: 6.1.9 - resolution: "@fullcalendar/icalendar@npm:6.1.9" +"@fullcalendar/icalendar@npm:6.1.11": + version: 6.1.11 + resolution: "@fullcalendar/icalendar@npm:6.1.11" peerDependencies: - "@fullcalendar/core": ~6.1.9 + "@fullcalendar/core": ~6.1.11 ical.js: ^1.4.0 - 
checksum: d47daf4ae0bb9ee60672c21373da042bc115762e61377c97f29dd8f9d9821e5181c80841480f81259e5d3dc0588e6adc0c6be38cc5913f11cd684e9a42939970 + checksum: 4e6eff15a81dda9d275ba555a0b4648a1410c1504694915a1669eb3c1c2299e1bce2817b78dbf33378621972bb0bc90a1d1f53515dc071b5f5abf79d10d1854a languageName: node linkType: hard -"@fullcalendar/interaction@npm:6.1.9": - version: 6.1.9 - resolution: "@fullcalendar/interaction@npm:6.1.9" +"@fullcalendar/interaction@npm:6.1.11": + version: 6.1.11 + resolution: "@fullcalendar/interaction@npm:6.1.11" peerDependencies: - "@fullcalendar/core": ~6.1.9 - checksum: 787111ea6f115a8a43b2d336bf9e20c5ea7fe6d2b71522025cdfec7682dd3207398a9321f65ef5b5af2495fb89e749136ebba23e79750112faf48fac0d05e926 + "@fullcalendar/core": ~6.1.11 + checksum: c67d4cfa0b158b848fb482835c5f44c52650037a4b912e16e2ea1955bf476c847d0ec95aea79b37b78207b2da3a7c4d2b37bd5c8b15a89bdd5e3b7ae3b7af9ba languageName: node linkType: hard -"@fullcalendar/list@npm:6.1.9": - version: 6.1.9 - resolution: "@fullcalendar/list@npm:6.1.9" +"@fullcalendar/list@npm:6.1.11": + version: 6.1.11 + resolution: "@fullcalendar/list@npm:6.1.11" peerDependencies: - "@fullcalendar/core": ~6.1.9 - checksum: 978dd54b7131369d023e4d8a0e97b986a89a986b94a0d71dc6e9782e60e6c268184f2c596dcc7fa0580b143bfd39390a40ea4c9114afd1fa2eca5c48a7b0aaab + "@fullcalendar/core": ~6.1.11 + checksum: 84a8cd6e63407e8fb95b4b2810a49c8815d9491a298a4761b9399cc8384abebf6227cc5ec93b942783f6ea6c6bcb4e94844fd5a12d73700e535f4f15ee02b7d6 languageName: node linkType: hard -"@fullcalendar/luxon3@npm:6.1.9": - version: 6.1.9 - resolution: "@fullcalendar/luxon3@npm:6.1.9" +"@fullcalendar/luxon3@npm:6.1.11": + version: 6.1.11 + resolution: "@fullcalendar/luxon3@npm:6.1.11" peerDependencies: - "@fullcalendar/core": ~6.1.9 + "@fullcalendar/core": ~6.1.11 luxon: ^3.0.0 - checksum: 25122126e290fb15400c57972552d5abe64b2c6fbc41e290ccfada2280d2aca855c53ea6b6b1efc4c2d1b5aafa97f37b71395ad2b642d3411ddd285335f9467f + checksum: 8e7f45aab2e2235b2027ca99aeabb35a91f0b2fcb608d52357abb582b4640ed8a0d7a4569ffa25628fbe04d2ee13051ec66304c6abef7cd7a364fa173db09ab7 languageName: node linkType: hard -"@fullcalendar/timegrid@npm:6.1.9": - version: 6.1.9 - resolution: "@fullcalendar/timegrid@npm:6.1.9" +"@fullcalendar/timegrid@npm:6.1.11": + version: 6.1.11 + resolution: "@fullcalendar/timegrid@npm:6.1.11" dependencies: - "@fullcalendar/daygrid": ~6.1.9 + "@fullcalendar/daygrid": ~6.1.11 peerDependencies: - "@fullcalendar/core": ~6.1.9 - checksum: 8c12a508f7c96be7897ba7496e2253efd1a3a01a8e7e7049bc5f1832fbbb04dec55d32cb41f8e2f03bc1f8d4a404508c48548a7b8f66684db36093e3ee12b412 + "@fullcalendar/core": ~6.1.11 + checksum: 4a11e6dd908e7d7f660149e6d61eff847efa14d0dcf532f8793de6b035d1a573ef7423fea0df791b6dc5f3d9792df77b72c7e6a1150289d04eca3ff9959a80ec languageName: node linkType: hard -"@fullcalendar/vue3@npm:6.1.9": - version: 6.1.9 - resolution: "@fullcalendar/vue3@npm:6.1.9" +"@fullcalendar/vue3@npm:6.1.11": + version: 6.1.11 + resolution: "@fullcalendar/vue3@npm:6.1.11" peerDependencies: - "@fullcalendar/core": ~6.1.9 + "@fullcalendar/core": ~6.1.11 vue: ^3.0.11 - checksum: 2c1c0fbe724a40af53d7f9a4409058db1cdd61a708f09fc1947d0f5d60b3eff293e3cbf6d66fbfee57cffe89008c23c536efb6f8a80038343c5f8a60ad0c4575 + checksum: 5891a596e92269151cb62feaaffdc87ac8ad55b277e8bbad435855ab872fabb2f88766b8bc0659745c5205e3550a2c8923c5fc990ade8401de2ed6a2a9c5701e languageName: node linkType: hard @@ -448,14 +448,14 @@ __metadata: languageName: node linkType: hard -"@humanwhocodes/config-array@npm:^0.11.11": - version: 
0.11.11 - resolution: "@humanwhocodes/config-array@npm:0.11.11" +"@humanwhocodes/config-array@npm:^0.11.14": + version: 0.11.14 + resolution: "@humanwhocodes/config-array@npm:0.11.14" dependencies: - "@humanwhocodes/object-schema": ^1.2.1 - debug: ^4.1.1 + "@humanwhocodes/object-schema": ^2.0.2 + debug: ^4.3.1 minimatch: ^3.0.5 - checksum: db84507375ab77b8ffdd24f498a5b49ad6b64391d30dd2ac56885501d03964d29637e05b1ed5aefa09d57ac667e28028bc22d2da872bfcd619652fbdb5f4ca19 + checksum: 861ccce9eaea5de19546653bccf75bf09fe878bc39c3aab00aeee2d2a0e654516adad38dd1098aab5e3af0145bbcbf3f309bdf4d964f8dab9dcd5834ae4c02f2 languageName: node linkType: hard @@ -466,10 +466,10 @@ __metadata: languageName: node linkType: hard -"@humanwhocodes/object-schema@npm:^1.2.1": - version: 1.2.1 - resolution: "@humanwhocodes/object-schema@npm:1.2.1" - checksum: a824a1ec31591231e4bad5787641f59e9633827d0a2eaae131a288d33c9ef0290bd16fda8da6f7c0fcb014147865d12118df10db57f27f41e20da92369fcb3f1 +"@humanwhocodes/object-schema@npm:^2.0.2": + version: 2.0.2 + resolution: "@humanwhocodes/object-schema@npm:2.0.2" + checksum: 2fc11503361b5fb4f14714c700c02a3f4c7c93e9acd6b87a29f62c522d90470f364d6161b03d1cc618b979f2ae02aed1106fd29d302695d8927e2fc8165ba8ee languageName: node linkType: hard @@ -508,7 +508,7 @@ __metadata: languageName: node linkType: hard -"@jridgewell/sourcemap-codec@npm:^1.4.13": +"@jridgewell/sourcemap-codec@npm:^1.4.15": version: 1.4.15 resolution: "@jridgewell/sourcemap-codec@npm:1.4.15" checksum: b881c7e503db3fc7f3c1f35a1dd2655a188cc51a3612d76efc8a6eb74728bef5606e6758ee77423e564092b4a518aba569bbb21c9bac5ab7a35b0c6ae7e344c8 @@ -532,6 +532,20 @@ __metadata: languageName: node linkType: hard +"@kurkle/color@npm:0.3.1": + version: 0.3.1 + resolution: "@kurkle/color@npm:0.3.1" + checksum: e6be5c081bf5acfd4a1803dcd5a0733caf450e73148d5f02dc536b1ff0c60c959c23472a26c9c3c6c78ada04fb6a53c9202db9b2de8ea56f6eeec381f9cc3a1a + languageName: node + linkType: hard + +"@kurkle/color@npm:^0.3.0": + version: 0.3.4 + resolution: "@kurkle/color@npm:0.3.4" + checksum: b95c6abe0241ba1745b3c84de3b464296b95ce577110b54f46e6c6dcc9a0966491533df43812bd6c66f92cf818e385d1390b280cd5851d4afb52fc37f8a6c0b9 + languageName: node + linkType: hard + "@lezer/common@npm:^0.15.0, @lezer/common@npm:^0.15.7": version: 0.15.12 resolution: "@lezer/common@npm:0.15.12" @@ -774,31 +788,31 @@ __metadata: languageName: node linkType: hard -"@parcel/bundler-default@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/bundler-default@npm:2.10.0" +"@parcel/bundler-default@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/bundler-default@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/graph": 3.0.0 - "@parcel/plugin": 2.10.0 - "@parcel/rust": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/graph": 3.2.0 + "@parcel/plugin": 2.12.0 + "@parcel/rust": 2.12.0 + "@parcel/utils": 2.12.0 nullthrows: ^1.1.1 - checksum: 58d361992805758798f3e64c4c38a584bcb1ae1427714c4c0fa66aa6d9dbe4a85b4ea861a1ebd8a9d781f1fa73d5522027c52c524fa1deceabed929f7f1279e7 + checksum: f211a76f55dc34918715c5f1911660cfe0461a55a975929fd419a57423c97eeb4f6db9c14775fc078f6879916cef185f468a1e97077d13a76cf735dc1c885892 languageName: node linkType: hard -"@parcel/cache@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/cache@npm:2.10.0" +"@parcel/cache@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/cache@npm:2.12.0" dependencies: - "@parcel/fs": 2.10.0 - "@parcel/logger": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/fs": 2.12.0 + "@parcel/logger": 2.12.0 
+ "@parcel/utils": 2.12.0 lmdb: 2.8.5 peerDependencies: - "@parcel/core": ^2.10.0 - checksum: 209d474abd5175309aaae06f4cbaebc7d1aadee260702e26268b7889d74132430c8ea52b1cd1829a98b60474856f671d26b99e698bd8a66a08d2c58d3e1ba264 + "@parcel/core": ^2.12.0 + checksum: a45e7998098c4ad31e8a55ea242b50ec638fb3d4614293cf1910a6f227ccc8e324ab56a7486d66d88a6e6d9f2a68621450e42d95dde3d1e986f4918e8f8e0912 languageName: node linkType: hard @@ -816,12 +830,12 @@ __metadata: languageName: node linkType: hard -"@parcel/codeframe@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/codeframe@npm:2.10.0" +"@parcel/codeframe@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/codeframe@npm:2.12.0" dependencies: chalk: ^4.1.0 - checksum: d87b17d3ce1c88652b3d66ee873e0578aadf90e718bcafaf16c7b39e0272cf61851530d9a3cda7482702afb9058a54f497b9e3cc706f2c36904ca0a6182ae2a1 + checksum: 265c4d7ebee57323c0ff6f28f9cbb1a4b988409a6317eddc1d98d779f3221338739513106f2247d4cd3d6f6edd642f0719e7663d6a2fd98361fdb87bc72666f0 languageName: node linkType: hard @@ -834,75 +848,75 @@ __metadata: languageName: node linkType: hard -"@parcel/compressor-raw@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/compressor-raw@npm:2.10.0" - dependencies: - "@parcel/plugin": 2.10.0 - checksum: 043fca0ecb9e574300045dce5b5a91d604e19682329a6be8e7e6c9365fa58f28415fd1a8353adcb98140395d789c3c1f764c092f717d4795df8de02b597cf57d - languageName: node - linkType: hard - -"@parcel/config-default@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/config-default@npm:2.10.0" - dependencies: - "@parcel/bundler-default": 2.10.0 - "@parcel/compressor-raw": 2.10.0 - "@parcel/namer-default": 2.10.0 - "@parcel/optimizer-css": 2.10.0 - "@parcel/optimizer-htmlnano": 2.10.0 - "@parcel/optimizer-image": 2.10.0 - "@parcel/optimizer-svgo": 2.10.0 - "@parcel/optimizer-swc": 2.10.0 - "@parcel/packager-css": 2.10.0 - "@parcel/packager-html": 2.10.0 - "@parcel/packager-js": 2.10.0 - "@parcel/packager-raw": 2.10.0 - "@parcel/packager-svg": 2.10.0 - "@parcel/packager-wasm": 2.10.0 - "@parcel/reporter-dev-server": 2.10.0 - "@parcel/resolver-default": 2.10.0 - "@parcel/runtime-browser-hmr": 2.10.0 - "@parcel/runtime-js": 2.10.0 - "@parcel/runtime-react-refresh": 2.10.0 - "@parcel/runtime-service-worker": 2.10.0 - "@parcel/transformer-babel": 2.10.0 - "@parcel/transformer-css": 2.10.0 - "@parcel/transformer-html": 2.10.0 - "@parcel/transformer-image": 2.10.0 - "@parcel/transformer-js": 2.10.0 - "@parcel/transformer-json": 2.10.0 - "@parcel/transformer-postcss": 2.10.0 - "@parcel/transformer-posthtml": 2.10.0 - "@parcel/transformer-raw": 2.10.0 - "@parcel/transformer-react-refresh-wrap": 2.10.0 - "@parcel/transformer-svg": 2.10.0 +"@parcel/compressor-raw@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/compressor-raw@npm:2.12.0" + dependencies: + "@parcel/plugin": 2.12.0 + checksum: 16c56704f33a91f7694a1a6b7ab157d731331123cbb32faf1ab09356327f7214fd2eb3c54babc120f7f41dded8742a6e58b524b5f410d3ef1bc47aaf47bc75c8 + languageName: node + linkType: hard + +"@parcel/config-default@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/config-default@npm:2.12.0" + dependencies: + "@parcel/bundler-default": 2.12.0 + "@parcel/compressor-raw": 2.12.0 + "@parcel/namer-default": 2.12.0 + "@parcel/optimizer-css": 2.12.0 + "@parcel/optimizer-htmlnano": 2.12.0 + "@parcel/optimizer-image": 2.12.0 + "@parcel/optimizer-svgo": 2.12.0 + "@parcel/optimizer-swc": 2.12.0 + "@parcel/packager-css": 2.12.0 + "@parcel/packager-html": 2.12.0 + "@parcel/packager-js": 2.12.0 + "@parcel/packager-raw": 2.12.0 
+ "@parcel/packager-svg": 2.12.0 + "@parcel/packager-wasm": 2.12.0 + "@parcel/reporter-dev-server": 2.12.0 + "@parcel/resolver-default": 2.12.0 + "@parcel/runtime-browser-hmr": 2.12.0 + "@parcel/runtime-js": 2.12.0 + "@parcel/runtime-react-refresh": 2.12.0 + "@parcel/runtime-service-worker": 2.12.0 + "@parcel/transformer-babel": 2.12.0 + "@parcel/transformer-css": 2.12.0 + "@parcel/transformer-html": 2.12.0 + "@parcel/transformer-image": 2.12.0 + "@parcel/transformer-js": 2.12.0 + "@parcel/transformer-json": 2.12.0 + "@parcel/transformer-postcss": 2.12.0 + "@parcel/transformer-posthtml": 2.12.0 + "@parcel/transformer-raw": 2.12.0 + "@parcel/transformer-react-refresh-wrap": 2.12.0 + "@parcel/transformer-svg": 2.12.0 peerDependencies: - "@parcel/core": ^2.10.0 - checksum: d780d05021c87d77d3aff116f4679a41e4a042f7b41c24ce114d320ecc3d9784faeed90c9ccb1c8465106e73b1174df1002218b3fa313a3f3f66d92a0bce3e8c + "@parcel/core": ^2.12.0 + checksum: 72877c5dc432d6f6a8ffe8dba1342a6c0c2f615d9346f78f654adc61b62cecb4cc425726ee7a088d86894742397b4fb25cfeee7abd1ad6cbe2cfd5d77cd5a781 languageName: node linkType: hard -"@parcel/core@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/core@npm:2.10.0" +"@parcel/core@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/core@npm:2.12.0" dependencies: "@mischnic/json-sourcemap": ^0.1.0 - "@parcel/cache": 2.10.0 - "@parcel/diagnostic": 2.10.0 - "@parcel/events": 2.10.0 - "@parcel/fs": 2.10.0 - "@parcel/graph": 3.0.0 - "@parcel/logger": 2.10.0 - "@parcel/package-manager": 2.10.0 - "@parcel/plugin": 2.10.0 - "@parcel/profiler": 2.10.0 - "@parcel/rust": 2.10.0 + "@parcel/cache": 2.12.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/events": 2.12.0 + "@parcel/fs": 2.12.0 + "@parcel/graph": 3.2.0 + "@parcel/logger": 2.12.0 + "@parcel/package-manager": 2.12.0 + "@parcel/plugin": 2.12.0 + "@parcel/profiler": 2.12.0 + "@parcel/rust": 2.12.0 "@parcel/source-map": ^2.1.1 - "@parcel/types": 2.10.0 - "@parcel/utils": 2.10.0 - "@parcel/workers": 2.10.0 + "@parcel/types": 2.12.0 + "@parcel/utils": 2.12.0 + "@parcel/workers": 2.12.0 abortcontroller-polyfill: ^1.1.9 base-x: ^3.0.8 browserslist: ^4.6.6 @@ -910,10 +924,10 @@ __metadata: dotenv: ^7.0.0 dotenv-expand: ^5.1.0 json5: ^2.2.0 - msgpackr: ^1.5.4 + msgpackr: ^1.9.9 nullthrows: ^1.1.1 semver: ^7.5.2 - checksum: c59c2971ea6edc379fc166fefe60a6cbbbea0ee1c84120b7396c5b1a6bcfac2e0409647a007b38d6da27285b43d6582144f136a4b8f1652d01df589f16ffa6f0 + checksum: 5bf674630833a157867a5d0b5448cb36ab82fcabdc8f0486efbf896f6321e7b224d6e2b724cebdca2f227690a55d085bd1c89cb1430e2ebcd3583876e33cacce languageName: node linkType: hard @@ -949,13 +963,13 @@ __metadata: languageName: node linkType: hard -"@parcel/diagnostic@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/diagnostic@npm:2.10.0" +"@parcel/diagnostic@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/diagnostic@npm:2.12.0" dependencies: "@mischnic/json-sourcemap": ^0.1.0 nullthrows: ^1.1.1 - checksum: 45c606ca52316433524060b4297b0d34a1b971a94bbd5e9e282aeaac3abb3d9f0839a97a7027fa653e7b3b77269962a0db5a960ffe345fac41c6235a14e15aa1 + checksum: a4b918c1a00406de73755b5bb5c7d862c69e49e2cd1837889a85279f9e5be1f8f7b8f96e66f358e30e7dbc7a3919ebe5dafeeb9771db2b682ed9ecf60daba431 languageName: node linkType: hard @@ -969,10 +983,10 @@ __metadata: languageName: node linkType: hard -"@parcel/events@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/events@npm:2.10.0" - checksum: 
1d21cd41862de2f21f2ab754b1d949fe17b062fbb8009a23d4d42f6836df7e1d37c2c8ca71304dce18d1168dedf0fcaa88c7c3eacc20611215b00700df0c9c73 +"@parcel/events@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/events@npm:2.12.0" + checksum: 136a8a2921fbc84f9228fd133eec87fbd5cde2beaf974f1aef47fab1a99f11c2919a5d7507b4fc8da81b5c00a474a4808c05b178fca9f8c0c897044d3f5ff342 languageName: node linkType: hard @@ -992,18 +1006,18 @@ __metadata: languageName: node linkType: hard -"@parcel/fs@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/fs@npm:2.10.0" +"@parcel/fs@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/fs@npm:2.12.0" dependencies: - "@parcel/rust": 2.10.0 - "@parcel/types": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/rust": 2.12.0 + "@parcel/types": 2.12.0 + "@parcel/utils": 2.12.0 "@parcel/watcher": ^2.0.7 - "@parcel/workers": 2.10.0 + "@parcel/workers": 2.12.0 peerDependencies: - "@parcel/core": ^2.10.0 - checksum: 10faae481cf4cd0d0ae270c60e070a925f510ca7c93d208bd3c4159da4aff5e4e2e0ea61b58f3638557a469aa706037b3065b526ef207bbb4fdbe6048d04c082 + "@parcel/core": ^2.12.0 + checksum: 43d454d55da6ed14f5c422ade547485fe3d31a58a0e10c502f96dd8bb933f4402979c0ae252776d6ae83b3d0a27873390f892337a8fe78ddbc3729e531254007 languageName: node linkType: hard @@ -1032,12 +1046,12 @@ __metadata: languageName: node linkType: hard -"@parcel/graph@npm:3.0.0": - version: 3.0.0 - resolution: "@parcel/graph@npm:3.0.0" +"@parcel/graph@npm:3.2.0": + version: 3.2.0 + resolution: "@parcel/graph@npm:3.2.0" dependencies: nullthrows: ^1.1.1 - checksum: 0a9d5017f6179d3a35a9f97060d24486efe045277e831550e2885b8afc05288a2a898da3ec1f920cb89c02cce8941642b4522e67194a773d50062dadb36f4567 + checksum: b4d31624fc684aab053721b1bdcd3ba4ca465159a4253725a32393aac473eb6016fe7d1a2742f123b6b67437c8af89ee36291220dae51d807833f61ab60744f3 languageName: node linkType: hard @@ -1051,13 +1065,13 @@ __metadata: languageName: node linkType: hard -"@parcel/logger@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/logger@npm:2.10.0" +"@parcel/logger@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/logger@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/events": 2.10.0 - checksum: 52d0b5331dd72778da4a2be798bb2b6d5e72ecb317cdad3a4c546807be1e30aa59b1cca6c57314438b30e62d89b578354098b939d72618ab0634c33d0261ee91 + "@parcel/diagnostic": 2.12.0 + "@parcel/events": 2.12.0 + checksum: be3fe9d9eaec60d8f2546a5f521048629b9206cd37b9863c9311fcd021b4748c57479490f1e7188a36e6eabfb42cda7d4eaf60bc11664ef9b87d164487774a23 languageName: node linkType: hard @@ -1071,12 +1085,12 @@ __metadata: languageName: node linkType: hard -"@parcel/markdown-ansi@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/markdown-ansi@npm:2.10.0" +"@parcel/markdown-ansi@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/markdown-ansi@npm:2.12.0" dependencies: chalk: ^4.1.0 - checksum: 35e2d07ec81e271144fc0f7b5f00913dce787e8a6f4a308eb59269fbc50d751cae939fedfd03e30b4aeef0c7f1210af2d1990a5551f1d39eeb80c977feb72b04 + checksum: 850ee665d934ef059d914e15d2dce601618db5d28ac700da9ac1197455135b7cb8ebe560ecae4905f2225ce37c5b5dad86fbe6210afb10d46c513b64ea6faec7 languageName: node linkType: hard @@ -1089,128 +1103,129 @@ __metadata: languageName: node linkType: hard -"@parcel/namer-default@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/namer-default@npm:2.10.0" +"@parcel/namer-default@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/namer-default@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 + "@parcel/diagnostic": 2.12.0 + 
"@parcel/plugin": 2.12.0 nullthrows: ^1.1.1 - checksum: f2a32096d1574b0c871770622d20c619c35d77da8206bc0f74c1ab1d9107001cbb60771133da80b8ca2c927d65de58e6940a049de023f5cbc6e3131b80eea3fb + checksum: dc92ec094595658aad21ec668290ee158f71a400783188292ebf00240b81c2041afda1749a1a6081a465943d03cf26a92cf549cbead95f2873450d063361677f languageName: node linkType: hard -"@parcel/node-resolver-core@npm:3.1.0": - version: 3.1.0 - resolution: "@parcel/node-resolver-core@npm:3.1.0" +"@parcel/node-resolver-core@npm:3.3.0": + version: 3.3.0 + resolution: "@parcel/node-resolver-core@npm:3.3.0" dependencies: "@mischnic/json-sourcemap": ^0.1.0 - "@parcel/diagnostic": 2.10.0 - "@parcel/fs": 2.10.0 - "@parcel/rust": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/fs": 2.12.0 + "@parcel/rust": 2.12.0 + "@parcel/utils": 2.12.0 nullthrows: ^1.1.1 semver: ^7.5.2 - checksum: dcdd39bc6a044200fa734fbc58bba9b59c5ee2978c460b2734378adc7d47d724bdd98e942b0822305d0cfbd93bed1bb4ea37a6b8f8f7198e002890d9095f28ed + checksum: acc3721678d88b20f0bd6c90520e495a4032039332eb1155b69dc093ddb2ab7890240eb553f243f1383bd4e441c64a9870f5b5f84a2bb783b94574f859a813fd languageName: node linkType: hard -"@parcel/optimizer-css@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/optimizer-css@npm:2.10.0" +"@parcel/optimizer-css@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/optimizer-css@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/plugin": 2.12.0 "@parcel/source-map": ^2.1.1 - "@parcel/utils": 2.10.0 + "@parcel/utils": 2.12.0 browserslist: ^4.6.6 - lightningcss: ^1.16.1 + lightningcss: ^1.22.1 nullthrows: ^1.1.1 - checksum: ea159895120b697bc2b9afd1b0706a2a769bb1714a697d0e9397ebef7753f15522d385cb3b863573fe19847a5b75b45b69d695ad4d96d6978102816425cecef6 + checksum: abcdf58c2999b53931274528ad5763a05202c65a5251b978f4989230430b5ecc620dbd6527de1a1970db80f993a6052eacea9c8b7d9d738335cce7f01a016751 languageName: node linkType: hard -"@parcel/optimizer-data-url@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/optimizer-data-url@npm:2.10.0" +"@parcel/optimizer-data-url@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/optimizer-data-url@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/plugin": 2.12.0 + "@parcel/utils": 2.12.0 isbinaryfile: ^4.0.2 mime: ^2.4.4 - checksum: ec9530be83335cc921ee41a2b15529d304cd16dea9373ac5e515ef247643672ad4155fab199ac003b6fb05fb0ec138a335c49bdc824f3cabcb6e49fcc8795a85 + checksum: 03972939615d2c8fddc6df223bd8f17d26b24712ed165ec60a95feda4759f3fe1b5ee25f295a02933022bbf8ef480211490c34c412e7e8e53fee7c4b970291a0 languageName: node linkType: hard -"@parcel/optimizer-htmlnano@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/optimizer-htmlnano@npm:2.10.0" +"@parcel/optimizer-htmlnano@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/optimizer-htmlnano@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 + "@parcel/plugin": 2.12.0 htmlnano: ^2.0.0 nullthrows: ^1.1.1 posthtml: ^0.16.5 svgo: ^2.4.0 - checksum: 1f6de13022437a49b2c9350deac8b193e950e090bc73409254dae99bbafb9f8f54b2bcb4d97e423ddf914925589f6809f7f63edc22c22e6828398fa58a17f621 + checksum: 64e571f56f959c4cf1fd724e3b50e741b57f90acf035ca5a6908cf7186c42993bfb372db9ac39f9a9dd9bd57be4bba12a527da451893547f6da27db55d63ff13 languageName: node linkType: hard -"@parcel/optimizer-image@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/optimizer-image@npm:2.10.0" +"@parcel/optimizer-image@npm:2.12.0": + version: 
2.12.0 + resolution: "@parcel/optimizer-image@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 - "@parcel/rust": 2.10.0 - "@parcel/utils": 2.10.0 - "@parcel/workers": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/plugin": 2.12.0 + "@parcel/rust": 2.12.0 + "@parcel/utils": 2.12.0 + "@parcel/workers": 2.12.0 peerDependencies: - "@parcel/core": ^2.10.0 - checksum: 94d5db2837fd2cdefeeccd279d16f1c74dd23be6146cc42f72d87800cb6391f3e6d0d1a1b483b3b23b2c5b6820682ac13f3e8253d8391d3e1d2e5891f9259a9b + "@parcel/core": ^2.12.0 + checksum: 7d28379bf1619d6ea0c70fbfef8b6b05941ac2cc0c1de46f2639ec5c40b53a984985538dfeefd35ba20cde31778502631ace1294c9bc0bcce36607ac53c5a3a8 languageName: node linkType: hard -"@parcel/optimizer-svgo@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/optimizer-svgo@npm:2.10.0" +"@parcel/optimizer-svgo@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/optimizer-svgo@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/plugin": 2.12.0 + "@parcel/utils": 2.12.0 svgo: ^2.4.0 - checksum: 7201c632228e2ff6d9db681f5109a5df8239b0f33585c6648ebdd89822d46ae1bb37292cd9c5b4cd1173aa80a0850f7976b8c4317f7548b8fcc127b7bf68be04 + checksum: d3a4d2de9f77b4b084e88b611f1f431d4651f8b819122c92f9d9c1479b5936962a85bf1297e15e07823c3521dffec6083f4b1f4d962392f481dfb7b2a148e7f7 languageName: node linkType: hard -"@parcel/optimizer-swc@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/optimizer-swc@npm:2.10.0" +"@parcel/optimizer-swc@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/optimizer-swc@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/plugin": 2.12.0 "@parcel/source-map": ^2.1.1 - "@parcel/utils": 2.10.0 + "@parcel/utils": 2.12.0 "@swc/core": ^1.3.36 nullthrows: ^1.1.1 - checksum: 1fe68ee6ff8843a33de953eddd6cf2b1061f8017677ee2f4924efcb4a24f0cac517ff4a508b899ce268cf983542b0406c8dfb9af190f30164f4ce6de6b32f9fb + checksum: 0b7fdf3df1e1fff3ed821d7e73f8cd7df4e8e96abd5b12f4e695d762d37736b24eb5bbf365f217ccb04e7a2b5807afec9af9d8c8ab7a97130d74cdc1347c3951 languageName: node linkType: hard -"@parcel/package-manager@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/package-manager@npm:2.10.0" +"@parcel/package-manager@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/package-manager@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/fs": 2.10.0 - "@parcel/logger": 2.10.0 - "@parcel/node-resolver-core": 3.1.0 - "@parcel/types": 2.10.0 - "@parcel/utils": 2.10.0 - "@parcel/workers": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/fs": 2.12.0 + "@parcel/logger": 2.12.0 + "@parcel/node-resolver-core": 3.3.0 + "@parcel/types": 2.12.0 + "@parcel/utils": 2.12.0 + "@parcel/workers": 2.12.0 + "@swc/core": ^1.3.36 semver: ^7.5.2 peerDependencies: - "@parcel/core": ^2.10.0 - checksum: 7c4a95d9df4a819f2613839d30165e653182017a6d7c1b155ee46c46a921d086672daa70effba70cbc78379bb634176da254162b868b89c0e1b117079cc8c914 + "@parcel/core": ^2.12.0 + checksum: a517e9efe1330a34ead2758b2c44ac4e635450dccad87051dcc98b6090ba76f472de4de91f1de8151027397286b11e000faf4c80d34a2bc06a4c7f7bd23e97f5 languageName: node linkType: hard @@ -1231,84 +1246,85 @@ __metadata: languageName: node linkType: hard -"@parcel/packager-css@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/packager-css@npm:2.10.0" +"@parcel/packager-css@npm:2.12.0": + version: 2.12.0 + resolution: 
"@parcel/packager-css@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/plugin": 2.12.0 "@parcel/source-map": ^2.1.1 - "@parcel/utils": 2.10.0 + "@parcel/utils": 2.12.0 + lightningcss: ^1.22.1 nullthrows: ^1.1.1 - checksum: 11bf4cae4cc7e0247842bfcffce952b4e240dd0d4caf1a0628faae57a7b9b651c173cc5c14c79a921d1d485d18fc56c5bcaae99ba873ca08d7e3d2582969ba9c + checksum: 684aaa1d8551e65c0af0d44905f1c08f1c0247d05b1af224abaf5007e197e12facb2b511bf2eee66c432613f31e04753d94dd23773c08fe77eb0f2b8ee41799f languageName: node linkType: hard -"@parcel/packager-html@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/packager-html@npm:2.10.0" +"@parcel/packager-html@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/packager-html@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - "@parcel/types": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/plugin": 2.12.0 + "@parcel/types": 2.12.0 + "@parcel/utils": 2.12.0 nullthrows: ^1.1.1 posthtml: ^0.16.5 - checksum: 8dfd86e7d68417d52625817532b9f1aa7a68686345379b339c00e0d079a7922eaf5c23db2661602e80e8a3207090b5f7d1fed20b7ec5b6811c830ba9467c9060 + checksum: ee558ad616a21b94781a922c7ac8ee6da831cc8f7c4e4642a43027ce6df32ea93f4addabf573b9a955f4aa5cc5462bf8a42fc33809fab68249044e4ab2900a14 languageName: node linkType: hard -"@parcel/packager-js@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/packager-js@npm:2.10.0" +"@parcel/packager-js@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/packager-js@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 - "@parcel/rust": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/plugin": 2.12.0 + "@parcel/rust": 2.12.0 "@parcel/source-map": ^2.1.1 - "@parcel/types": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/types": 2.12.0 + "@parcel/utils": 2.12.0 globals: ^13.2.0 nullthrows: ^1.1.1 - checksum: 9b625988640da09195f9032184091c6298aa5a80aaa1eafc0ac6ab1856b5d74a453ecca79d494b8548dd74a4356e92e0675f1c69be2fe70351541759f8bcfc03 + checksum: 2189b7ff152ddb80739f65f5dffbcce12dbaeb9c8ef5b702e0c253c9b57e390f055b46e8874017b43313b67cfb4e89675a49854a844fcbed6bd1f7885e193cd5 languageName: node linkType: hard -"@parcel/packager-raw@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/packager-raw@npm:2.10.0" +"@parcel/packager-raw@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/packager-raw@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - checksum: 492fe07ae58e55b6fb492cccd12a0eb6733134ae93c1381a516c0e24b1e11c9f1fa0e88dc626f30c07100adaf2b6c1d299255734e9becaebe8e4bcdc8c118074 + "@parcel/plugin": 2.12.0 + checksum: 39ce2fc7aede5b81be4bcd1939c49d9166250bedf8c408687c9a125154cc4fcfcd7181e38faa3137817144f75f070c5eaa40472f68ec0aaa9bd2a070674a1093 languageName: node linkType: hard -"@parcel/packager-svg@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/packager-svg@npm:2.10.0" +"@parcel/packager-svg@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/packager-svg@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - "@parcel/types": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/plugin": 2.12.0 + "@parcel/types": 2.12.0 + "@parcel/utils": 2.12.0 posthtml: ^0.16.4 - checksum: f49d7f3b88ceded019609e4c1f0747325540eecec334dc134ab31d1ec0f1799affbe9f254b7817df8dd0b4afd88023fff7781f9a07c8439a2de8b1403c07034c + checksum: 436ac9ea3988ed79e637f6c8990f5f3de75816edc912d26388deeee94ef49b782ced25f427e15b4e721c9e25da6e90ca19f1efd85c3a8aedb1850cb293250b9f languageName: node linkType: hard -"@parcel/packager-wasm@npm:2.10.0": - version: 2.10.0 
- resolution: "@parcel/packager-wasm@npm:2.10.0" +"@parcel/packager-wasm@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/packager-wasm@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - checksum: d9a13eb838b6bcaa1dd58af6b58f40daa4d3634a28b0b259983e8d8fb7e83f5cb8be3441135bd2ce55a7a459d818ddb77bee44cc52c8fb5169da215ca85341cd + "@parcel/plugin": 2.12.0 + checksum: a10e1cd9885a48ad1153b2ca83ef3c852f4a2ed48c67df4f1677da8660878faa1ee3d9da16f0b820f33d17f9181d845d6038f0ea3470c937f973fbe2dd3b86b6 languageName: node linkType: hard -"@parcel/plugin@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/plugin@npm:2.10.0" +"@parcel/plugin@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/plugin@npm:2.12.0" dependencies: - "@parcel/types": 2.10.0 - checksum: e13ba6e7e521078e7755ecdf906c0f1d8fe63efaec26bb66e2f0df22417e755bbced6dbbdea50698ad27944ed1a16944c143eb08c79e4118ea62bc7c60f62b5f + "@parcel/types": 2.12.0 + checksum: 0b52f1dd0675ea4f597a3f882f47434b7c5dabc997a875d07f1cf178e37adc927ed86e084502030a04ac6c9b548152741dfdeb8b6d730f7d8af2bfe3465a77d3 languageName: node linkType: hard @@ -1321,111 +1337,111 @@ __metadata: languageName: node linkType: hard -"@parcel/profiler@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/profiler@npm:2.10.0" +"@parcel/profiler@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/profiler@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/events": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/events": 2.12.0 chrome-trace-event: ^1.0.2 - checksum: 78d545edb76d72f962769df8964a3a19fe3d0b2b8c47a929eeafd9d71b3f00f2733428a649804057fce153a7fb71fa8bfdd25cc3102b32ea542b69af35f7ce4a + checksum: b683b74e10ca469d34588e6a15fe5abbeae66f844c75eaf8aaa588912c41f3668bcff087f6c4ff931a861731443f3addf5a16cfad644827e1daa89e020cf0fb3 languageName: node linkType: hard -"@parcel/reporter-cli@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/reporter-cli@npm:2.10.0" +"@parcel/reporter-cli@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/reporter-cli@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - "@parcel/types": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/plugin": 2.12.0 + "@parcel/types": 2.12.0 + "@parcel/utils": 2.12.0 chalk: ^4.1.0 term-size: ^2.2.1 - checksum: 0137a91e459121207f83109d68e555124170e2c585d7cb9e685380b753fffee68e4903b9e9e79be8e42ed85b724460989031154bcdcc25845f3bef945a8e0a74 + checksum: 8cc524fa155fa0b9cf0f084cdc184f8cacdaf439d4ac7a74cf431ab9a2a6d0f6c238563efa30e3d49da01e78b61c31a81879c510bb05d44c226e7fcde553994d languageName: node linkType: hard -"@parcel/reporter-dev-server@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/reporter-dev-server@npm:2.10.0" +"@parcel/reporter-dev-server@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/reporter-dev-server@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - "@parcel/utils": 2.10.0 - checksum: e72fd6ec095e36fb3c758b5a043c4c2038c464dad8528b12f57df83f19d6c138ce045c5897c88fd307fd03a2ffb0c53a1a799aec9e74e7de82261e32c031ff89 + "@parcel/plugin": 2.12.0 + "@parcel/utils": 2.12.0 + checksum: 43957b4656442f4609f29a74cd07b1c358dba263faa622c18841dbd4065e251a959b1e2675de45cf0e42f17a52f27594d4ae83f86e30b59e53f143ce6fe13c52 languageName: node linkType: hard -"@parcel/reporter-tracer@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/reporter-tracer@npm:2.10.0" +"@parcel/reporter-tracer@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/reporter-tracer@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/plugin": 2.12.0 + "@parcel/utils": 
2.12.0 chrome-trace-event: ^1.0.3 nullthrows: ^1.1.1 - checksum: 0f8249b9983943ba1ee47e0b63366495da3c4af80f70de40fe8151e15ea285ac727f122988079cc24fe34ad43f2106a48b8bad4ad99fc8c96b031de6d31a90d2 + checksum: 24cddacd19f2f5dfde30133fbc1d484666a59cc384013a81e7eb1ba8517ad362e0f92d81e7b42f909657eb4df0d7519a3ed51e0de36a9f3f7c9a3b703054a20f languageName: node linkType: hard -"@parcel/resolver-default@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/resolver-default@npm:2.10.0" +"@parcel/resolver-default@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/resolver-default@npm:2.12.0" dependencies: - "@parcel/node-resolver-core": 3.1.0 - "@parcel/plugin": 2.10.0 - checksum: c82e2d3c4b4870178845df6b46cd4ac41114b4b256c13d670de61cbc43fa7c0f76f7e2ee5043c0ace205bb76ba2e2886e49e6df15c716e271c72435620885257 + "@parcel/node-resolver-core": 3.3.0 + "@parcel/plugin": 2.12.0 + checksum: f3652eea094151f8a820c0214251209c625ac80ecc086b1869893a14620ad9b6bc86d65496a7687929484ade6db61e375647811d23a114509b4a16e7caf40408 languageName: node linkType: hard -"@parcel/runtime-browser-hmr@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/runtime-browser-hmr@npm:2.10.0" +"@parcel/runtime-browser-hmr@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/runtime-browser-hmr@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - "@parcel/utils": 2.10.0 - checksum: 12928462c812253a6ceb2338e7f3a1db1745f2102060804b144854c1788dd48fb753b2c5ce8ad59027ad44aa7aad803d0521f9823cf7c3bf913715157a35849e + "@parcel/plugin": 2.12.0 + "@parcel/utils": 2.12.0 + checksum: bbba57ecee5668fe2316fc8961f559d2c9296f05fb0feee002dfc1010aa1f2bc4a4ae2ab7778f132ed793e3ebcae05c558552ff86871b37ed25bfab572499191 languageName: node linkType: hard -"@parcel/runtime-js@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/runtime-js@npm:2.10.0" +"@parcel/runtime-js@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/runtime-js@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/plugin": 2.12.0 + "@parcel/utils": 2.12.0 nullthrows: ^1.1.1 - checksum: 3bbd64c5b9fb64387497a6c5d310fb9216ad218f4b1cf74c9d8cb1325335903bd45582942ae7b54bd08a81d8e871af6ac8d598c311a05cdd2ecd7ce39ead9a31 + checksum: 6afa3e7eb27c11b4fdb2236d3f2e3f07c284927217b5811ebb0d73cd24dfdc8718a6bbb6f43be0d86bb9473f0493bc207d35ce25beaa1ba384b3141ced7ff3bc languageName: node linkType: hard -"@parcel/runtime-react-refresh@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/runtime-react-refresh@npm:2.10.0" +"@parcel/runtime-react-refresh@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/runtime-react-refresh@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/plugin": 2.12.0 + "@parcel/utils": 2.12.0 react-error-overlay: 6.0.9 react-refresh: ^0.9.0 - checksum: dc567474a154a73c90041b51b1122d9cb8c677ae2ebdacf534325ab3cf18b506aa6477b8c4005438a6b86c2bac88025d78407d84b5904fe035b2191d829ac98a + checksum: 41aee9a87484575b67dcce07d676a4e26bf0bb79ddea5328ef4a8d729a74da29f0c625b0a7a479c5086e5c79e4616e89034138aad3c97a6db2cf059f1a19d1c9 languageName: node linkType: hard -"@parcel/runtime-service-worker@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/runtime-service-worker@npm:2.10.0" +"@parcel/runtime-service-worker@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/runtime-service-worker@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/plugin": 2.12.0 + "@parcel/utils": 2.12.0 nullthrows: ^1.1.1 - checksum: 
d0bfd113b9f26dca4e0936c316d25c3e774e13442f738812c8e20663e4c7ea766f3940ba11ab4d163cad2ad8ad1c2a1292c72ed162de0a34a64a07b44a4baa23 + checksum: c71246428e1ba69649fe4ecc1ed272f34fb52ff14f364c159e6f979332bb1280483b4eb7633bfe3ab3b3d7c381b524f669e356d9705ba4764bc149977e965c53 languageName: node linkType: hard -"@parcel/rust@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/rust@npm:2.10.0" - checksum: 466a78d27db445780593a6a5a1ce39406daabf3661b81aa8a714ba647d0e3b08532a23b86508a7e8b9cb95f8aa4f755c6e33148ba961f173283b96b72cc51ee6 +"@parcel/rust@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/rust@npm:2.12.0" + checksum: 51c5b67b9ee83e12d544774dad705d500dda52948f65cdb6c7bfa4275a9692561aa141c68be9c8fd29a8cd795a1fe4f3537bc2f1f91a80163d0bb5a0bd223ad0 languageName: node linkType: hard @@ -1447,198 +1463,198 @@ __metadata: languageName: node linkType: hard -"@parcel/transformer-babel@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/transformer-babel@npm:2.10.0" +"@parcel/transformer-babel@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/transformer-babel@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/plugin": 2.12.0 "@parcel/source-map": ^2.1.1 - "@parcel/utils": 2.10.0 + "@parcel/utils": 2.12.0 browserslist: ^4.6.6 json5: ^2.2.0 nullthrows: ^1.1.1 semver: ^7.5.2 - checksum: fd64092c9cb9d4ecc78cefc02720154a39681a56365173bc8fab82942f5b7236e38411ed288a71202717a5f97599727e101cba08db1cb3d0eaadbf383b0576e3 + checksum: b8c457c0be7662d8262671469fa7e7cc69dcf72e67a7abeadfd41a71c193f10eae857e1ea6d5db9842cd3f471f9b299c5d716c99dbc0929d537c7d050a995e6e languageName: node linkType: hard -"@parcel/transformer-css@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/transformer-css@npm:2.10.0" +"@parcel/transformer-css@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/transformer-css@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/plugin": 2.12.0 "@parcel/source-map": ^2.1.1 - "@parcel/utils": 2.10.0 + "@parcel/utils": 2.12.0 browserslist: ^4.6.6 - lightningcss: ^1.16.1 + lightningcss: ^1.22.1 nullthrows: ^1.1.1 - checksum: acc26e9b3df28b45e89dcb71c6e29ab6c8507e44fde1d7f2b3aa6b8230dc17fbd043a4e2afd33b6cadd284842fd2582d937b16c4f43f10568fba839ce520ef94 + checksum: 3a6f16321d4759b17e13db8953c43cf9ed00aad8ef4354bea04647be60c0b6d36c8a28765a78c79038cbcbb2b32e9cc955f8bc6bddf0e59aa30cae6b89f8a8e9 languageName: node linkType: hard -"@parcel/transformer-html@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/transformer-html@npm:2.10.0" +"@parcel/transformer-html@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/transformer-html@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 - "@parcel/rust": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/plugin": 2.12.0 + "@parcel/rust": 2.12.0 nullthrows: ^1.1.1 posthtml: ^0.16.5 posthtml-parser: ^0.10.1 posthtml-render: ^3.0.0 semver: ^7.5.2 srcset: 4 - checksum: f28e0d36063a1213449b3ae988d984fb8fe03e6b34f005dcf5a4f6578346d9780b99023d42c7834cc4733267b68ebc2df9d372588d3d3949e19e60d52fbb70a9 + checksum: 7fcfac62ca73f239b1a4a4b049c1ef5eb6831a625e873a784c51c9f28957f7c8c7d5f8e86b8e98b9f8a0f7c8f27c3782f5a620931e96c400a0e6e9c203a200bb languageName: node linkType: hard -"@parcel/transformer-image@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/transformer-image@npm:2.10.0" +"@parcel/transformer-image@npm:2.12.0": + version: 2.12.0 + resolution: 
"@parcel/transformer-image@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - "@parcel/utils": 2.10.0 - "@parcel/workers": 2.10.0 + "@parcel/plugin": 2.12.0 + "@parcel/utils": 2.12.0 + "@parcel/workers": 2.12.0 nullthrows: ^1.1.1 peerDependencies: - "@parcel/core": ^2.10.0 - checksum: 61a47d7d8e97a1874adf15f92410715c95a1b3747e0466299cb1a28cc1835855c758d7c3ebe40389b651bc1d7de2e942d0747e2787b63db5b6fff62803f6f900 + "@parcel/core": ^2.12.0 + checksum: 0a1581eaccd9c26fbc83da6b576c2b3dc07080d694744b6224ed35a8d77d30a2c3231061f67700281e6963f8a0d23d67f67c73553ea5b94ebfbbbc9c34f60ba3 languageName: node linkType: hard -"@parcel/transformer-inline-string@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/transformer-inline-string@npm:2.10.0" +"@parcel/transformer-inline-string@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/transformer-inline-string@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - checksum: 618c919108256488d17afd2155faf83989fdba8c7061be2dfa8895721670c9192e16a63b0b98a3660b931c5077bc0518265372a973ecc0f757e9139949962cec + "@parcel/plugin": 2.12.0 + checksum: 5f63c086956b64cf67ca006efe99048e2b2ce7d23df7703d2709da1971f391f62620dc9186ae604d00918345718656af438f3d681a312fbdcc05fd0477499c83 languageName: node linkType: hard -"@parcel/transformer-js@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/transformer-js@npm:2.10.0" +"@parcel/transformer-js@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/transformer-js@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 - "@parcel/rust": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/plugin": 2.12.0 + "@parcel/rust": 2.12.0 "@parcel/source-map": ^2.1.1 - "@parcel/utils": 2.10.0 - "@parcel/workers": 2.10.0 + "@parcel/utils": 2.12.0 + "@parcel/workers": 2.12.0 "@swc/helpers": ^0.5.0 browserslist: ^4.6.6 nullthrows: ^1.1.1 regenerator-runtime: ^0.13.7 semver: ^7.5.2 peerDependencies: - "@parcel/core": ^2.10.0 - checksum: e9944ce77cfa062719f1a352bc873fc3dd2afe87baec024752f6da92714b591ef16b38d9f8c1eeeaad2a44e0d173c401005f4d11326deb236c08499f72519ae8 + "@parcel/core": ^2.12.0 + checksum: b9fe4c887b08d5032a2dc87e529dbcf19b75e1274d6fcbe5e7e8d92bae0186c063e93b93747e49eb67763c29232f1b2411f237c64d5af782d2f6ff663f98a9fd languageName: node linkType: hard -"@parcel/transformer-json@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/transformer-json@npm:2.10.0" +"@parcel/transformer-json@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/transformer-json@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 + "@parcel/plugin": 2.12.0 json5: ^2.2.0 - checksum: 9c7aceb8e6372035ebd49dfd16f656f8c631c80c6750a2a2a51444e55013caa2d0bdef6b868a42c149c0500c988440ddf3f4a9a565dd15bf2a1c80571e9f21e9 + checksum: a711cb65a8bfa4bcffcced0a8ecc91c4e4ddc65d77d2328a7ca8800170f2fa4e6316df06ad55816c65852f45092bcb4e42f8125d179d3223abe4d0650306c134 languageName: node linkType: hard -"@parcel/transformer-postcss@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/transformer-postcss@npm:2.10.0" +"@parcel/transformer-postcss@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/transformer-postcss@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 - "@parcel/rust": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/plugin": 2.12.0 + "@parcel/rust": 2.12.0 + "@parcel/utils": 2.12.0 clone: ^2.1.1 nullthrows: ^1.1.1 postcss-value-parser: ^4.2.0 semver: ^7.5.2 - checksum: 
2e524bd513ca7f8f4ad6904c0a8c8fdd3e1092872a2e7ced7ba26b91d76e723ada6c3f2ae537a4b04d611a00babafd8b780fa43cde2d97b30b4e6dbb5ed942a6 + checksum: b210044a7f13078ed5acf1d02c0169f1daab3e5134de5cfb4aa4900c70a0e19b7cef08e1f03793a1e9af6e625b0ae0b0598803cfa8338e13ba6e8cc792fbba0b languageName: node linkType: hard -"@parcel/transformer-posthtml@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/transformer-posthtml@npm:2.10.0" +"@parcel/transformer-posthtml@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/transformer-posthtml@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/plugin": 2.12.0 + "@parcel/utils": 2.12.0 nullthrows: ^1.1.1 posthtml: ^0.16.5 posthtml-parser: ^0.10.1 posthtml-render: ^3.0.0 semver: ^7.5.2 - checksum: 7de343f0f9ae1a1686e0a3ffdbb64d37edecd8704441efda1c9b999dbc8dd3c7feef97361ba785588521f13cf025db6ca9ded765fb69267b12bbd6f2a0c64cf8 + checksum: b62582ae7e0af9e3fbca8baf589261548c994c8fbfa45ca57901faa1a1cf23122035784a92688fdad9f8b626d26d877f3f465bb5799d56eef264314ecfe74b1d languageName: node linkType: hard -"@parcel/transformer-raw@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/transformer-raw@npm:2.10.0" +"@parcel/transformer-raw@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/transformer-raw@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - checksum: c7b1b9c6f7fbcfc51dd7360ae9551fa4611ef96990df954e057eeeb44f3915c84da7bb6bea10eb4e246ec4e217c387db8cd8fe3fbf999bf0bfb2e91979092dc3 + "@parcel/plugin": 2.12.0 + checksum: de6681e2e723d9877f3e2fd3c4983ac4de8ecae26f5d0c51ce6d231bd29d644f86db9558426cd69adfdbb89edd824c08ef92ada09aaceaa66dd1f44d1c027d60 languageName: node linkType: hard -"@parcel/transformer-react-refresh-wrap@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/transformer-react-refresh-wrap@npm:2.10.0" +"@parcel/transformer-react-refresh-wrap@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/transformer-react-refresh-wrap@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/plugin": 2.12.0 + "@parcel/utils": 2.12.0 react-refresh: ^0.9.0 - checksum: fc3163bcb03a16a0581d23f2373d87b2b9d10dc49e48297b60a87b5a555ba0cd7e4d3ab1787cac0974c2871ddd9ada0fa0551685037cf4dce518d96e1a371917 + checksum: 9aba8c1ab0e7a3dc4da735f093b38e6bcda04385b5ba3373d2b2d09f8099c5dd40493d4b77ca697f499d8d204b6288fd1a5dc1b6c717041d612dcdc501908937 languageName: node linkType: hard -"@parcel/transformer-sass@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/transformer-sass@npm:2.10.0" +"@parcel/transformer-sass@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/transformer-sass@npm:2.12.0" dependencies: - "@parcel/plugin": 2.10.0 + "@parcel/plugin": 2.12.0 "@parcel/source-map": ^2.1.1 sass: ^1.38.0 - checksum: 2d697077ac5b74083bca9b071c1677a6bb50b9f41f2b0f3ec381aef3c7ee3f284dc3b4e26c7a50d237a49669534bf05cf704e1c90d2ea5e8b59c62c52a645aa6 + checksum: ce6b4d329b60dd4766a47b064cb10d18406ce569488b7f7c6fe561e9180786b813194935d9679bbe4b9afa43877a034d57a4b61e1166a2801af3889196a1e3d8 languageName: node linkType: hard -"@parcel/transformer-svg@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/transformer-svg@npm:2.10.0" +"@parcel/transformer-svg@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/transformer-svg@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/plugin": 2.10.0 - "@parcel/rust": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/plugin": 2.12.0 + "@parcel/rust": 2.12.0 nullthrows: ^1.1.1 posthtml: ^0.16.5 posthtml-parser: ^0.10.1 posthtml-render: ^3.0.0 semver: ^7.5.2 
- checksum: d5f55f6eee03053e2ede02cf68534fa3cb579b3d45dc121ce60cbd2de37fd0ec729b7bb536dad923c0f71f2ad0c02938fff887c26741e1f4ac8aaa9fc2e9944e + checksum: 92b7c6589477e93f8ded857924dee82c498a83641c03b1ce3f836219ca3e8e543b9281128f8647529e561eb5212a1f173d2cb1a1eed5d7cc9487b782db82158c languageName: node linkType: hard -"@parcel/types@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/types@npm:2.10.0" +"@parcel/types@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/types@npm:2.12.0" dependencies: - "@parcel/cache": 2.10.0 - "@parcel/diagnostic": 2.10.0 - "@parcel/fs": 2.10.0 - "@parcel/package-manager": 2.10.0 + "@parcel/cache": 2.12.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/fs": 2.12.0 + "@parcel/package-manager": 2.12.0 "@parcel/source-map": ^2.1.1 - "@parcel/workers": 2.10.0 + "@parcel/workers": 2.12.0 utility-types: ^3.10.0 - checksum: 387aa079020ffec27f92d9496e0abd5e71e4b8e0ca1c9cf5b692737dd172d037302dd2c9d3cc32143813707050eb9de23e4e4eca32a65c402f6564a08da61e6f + checksum: 250f95580cd441ee9c5178d65088da9eb105d4b300b753fb6c4b54383e8fa6272eb6273ff45cd223c7eb02fefdee17a18997116f1da26b9a24455c51a8aaf6b2 languageName: node linkType: hard @@ -1657,19 +1673,19 @@ __metadata: languageName: node linkType: hard -"@parcel/utils@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/utils@npm:2.10.0" +"@parcel/utils@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/utils@npm:2.12.0" dependencies: - "@parcel/codeframe": 2.10.0 - "@parcel/diagnostic": 2.10.0 - "@parcel/logger": 2.10.0 - "@parcel/markdown-ansi": 2.10.0 - "@parcel/rust": 2.10.0 + "@parcel/codeframe": 2.12.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/logger": 2.12.0 + "@parcel/markdown-ansi": 2.12.0 + "@parcel/rust": 2.12.0 "@parcel/source-map": ^2.1.1 chalk: ^4.1.0 nullthrows: ^1.1.1 - checksum: 9f4953ff9af730b59abbaa6f5f0c452a26848fc52d6f04a0facdc9ebfabb4434fd548ae38a7f882a4a5cd2db5a0b389b540fe7d2ffc6c286323062a764d2cc43 + checksum: ba80a60fed98c572a4e1dc81f87e0d63fc570221f6759e980b04eff88d3c92a83411a787a08da2720a7e541e52cc6890b1122f59ad7f4fc444f9dbfa8beba818 languageName: node linkType: hard @@ -1710,19 +1726,19 @@ __metadata: languageName: node linkType: hard -"@parcel/workers@npm:2.10.0": - version: 2.10.0 - resolution: "@parcel/workers@npm:2.10.0" +"@parcel/workers@npm:2.12.0": + version: 2.12.0 + resolution: "@parcel/workers@npm:2.12.0" dependencies: - "@parcel/diagnostic": 2.10.0 - "@parcel/logger": 2.10.0 - "@parcel/profiler": 2.10.0 - "@parcel/types": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/logger": 2.12.0 + "@parcel/profiler": 2.12.0 + "@parcel/types": 2.12.0 + "@parcel/utils": 2.12.0 nullthrows: ^1.1.1 peerDependencies: - "@parcel/core": ^2.10.0 - checksum: e8b1701b53b2f3e913eee98e1e16def38a67d08b4835d25e1dd610505edc2f38c23729ec9b264ccd3c6ea3221814a314b657a3a3c41feebbdb01dcc34e69741c + "@parcel/core": ^2.12.0 + checksum: e19c3c0a6651a9cef760aca3210356cff36c29d1472b544bec298bc4ffa9aa7429749cf6ce0b1009d034d8a086412833e3af48b3a88f95bb1700e09a8e62ca2f languageName: node linkType: hard @@ -1763,9 +1779,9 @@ __metadata: languageName: node linkType: hard -"@rollup/pluginutils@npm:5.0.5": - version: 5.0.5 - resolution: "@rollup/pluginutils@npm:5.0.5" +"@rollup/pluginutils@npm:5.1.0": + version: 5.1.0 + resolution: "@rollup/pluginutils@npm:5.1.0" dependencies: "@types/estree": ^1.0.0 estree-walker: ^2.0.2 @@ -1775,19 +1791,19 @@ __metadata: peerDependenciesMeta: rollup: optional: true - checksum: 
dcd4d6e3cb6047f18c465a5f2bcd29995c565f083fb6ca5505bcf2018ae0c16634fd38d99538fbb7dcef4e1b491cf4b4465f8845b5666778a925a27e9202dbab + checksum: 3cc5a6d91452a6eabbfd1ae79b4dd1f1e809d2eecda6e175deb784e75b0911f47e9ecce73f8dd315d6a8b3f362582c91d3c0f66908b6ced69345b3cbe28f8ce8 languageName: node linkType: hard -"@sidvind/better-ajv-errors@npm:^2.0.0": - version: 2.0.0 - resolution: "@sidvind/better-ajv-errors@npm:2.0.0" +"@sidvind/better-ajv-errors@npm:2.1.3": + version: 2.1.3 + resolution: "@sidvind/better-ajv-errors@npm:2.1.3" dependencies: "@babel/code-frame": ^7.16.0 chalk: ^4.1.0 peerDependencies: ajv: 4.11.8 - 8 - checksum: 12b0d87855737d1f36e869f9a55c706fa9eb232dfebaca5209c21d8d5caf7fa25671238064d68e619492c68591a73b6f49fcc1dff182fd77d2370e277033bd60 + checksum: 949cb805a130a61c00895231aa33c1c9e51b72ae21bd59fe088fc9671b0e921b99183d816d34a02fe5d07647477c570a92d7d327c5e99670605e92b0d2ef163b languageName: node linkType: hard @@ -1942,6 +1958,13 @@ __metadata: languageName: node linkType: hard +"@types/hammerjs@npm:^2.0.45": + version: 2.0.46 + resolution: "@types/hammerjs@npm:2.0.46" + checksum: caba6ec788d19905c71092670b58514b3d1f5eee5382bf9205e8df688d51e7857b7994e2dd7aed57fac8977bdf0e456d67fbaf23440a4385b8ce25fe2af1ec39 + languageName: node + linkType: hard + "@types/istanbul-lib-coverage@npm:^2.0.1": version: 2.0.4 resolution: "@types/istanbul-lib-coverage@npm:2.0.4" @@ -2000,63 +2023,107 @@ __metadata: languageName: node linkType: hard -"@vitejs/plugin-vue@npm:4.4.0": - version: 4.4.0 - resolution: "@vitejs/plugin-vue@npm:4.4.0" +"@ungap/structured-clone@npm:^1.2.0": + version: 1.2.0 + resolution: "@ungap/structured-clone@npm:1.2.0" + checksum: 4f656b7b4672f2ce6e272f2427d8b0824ed11546a601d8d5412b9d7704e83db38a8d9f402ecdf2b9063fc164af842ad0ec4a55819f621ed7e7ea4d1efcc74524 + languageName: node + linkType: hard + +"@vitejs/plugin-vue@npm:4.6.2": + version: 4.6.2 + resolution: "@vitejs/plugin-vue@npm:4.6.2" peerDependencies: - vite: ^4.0.0 + vite: ^4.0.0 || ^5.0.0 vue: ^3.2.25 - checksum: 37b6987951f2e6fac0d2b7bad58aa4392142c1f325b7d189865426dbf97ee6c545aa489f952fa16d2f422adce5b6c9977785f398868c5d2f5333e43e361a0e0b + checksum: 01bc4ed64319444f7dcad89f2c8da209f2a2fae1b7b9308c5f8593b5a307287d23178e7b252e1e6f89b20b69ae6629479e06adb7b49c70f5c409401d657e909b + languageName: node + linkType: hard + +"@volar/language-core@npm:2.1.4": + version: 2.1.4 + resolution: "@volar/language-core@npm:2.1.4" + dependencies: + "@volar/source-map": 2.1.4 + checksum: 7430f651431ed00eb7489d48c0596f4653fe70da3c779acfaa5807051db4491c9e4e154e9f0de3c9d863a3b4b1194a517a75395ca9134ea2b1b8af5ff637b204 + languageName: node + linkType: hard + +"@volar/language-service@npm:~2.1.0": + version: 2.1.4 + resolution: "@volar/language-service@npm:2.1.4" + dependencies: + "@volar/language-core": 2.1.4 + vscode-languageserver-protocol: ^3.17.5 + vscode-languageserver-textdocument: ^1.0.11 + vscode-uri: ^3.0.8 + checksum: 06cdcfacf0fab22cee652cab1ae1729628d7ebf68f5f9e791e19e3715b2a4775c0bd2ec2e7a9b0815d93f244d7a745f3ea41aa5084923b10e9258a5f54c1107b + languageName: node + linkType: hard + +"@volar/source-map@npm:2.1.4, @volar/source-map@npm:~2.1.3": + version: 2.1.4 + resolution: "@volar/source-map@npm:2.1.4" + dependencies: + muggle-string: ^0.4.0 + checksum: e2f65bcfd667a02ee5cfe49e612b12e75c05fdaecf3b3590fdd7a0255dce7e51d09e8d4c390c2098ca7321cea219c16a8ea3f6c0f36ca9c0edff3975990b458b languageName: node linkType: hard -"@vue/compiler-core@npm:3.3.4": - version: 3.3.4 - resolution: "@vue/compiler-core@npm:3.3.4" 
+"@vscode/l10n@npm:^0.0.18": + version: 0.0.18 + resolution: "@vscode/l10n@npm:0.0.18" + checksum: c33876cebdef0385359619200ecb5d7c46d7f9abffb80f9fab1f83abb5d6bfdb44cc6d792d1b1b9c736c729121274733bbdcd5d2d2eea0d157bdf662d521edef + languageName: node + linkType: hard + +"@vue/compiler-core@npm:3.4.21": + version: 3.4.21 + resolution: "@vue/compiler-core@npm:3.4.21" dependencies: - "@babel/parser": ^7.21.3 - "@vue/shared": 3.3.4 + "@babel/parser": ^7.23.9 + "@vue/shared": 3.4.21 + entities: ^4.5.0 estree-walker: ^2.0.2 source-map-js: ^1.0.2 - checksum: 5437942ea6575b316c9cd84f4f128a44939713da8b6958060e152c599e6d771d5db056c398d7574ee706ff8092e0d99ac4f14e7eef8712a8dd923d2323201b9e + checksum: 0d6b7732bc5ca5b4561526bbe646f9acd09cd70561b6c822d15856347f21a009ebf30f2f85b1b7500f24f7c0333a2af8ee645c389abe52485c1f4724c982b306 languageName: node linkType: hard -"@vue/compiler-dom@npm:3.3.4": - version: 3.3.4 - resolution: "@vue/compiler-dom@npm:3.3.4" +"@vue/compiler-dom@npm:3.4.21": + version: 3.4.21 + resolution: "@vue/compiler-dom@npm:3.4.21" dependencies: - "@vue/compiler-core": 3.3.4 - "@vue/shared": 3.3.4 - checksum: 1c2ac0c89de8eef7be1c568d57504e6245adaaec40c2c4d9717bc231ca10bf682d918a3b358d24c786eeaf8e0d7eb8a65f57d9044775a304783fde1d069a1896 + "@vue/compiler-core": 3.4.21 + "@vue/shared": 3.4.21 + checksum: f53e4f4e0afc954cede91a8cbeb3a4e053531a43a0f5999d1b18da443ca3f1f6fc9344a8741c72c5719a61bb34e18004ac88e16747bcf145ebc8a31188263690 languageName: node linkType: hard -"@vue/compiler-sfc@npm:3.3.4": - version: 3.3.4 - resolution: "@vue/compiler-sfc@npm:3.3.4" +"@vue/compiler-sfc@npm:3.4.21": + version: 3.4.21 + resolution: "@vue/compiler-sfc@npm:3.4.21" dependencies: - "@babel/parser": ^7.20.15 - "@vue/compiler-core": 3.3.4 - "@vue/compiler-dom": 3.3.4 - "@vue/compiler-ssr": 3.3.4 - "@vue/reactivity-transform": 3.3.4 - "@vue/shared": 3.3.4 + "@babel/parser": ^7.23.9 + "@vue/compiler-core": 3.4.21 + "@vue/compiler-dom": 3.4.21 + "@vue/compiler-ssr": 3.4.21 + "@vue/shared": 3.4.21 estree-walker: ^2.0.2 - magic-string: ^0.30.0 - postcss: ^8.1.10 + magic-string: ^0.30.7 + postcss: ^8.4.35 source-map-js: ^1.0.2 - checksum: 0a0adfdd3e812f528e25e4b3bbf14b2296b719a8aac609eca42035295527cc253b918a552dc15218e917efef26b7ca94054dc8784a1a18c06c3d4bb4d18ab8b9 + checksum: 226dc404be96a2811777825918d971feb42650e262159183548d64a463c4153fab97cdc2647224c609c89dbc0d930c6d9dbe6528ef52a1396b4b22163c20569a languageName: node linkType: hard -"@vue/compiler-ssr@npm:3.3.4": - version: 3.3.4 - resolution: "@vue/compiler-ssr@npm:3.3.4" +"@vue/compiler-ssr@npm:3.4.21": + version: 3.4.21 + resolution: "@vue/compiler-ssr@npm:3.4.21" dependencies: - "@vue/compiler-dom": 3.3.4 - "@vue/shared": 3.3.4 - checksum: 5d1875d55ea864080dd90e5d81a29f93308e312faf00163db5b391b38c2fe799fd3eb58955823dc632f2f8bdd271a4534cc0020646b7f82717be1a8d30dc16e7 + "@vue/compiler-dom": 3.4.21 + "@vue/shared": 3.4.21 + checksum: c510bee68b1a5b7f8ae3fe771c10ce9c397f876a234ced9df89e4a8353f3874870857e929cbb37e6d785d355b43f2264dc3a7fd5cb6867dc5b39ddca607ea3ed languageName: node linkType: hard @@ -2067,65 +2134,69 @@ __metadata: languageName: node linkType: hard -"@vue/reactivity-transform@npm:3.3.4": - version: 3.3.4 - resolution: "@vue/reactivity-transform@npm:3.3.4" +"@vue/devtools-api@npm:^6.5.1": + version: 6.6.1 + resolution: "@vue/devtools-api@npm:6.6.1" + checksum: cf12b5ebcc7729725087072289410107b55bb82e0b86b8442e4e85516977110a8a3f4e1dec763be8b567a59173703b4e9c0ac1b0489bb2bb81363af7ea258a27 + languageName: node + linkType: hard + 
+"@vue/language-plugin-pug@npm:2.0.7": + version: 2.0.7 + resolution: "@vue/language-plugin-pug@npm:2.0.7" dependencies: - "@babel/parser": ^7.20.15 - "@vue/compiler-core": 3.3.4 - "@vue/shared": 3.3.4 - estree-walker: ^2.0.2 - magic-string: ^0.30.0 - checksum: b425e78b2084ac7037887fbe012dcad5e5963ac9714ae15a04fda1c6766ec8c53ef231de1cfdc4d3cf46bd5d84bfec8ebdccf48da4ff5ee2f4b5084e54f0a1b1 + "@volar/source-map": ~2.1.3 + volar-service-pug: 0.0.34 + checksum: 11cc96eb5f240144e91b27fe06fcd48de4ef1e4c7fe666d1173b346ed64b7edfa922bd4eb2e512a91a0c6b907975afcaf69cfee4c91af11168590142b3aba4c3 languageName: node linkType: hard -"@vue/reactivity@npm:3.3.4": - version: 3.3.4 - resolution: "@vue/reactivity@npm:3.3.4" +"@vue/reactivity@npm:3.4.21": + version: 3.4.21 + resolution: "@vue/reactivity@npm:3.4.21" dependencies: - "@vue/shared": 3.3.4 - checksum: 81c3d0c587d23656a57a7a31afb51357274f6512b51baffc67cda183b2361a7e65e646029c26a8bc28587f26b65bba808dcd93cdd3bacab48d2b99d11ad0ec97 + "@vue/shared": 3.4.21 + checksum: 79c7ebe3ec9295cdcb4d762e3a4c0e3eb67d7f12c9deb37baf372c4f48cd5914cdeeba14add433c3149b9c4dd890dc9891ee76e9d13c8ebcd521b5a754a8cc0d languageName: node linkType: hard -"@vue/runtime-core@npm:3.3.4": - version: 3.3.4 - resolution: "@vue/runtime-core@npm:3.3.4" +"@vue/runtime-core@npm:3.4.21": + version: 3.4.21 + resolution: "@vue/runtime-core@npm:3.4.21" dependencies: - "@vue/reactivity": 3.3.4 - "@vue/shared": 3.3.4 - checksum: d402da51269658cba5d857d65fbe322121160bcb1a6fcf03601d5183705e92505c6e90418f491a331ca3e27628f457a6ca7158b9add25f5b0cf5cf53664b8011 + "@vue/reactivity": 3.4.21 + "@vue/shared": 3.4.21 + checksum: 4eb9b5d91fe58bc5b3f38293099d704ba7699a16d4ce68de03fbe5fc703e521ebfe3cefc156ef866d2ce0cbd1c2af1795674b39ab2b764bfedc069aa05233231 languageName: node linkType: hard -"@vue/runtime-dom@npm:3.3.4": - version: 3.3.4 - resolution: "@vue/runtime-dom@npm:3.3.4" +"@vue/runtime-dom@npm:3.4.21": + version: 3.4.21 + resolution: "@vue/runtime-dom@npm:3.4.21" dependencies: - "@vue/runtime-core": 3.3.4 - "@vue/shared": 3.3.4 - csstype: ^3.1.1 - checksum: dac9ada7f6128bcccc031fe5c25d00db94ffb7c011fcb70bada22fa4d889ff842eeb139ab9304bcc52cb5ae9030911a52cb3510b691bb190bbe5fab680b4411a + "@vue/runtime-core": 3.4.21 + "@vue/shared": 3.4.21 + csstype: ^3.1.3 + checksum: ebfdaa081fb7f18214a4e3324a7b58cc1bfe9b585cfc9dc5cf2ee480f233f992c32a6a3a3b595040babf26570ca18e748049d9284c42beceac8665e8f4ce5383 languageName: node linkType: hard -"@vue/server-renderer@npm:3.3.4": - version: 3.3.4 - resolution: "@vue/server-renderer@npm:3.3.4" +"@vue/server-renderer@npm:3.4.21": + version: 3.4.21 + resolution: "@vue/server-renderer@npm:3.4.21" dependencies: - "@vue/compiler-ssr": 3.3.4 - "@vue/shared": 3.3.4 + "@vue/compiler-ssr": 3.4.21 + "@vue/shared": 3.4.21 peerDependencies: - vue: 3.3.4 - checksum: e8598ed1a44df70edaea0ad6786aea6443b9b3d9266249eec5690401859d72d45a1e29ba3eef20e37a95f020abd5e763088b79070ee848af436a4390a253a37a + vue: 3.4.21 + checksum: faa3dc48767fc4308ffa031d07a6dbb362f26b0b8893f82747e6d879f046c373978402d1c15ed08267ebc0f090809cd3d554e6a4f582affcefb5239be5d4860c languageName: node linkType: hard -"@vue/shared@npm:3.3.4": - version: 3.3.4 - resolution: "@vue/shared@npm:3.3.4" - checksum: 12fe53ff816bfa29ea53f89212067a86512c626b8d30149ff28b36705820f6150e1fb4e4e46897ad9eddb1d1cfc02d8941053939910eed69a905f7a5509baabe +"@vue/shared@npm:3.4.21": + version: 3.4.21 + resolution: "@vue/shared@npm:3.4.21" + checksum: 
5f30a408911f339c647baa88c45c3a2f6d58dbdaf2bd404753690f24b612717bdfe9050401d8ffb02613a9a06dd0b43c8307420cd69fda6e92e6d65bf9bc0c6f languageName: node linkType: hard @@ -2316,67 +2387,68 @@ __metadata: languageName: node linkType: hard -"array-includes@npm:^3.1.6": - version: 3.1.6 - resolution: "array-includes@npm:3.1.6" +"array-includes@npm:^3.1.7": + version: 3.1.7 + resolution: "array-includes@npm:3.1.7" dependencies: call-bind: ^1.0.2 - define-properties: ^1.1.4 - es-abstract: ^1.20.4 - get-intrinsic: ^1.1.3 + define-properties: ^1.2.0 + es-abstract: ^1.22.1 + get-intrinsic: ^1.2.1 is-string: ^1.0.7 - checksum: f22f8cd8ba8a6448d91eebdc69f04e4e55085d09232b5216ee2d476dab3ef59984e8d1889e662c6a0ed939dcb1b57fd05b2c0209c3370942fc41b752c82a2ca5 + checksum: 06f9e4598fac12a919f7c59a3f04f010ea07f0b7f0585465ed12ef528a60e45f374e79d1bddbb34cdd4338357d00023ddbd0ac18b0be36964f5e726e8965d7fc languageName: node linkType: hard -"array.prototype.findlastindex@npm:^1.2.2": - version: 1.2.2 - resolution: "array.prototype.findlastindex@npm:1.2.2" +"array.prototype.findlastindex@npm:^1.2.3": + version: 1.2.3 + resolution: "array.prototype.findlastindex@npm:1.2.3" dependencies: call-bind: ^1.0.2 - define-properties: ^1.1.4 - es-abstract: ^1.20.4 + define-properties: ^1.2.0 + es-abstract: ^1.22.1 es-shim-unscopables: ^1.0.0 - get-intrinsic: ^1.1.3 - checksum: 8a166359f69a2a751c843f26b9c8cd03d0dc396a92cdcb85f4126b5f1cecdae5b2c0c616a71ea8aff026bde68165b44950b3664404bb73db0673e288495ba264 + get-intrinsic: ^1.2.1 + checksum: 31f35d7b370c84db56484618132041a9af401b338f51899c2e78ef7690fbba5909ee7ca3c59a7192085b328cc0c68c6fd1f6d1553db01a689a589ae510f3966e languageName: node linkType: hard -"array.prototype.flat@npm:^1.3.1": - version: 1.3.1 - resolution: "array.prototype.flat@npm:1.3.1" +"array.prototype.flat@npm:^1.3.2": + version: 1.3.2 + resolution: "array.prototype.flat@npm:1.3.2" dependencies: call-bind: ^1.0.2 - define-properties: ^1.1.4 - es-abstract: ^1.20.4 + define-properties: ^1.2.0 + es-abstract: ^1.22.1 es-shim-unscopables: ^1.0.0 - checksum: 5a8415949df79bf6e01afd7e8839bbde5a3581300e8ad5d8449dea52639e9e59b26a467665622783697917b43bf39940a6e621877c7dd9b3d1c1f97484b9b88b + checksum: 5d6b4bf102065fb3f43764bfff6feb3295d372ce89591e6005df3d0ce388527a9f03c909af6f2a973969a4d178ab232ffc9236654149173e0e187ec3a1a6b87b languageName: node linkType: hard -"array.prototype.flatmap@npm:^1.3.1": - version: 1.3.1 - resolution: "array.prototype.flatmap@npm:1.3.1" +"array.prototype.flatmap@npm:^1.3.2": + version: 1.3.2 + resolution: "array.prototype.flatmap@npm:1.3.2" dependencies: call-bind: ^1.0.2 - define-properties: ^1.1.4 - es-abstract: ^1.20.4 + define-properties: ^1.2.0 + es-abstract: ^1.22.1 es-shim-unscopables: ^1.0.0 - checksum: 8c1c43a4995f12cf12523436da28515184c753807b3f0bc2ca6c075f71c470b099e2090cc67dba8e5280958fea401c1d0c59e1db0143272aef6cd1103921a987 + checksum: ce09fe21dc0bcd4f30271f8144083aa8c13d4639074d6c8dc82054b847c7fc9a0c97f857491f4da19d4003e507172a78f4bcd12903098adac8b9cd374f734be3 languageName: node linkType: hard -"arraybuffer.prototype.slice@npm:^1.0.1": - version: 1.0.1 - resolution: "arraybuffer.prototype.slice@npm:1.0.1" +"arraybuffer.prototype.slice@npm:^1.0.2": + version: 1.0.2 + resolution: "arraybuffer.prototype.slice@npm:1.0.2" dependencies: array-buffer-byte-length: ^1.0.0 call-bind: ^1.0.2 define-properties: ^1.2.0 + es-abstract: ^1.22.1 get-intrinsic: ^1.2.1 is-array-buffer: ^3.0.2 is-shared-array-buffer: ^1.0.2 - checksum: 
e3e9b2a3e988ebfeddce4c7e8f69df730c9e48cb04b0d40ff0874ce3d86b3d1339dd520ffde5e39c02610bc172ecfbd4bc93324b1cabd9554c44a56b131ce0ce + checksum: c200faf437786f5b2c80d4564ff5481c886a16dee642ef02abdc7306c7edd523d1f01d1dd12b769c7eb42ac9bc53874510db19a92a2c035c0f6696172aafa5d3 languageName: node linkType: hard @@ -2447,19 +2519,19 @@ __metadata: languageName: node linkType: hard -"bootstrap-icons@npm:1.11.1": - version: 1.11.1 - resolution: "bootstrap-icons@npm:1.11.1" - checksum: d78ff24a832fc47db0c7ba5d92d150caee6d8e9ccef24c6de5c2d9728b9610884c8ade3ecadcf09fffae207786acdf40bb026ea5b79a774a1ada62026d9fe16b +"bootstrap-icons@npm:1.11.3": + version: 1.11.3 + resolution: "bootstrap-icons@npm:1.11.3" + checksum: d5cdb90fe37af9051f369cbced8aa25bde9c29895f6ab47cbadcfdca71ae5b49093fceb4261c910a84d4352a5a4f998fdae4f1c245897bc6a1042321f4380c07 languageName: node linkType: hard -"bootstrap@npm:5.3.2": - version: 5.3.2 - resolution: "bootstrap@npm:5.3.2" +"bootstrap@npm:5.3.3": + version: 5.3.3 + resolution: "bootstrap@npm:5.3.3" peerDependencies: "@popperjs/core": ^2.11.8 - checksum: d5580b253d121ffc137388d41da58dce8d15f1ccd574e12f28d4a08e7649ca15e95db645b2b677cb8025bccd446bff04138fc0fe64f8cba0ccc5dc004a8644cf + checksum: 537b68db30150075614310e9ebdf1be9b4affdf89ca226d59f4352e82a368b203af13ed0ce5ccfa4e06f141ecd233f7432ca3817e9c1a39863a05fbe13c73c4b languageName: node linkType: hard @@ -2533,6 +2605,13 @@ browserlist@latest: languageName: node linkType: hard +"builtin-modules@npm:^3.3.0": + version: 3.3.0 + resolution: "builtin-modules@npm:3.3.0" + checksum: db021755d7ed8be048f25668fe2117620861ef6703ea2c65ed2779c9e3636d5c3b82325bd912244293959ff3ae303afa3471f6a15bf5060c103e4cc3a839749d + languageName: node + linkType: hard + "builtins@npm:^4.0.0": version: 4.1.0 resolution: "builtins@npm:4.1.0" @@ -2551,25 +2630,24 @@ browserlist@latest: languageName: node linkType: hard -"c8@npm:8.0.1": - version: 8.0.1 - resolution: "c8@npm:8.0.1" +"c8@npm:9.1.0": + version: 9.1.0 + resolution: "c8@npm:9.1.0" dependencies: "@bcoe/v8-coverage": ^0.2.3 "@istanbuljs/schema": ^0.1.3 find-up: ^5.0.0 - foreground-child: ^2.0.0 + foreground-child: ^3.1.1 istanbul-lib-coverage: ^3.2.0 istanbul-lib-report: ^3.0.1 istanbul-reports: ^3.1.6 - rimraf: ^3.0.2 test-exclude: ^6.0.0 v8-to-istanbul: ^9.0.0 yargs: ^17.7.2 yargs-parser: ^21.1.1 bin: c8: bin/c8.js - checksum: 2c47531d21cb67b1e533fbb203ddb5a1c4b45d52c004dcf4eb1376ac8df205f2f4a1b2b9611777ca88dadbbcc2bbdad26b8c5f7ca58a02ecd52afa2aebef73fe + checksum: c5249bf9c390784a33b05f5e930f5301793c15105c874a0130839dbf3309ce8832376f77be5e325a40cc2955f455f1d7aea754858befd07eee535dd42b287bbe languageName: node linkType: hard @@ -2609,6 +2687,17 @@ browserlist@latest: languageName: node linkType: hard +"call-bind@npm:^1.0.4, call-bind@npm:^1.0.5": + version: 1.0.5 + resolution: "call-bind@npm:1.0.5" + dependencies: + function-bind: ^1.1.2 + get-intrinsic: ^1.2.1 + set-function-length: ^1.1.1 + checksum: 449e83ecbd4ba48e7eaac5af26fea3b50f8f6072202c2dd7c5a6e7a6308f2421abe5e13a3bbd55221087f76320c5e09f25a8fdad1bab2b77c68ae74d92234ea5 + languageName: node + linkType: hard + "callsites@npm:^3.0.0": version: 3.1.0 resolution: "callsites@npm:3.1.0" @@ -2616,10 +2705,10 @@ browserlist@latest: languageName: node linkType: hard -"caniuse-lite@npm:1.0.30001538": - version: 1.0.30001538 - resolution: "caniuse-lite@npm:1.0.30001538" - checksum: 94c5d55757a339c7cc175f08a024671e2b4e7c04f130b1015793303d637061347efb6ad84447c3b8137333e742d150b8ad9672716bbf2482646c2e63a56f6c55 +"caniuse-lite@npm:1.0.30001603": + 
version: 1.0.30001603 + resolution: "caniuse-lite@npm:1.0.30001603" + checksum: e66e0d24b899c2ed3fdcc2dd44df29c4fc06d74fa8f43abe81fc7cff4a72b092d438e0fb5b7daeb252ee267519f32c6c7d229a15e7a4f4263afef6ea3832b661 languageName: node linkType: hard @@ -2660,6 +2749,37 @@ browserlist@latest: languageName: node linkType: hard +"chart.js@npm:^4.5.1": + version: 4.5.1 + resolution: "chart.js@npm:4.5.1" + dependencies: + "@kurkle/color": ^0.3.0 + checksum: 34b35b373642994b2adac197e91363625930530e29fc1baa6dbb411b5e1295f9f6572922003a0224a21a3019aec916567c1ed00c33b1373081f189fc188e5a7b + languageName: node + linkType: hard + +"chartjs-plugin-autocolors@npm:0.3.1": + version: 0.3.1 + resolution: "chartjs-plugin-autocolors@npm:0.3.1" + peerDependencies: + "@kurkle/color": ^0.3.1 + chart.js: ">=2" + checksum: de4f87b5bb3e042aa1d3de3886425bbd2340a55ca455b645569d0def602079833182ef214e205ff4466fb5ab1e708761cf37eb51ab3cd622284242c05ed94128 + languageName: node + linkType: hard + +"chartjs-plugin-zoom@npm:2.2.0": + version: 2.2.0 + resolution: "chartjs-plugin-zoom@npm:2.2.0" + dependencies: + "@types/hammerjs": ^2.0.45 + hammerjs: ^2.0.8 + peerDependencies: + chart.js: ">=3.2.0" + checksum: a540e3834082eeb4dedb5ec6ca381f94d7e101075c19a7b65f2a4cd2d12685b3a416e718c9cf7145799802874fb397f69b71a955dfc56b035946cde4d1eb6c8e + languageName: node + linkType: hard + "chokidar@npm:>=3.0.0 <4.0.0": version: 3.5.3 resolution: "chokidar@npm:3.5.3" @@ -2892,10 +3012,10 @@ browserlist@latest: languageName: node linkType: hard -"csstype@npm:^3.1.1": - version: 3.1.2 - resolution: "csstype@npm:3.1.2" - checksum: e1a52e6c25c1314d6beef5168da704ab29c5186b877c07d822bd0806717d9a265e8493a2e35ca7e68d0f5d472d43fac1cdce70fd79fd0853dff81f3028d857b5 +"csstype@npm:^3.1.3": + version: 3.1.3 + resolution: "csstype@npm:3.1.3" + checksum: 8db785cc92d259102725b3c694ec0c823f5619a84741b5c7991b8ad135dfaa66093038a1cc63e03361a6cd28d122be48f2106ae72334e067dd619a51f49eddf7 languageName: node linkType: hard @@ -3195,9 +3315,9 @@ browserlist@latest: languageName: node linkType: hard -"d3@npm:7.8.5": - version: 7.8.5 - resolution: "d3@npm:7.8.5" +"d3@npm:7.9.0": + version: 7.9.0 + resolution: "d3@npm:7.9.0" dependencies: d3-array: 3 d3-axis: 3 @@ -3229,7 +3349,7 @@ browserlist@latest: d3-timer: 3 d3-transition: 3 d3-zoom: 3 - checksum: e407e79731f74d946a5eb8dec2f037b5a4ad33c294409b1d3531fdf7094de48adfe364974cb37e2396bdb81e23149d56d0ede716c004d6aebb52b3cc114cd15c + checksum: 1c0e9135f1fb78aa32b187fafc8b56ae6346102bd0e4e5e5a5339611a51e6038adbaa293fae373994228100eddd87320e930b1be922baeadc07c9fd43d26d99b languageName: node linkType: hard @@ -3260,7 +3380,7 @@ browserlist@latest: languageName: node linkType: hard -"debug@npm:4, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.3.2, debug@npm:^4.3.3, debug@npm:^4.3.4": +"debug@npm:4, debug@npm:^4.1.0, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.3, debug@npm:^4.3.4": version: 4.3.4 resolution: "debug@npm:4.3.4" dependencies: @@ -3288,20 +3408,24 @@ browserlist@latest: languageName: node linkType: hard -"deepmerge@npm:^4.2.0": - version: 4.2.2 - resolution: "deepmerge@npm:4.2.2" - checksum: a8c43a1ed8d6d1ed2b5bf569fa4c8eb9f0924034baf75d5d406e47e157a451075c4db353efea7b6bcc56ec48116a8ce72fccf867b6e078e7c561904b5897530b - languageName: node - linkType: hard - -"deepmerge@npm:^4.3.1": +"deepmerge@npm:4.3.1, deepmerge@npm:^4.3.1": version: 4.3.1 resolution: "deepmerge@npm:4.3.1" checksum: 
2024c6a980a1b7128084170c4cf56b0fd58a63f2da1660dcfe977415f27b17dbe5888668b59d0b063753f3220719d5e400b7f113609489c90160bb9a5518d052 languageName: node linkType: hard +"define-data-property@npm:^1.0.1, define-data-property@npm:^1.1.1": + version: 1.1.1 + resolution: "define-data-property@npm:1.1.1" + dependencies: + get-intrinsic: ^1.2.1 + gopd: ^1.0.1 + has-property-descriptors: ^1.0.0 + checksum: a29855ad3f0630ea82e3c5012c812efa6ca3078d5c2aa8df06b5f597c1cde6f7254692df41945851d903e05a1668607b6d34e778f402b9ff9ffb38111f1a3f0d + languageName: node + linkType: hard + "define-properties@npm:^1.1.3, define-properties@npm:^1.1.4": version: 1.1.4 resolution: "define-properties@npm:1.1.4" @@ -3517,6 +3641,13 @@ browserlist@latest: languageName: node linkType: hard +"entities@npm:^4.5.0": + version: 4.5.0 + resolution: "entities@npm:4.5.0" + checksum: 853f8ebd5b425d350bffa97dd6958143179a5938352ccae092c62d1267c4e392a039be1bae7d51b6e4ffad25f51f9617531fedf5237f15df302ccfb452cbf2d7 + languageName: node + linkType: hard + "env-paths@npm:^2.2.0": version: 2.2.1 resolution: "env-paths@npm:2.2.1" @@ -3540,66 +3671,25 @@ browserlist@latest: languageName: node linkType: hard -"es-abstract@npm:^1.19.0, es-abstract@npm:^1.20.4": - version: 1.21.1 - resolution: "es-abstract@npm:1.21.1" - dependencies: - available-typed-arrays: ^1.0.5 - call-bind: ^1.0.2 - es-set-tostringtag: ^2.0.1 - es-to-primitive: ^1.2.1 - function-bind: ^1.1.1 - function.prototype.name: ^1.1.5 - get-intrinsic: ^1.1.3 - get-symbol-description: ^1.0.0 - globalthis: ^1.0.3 - gopd: ^1.0.1 - has: ^1.0.3 - has-property-descriptors: ^1.0.0 - has-proto: ^1.0.1 - has-symbols: ^1.0.3 - internal-slot: ^1.0.4 - is-array-buffer: ^3.0.1 - is-callable: ^1.2.7 - is-negative-zero: ^2.0.2 - is-regex: ^1.1.4 - is-shared-array-buffer: ^1.0.2 - is-string: ^1.0.7 - is-typed-array: ^1.1.10 - is-weakref: ^1.0.2 - object-inspect: ^1.12.2 - object-keys: ^1.1.1 - object.assign: ^4.1.4 - regexp.prototype.flags: ^1.4.3 - safe-regex-test: ^1.0.0 - string.prototype.trimend: ^1.0.6 - string.prototype.trimstart: ^1.0.6 - typed-array-length: ^1.0.4 - unbox-primitive: ^1.0.2 - which-typed-array: ^1.1.9 - checksum: 23ff60d42d17a55d150e7bcedbdb065d4077a8b98c436e0e2e1ef4dd532a6d78a56028673de0bd8ed464a43c46ba781c50d9af429b6a17e44dbd14c7d7fb7926 - languageName: node - linkType: hard - -"es-abstract@npm:^1.21.2": - version: 1.22.1 - resolution: "es-abstract@npm:1.22.1" +"es-abstract@npm:^1.22.1": + version: 1.22.3 + resolution: "es-abstract@npm:1.22.3" dependencies: array-buffer-byte-length: ^1.0.0 - arraybuffer.prototype.slice: ^1.0.1 + arraybuffer.prototype.slice: ^1.0.2 available-typed-arrays: ^1.0.5 - call-bind: ^1.0.2 + call-bind: ^1.0.5 es-set-tostringtag: ^2.0.1 es-to-primitive: ^1.2.1 - function.prototype.name: ^1.1.5 - get-intrinsic: ^1.2.1 + function.prototype.name: ^1.1.6 + get-intrinsic: ^1.2.2 get-symbol-description: ^1.0.0 globalthis: ^1.0.3 gopd: ^1.0.1 - has: ^1.0.3 has-property-descriptors: ^1.0.0 has-proto: ^1.0.1 has-symbols: ^1.0.3 + hasown: ^2.0.0 internal-slot: ^1.0.5 is-array-buffer: ^3.0.2 is-callable: ^1.2.7 @@ -3607,24 +3697,24 @@ browserlist@latest: is-regex: ^1.1.4 is-shared-array-buffer: ^1.0.2 is-string: ^1.0.7 - is-typed-array: ^1.1.10 + is-typed-array: ^1.1.12 is-weakref: ^1.0.2 - object-inspect: ^1.12.3 + object-inspect: ^1.13.1 object-keys: ^1.1.1 object.assign: ^4.1.4 - regexp.prototype.flags: ^1.5.0 - safe-array-concat: ^1.0.0 + regexp.prototype.flags: ^1.5.1 + safe-array-concat: ^1.0.1 safe-regex-test: ^1.0.0 - string.prototype.trim: ^1.2.7 - 
string.prototype.trimend: ^1.0.6 - string.prototype.trimstart: ^1.0.6 + string.prototype.trim: ^1.2.8 + string.prototype.trimend: ^1.0.7 + string.prototype.trimstart: ^1.0.7 typed-array-buffer: ^1.0.0 typed-array-byte-length: ^1.0.0 typed-array-byte-offset: ^1.0.0 typed-array-length: ^1.0.4 unbox-primitive: ^1.0.2 - which-typed-array: ^1.1.10 - checksum: 614e2c1c3717cb8d30b6128ef12ea110e06fd7d75ad77091ca1c5dbfb00da130e62e4bbbbbdda190eada098a22b27fe0f99ae5a1171dac2c8663b1e8be8a3a9b + which-typed-array: ^1.1.13 + checksum: b1bdc962856836f6e72be10b58dc128282bdf33771c7a38ae90419d920fc3b36cc5d2b70a222ad8016e3fc322c367bf4e9e89fc2bc79b7e933c05b218e83d79a languageName: node linkType: hard @@ -3764,6 +3854,15 @@ browserlist@latest: languageName: node linkType: hard +"eslint-compat-utils@npm:^0.1.2": + version: 0.1.2 + resolution: "eslint-compat-utils@npm:0.1.2" + peerDependencies: + eslint: ">=6.0.0" + checksum: 2315d9db81efb7f58808053bf32a1d5970b38e01cd8244f4f1b5aa05d883255c5c93fc184e9c29a0e7e2dcf16ff16330977302474d3fa870e41c5bed9c66f76b + languageName: node + linkType: hard + "eslint-config-standard@npm:17.1.0": version: 17.1.0 resolution: "eslint-config-standard@npm:17.1.0" @@ -3776,14 +3875,14 @@ browserlist@latest: languageName: node linkType: hard -"eslint-import-resolver-node@npm:^0.3.7": - version: 0.3.7 - resolution: "eslint-import-resolver-node@npm:0.3.7" +"eslint-import-resolver-node@npm:^0.3.9": + version: 0.3.9 + resolution: "eslint-import-resolver-node@npm:0.3.9" dependencies: debug: ^3.2.7 - is-core-module: ^2.11.0 - resolve: ^1.22.1 - checksum: 3379aacf1d2c6952c1b9666c6fa5982c3023df695430b0d391c0029f6403a7775414873d90f397e98ba6245372b6c8960e16e74d9e4a3b0c0a4582f3bdbe3d6e + is-core-module: ^2.13.0 + resolve: ^1.22.4 + checksum: 439b91271236b452d478d0522a44482e8c8540bf9df9bd744062ebb89ab45727a3acd03366a6ba2bdbcde8f9f718bab7fe8db64688aca75acf37e04eafd25e22 languageName: node linkType: hard @@ -3810,15 +3909,16 @@ browserlist@latest: languageName: node linkType: hard -"eslint-plugin-es-x@npm:^7.1.0": - version: 7.1.0 - resolution: "eslint-plugin-es-x@npm:7.1.0" +"eslint-plugin-es-x@npm:^7.5.0": + version: 7.5.0 + resolution: "eslint-plugin-es-x@npm:7.5.0" dependencies: "@eslint-community/eslint-utils": ^4.1.2 - "@eslint-community/regexpp": ^4.5.0 + "@eslint-community/regexpp": ^4.6.0 + eslint-compat-utils: ^0.1.2 peerDependencies: eslint: ">=8" - checksum: a19924313ce28214cc1b25fb749e3d977688f23c7fc9f01c3447b5853528b82b63586d94060924b49072b005314695af3b073dcd8f6b965ad1923a2cabf3e9f7 + checksum: e770e57df78c3c38582de9bc4b9632ec5101a6dae8ac84f6ac219e8d8eb137f943db9730e037cfbc82f5d3ab6358e1b494fa6c628f425ebfc7e3094d5aa9d223 languageName: node linkType: hard @@ -3846,30 +3946,30 @@ browserlist@latest: languageName: node linkType: hard -"eslint-plugin-import@npm:2.28.1": - version: 2.28.1 - resolution: "eslint-plugin-import@npm:2.28.1" +"eslint-plugin-import@npm:2.29.1": + version: 2.29.1 + resolution: "eslint-plugin-import@npm:2.29.1" dependencies: - array-includes: ^3.1.6 - array.prototype.findlastindex: ^1.2.2 - array.prototype.flat: ^1.3.1 - array.prototype.flatmap: ^1.3.1 + array-includes: ^3.1.7 + array.prototype.findlastindex: ^1.2.3 + array.prototype.flat: ^1.3.2 + array.prototype.flatmap: ^1.3.2 debug: ^3.2.7 doctrine: ^2.1.0 - eslint-import-resolver-node: ^0.3.7 + eslint-import-resolver-node: ^0.3.9 eslint-module-utils: ^2.8.0 - has: ^1.0.3 - is-core-module: ^2.13.0 + hasown: ^2.0.0 + is-core-module: ^2.13.1 is-glob: ^4.0.3 minimatch: ^3.1.2 - object.fromentries: ^2.0.6 - 
object.groupby: ^1.0.0 - object.values: ^1.1.6 + object.fromentries: ^2.0.7 + object.groupby: ^1.0.1 + object.values: ^1.1.7 semver: ^6.3.1 - tsconfig-paths: ^3.14.2 + tsconfig-paths: ^3.15.0 peerDependencies: eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 - checksum: e8ae6dd8f06d8adf685f9c1cfd46ac9e053e344a05c4090767e83b63a85c8421ada389807a39e73c643b9bff156715c122e89778169110ed68d6428e12607edf + checksum: e65159aef808136d26d029b71c8c6e4cb5c628e65e5de77f1eb4c13a379315ae55c9c3afa847f43f4ff9df7e54515c77ffc6489c6a6f81f7dd7359267577468c languageName: node linkType: hard @@ -3891,22 +3991,24 @@ browserlist@latest: languageName: node linkType: hard -"eslint-plugin-n@npm:16.2.0": - version: 16.2.0 - resolution: "eslint-plugin-n@npm:16.2.0" +"eslint-plugin-n@npm:16.6.2": + version: 16.6.2 + resolution: "eslint-plugin-n@npm:16.6.2" dependencies: "@eslint-community/eslint-utils": ^4.4.0 builtins: ^5.0.1 - eslint-plugin-es-x: ^7.1.0 + eslint-plugin-es-x: ^7.5.0 get-tsconfig: ^4.7.0 + globals: ^13.24.0 ignore: ^5.2.4 + is-builtin-module: ^3.2.1 is-core-module: ^2.12.1 minimatch: ^3.1.2 resolve: ^1.22.2 semver: ^7.5.3 peerDependencies: eslint: ">=7.0.0" - checksum: 124ba4f418c895d81201ddc0c61cdca246c8aaa652e572653fad0dd66701aaef30598956fbe676726ab1037e600eddeaba52cd1a71b86f41e50a97f6e725055e + checksum: 3b468da0038cf25af582608983491b33ac2d481b6a94a0ff2e715d3b85e1ff8cb93df4cd67b689d520bea1bfb8f2b717f01606bf6b2ea19fe8f9c0999ea7057d languageName: node linkType: hard @@ -3935,20 +4037,21 @@ browserlist@latest: languageName: node linkType: hard -"eslint-plugin-vue@npm:9.17.0": - version: 9.17.0 - resolution: "eslint-plugin-vue@npm:9.17.0" +"eslint-plugin-vue@npm:9.24.0": + version: 9.24.0 + resolution: "eslint-plugin-vue@npm:9.24.0" dependencies: "@eslint-community/eslint-utils": ^4.4.0 + globals: ^13.24.0 natural-compare: ^1.4.0 nth-check: ^2.1.1 - postcss-selector-parser: ^6.0.13 - semver: ^7.5.4 - vue-eslint-parser: ^9.3.1 + postcss-selector-parser: ^6.0.15 + semver: ^7.6.0 + vue-eslint-parser: ^9.4.2 xml-name-validator: ^4.0.0 peerDependencies: eslint: ^6.2.0 || ^7.0.0 || ^8.0.0 - checksum: 2ef53a03876f7c96828ad10dae7d1c4d87b51e348f58b16de3f2bedbbff9a3410eabfaf65e4890b0b7ae6d1e710c1c370998d5bc64d6ca3095a95713b3a4cf67 + checksum: 2309b919d8fced6210c11e09107f443990063c0392843909cf50fad682e820c48bf5cc28b82a1239c03fd7ceeb4239e1baa653370c4c76689ec5fb8a970cd303 languageName: node linkType: hard @@ -4027,17 +4130,18 @@ browserlist@latest: languageName: node linkType: hard -"eslint@npm:8.51.0": - version: 8.51.0 - resolution: "eslint@npm:8.51.0" +"eslint@npm:8.57.0": + version: 8.57.0 + resolution: "eslint@npm:8.57.0" dependencies: "@eslint-community/eslint-utils": ^4.2.0 "@eslint-community/regexpp": ^4.6.1 - "@eslint/eslintrc": ^2.1.2 - "@eslint/js": 8.51.0 - "@humanwhocodes/config-array": ^0.11.11 + "@eslint/eslintrc": ^2.1.4 + "@eslint/js": 8.57.0 + "@humanwhocodes/config-array": ^0.11.14 "@humanwhocodes/module-importer": ^1.0.1 "@nodelib/fs.walk": ^1.2.8 + "@ungap/structured-clone": ^1.2.0 ajv: ^6.12.4 chalk: ^4.0.0 cross-spawn: ^7.0.2 @@ -4070,7 +4174,7 @@ browserlist@latest: text-table: ^0.2.0 bin: eslint: bin/eslint.js - checksum: 214fa5d1fcb67af1b8992ce9584ccd85e1aa7a482f8b8ea5b96edc28fa838a18a3b69456db45fc1ed3ef95f1e9efa9714f737292dc681e572d471d02fda9649c + checksum: 3a48d7ff85ab420a8447e9810d8087aea5b1df9ef68c9151732b478de698389ee656fd895635b5f2871c89ee5a2652b3f343d11e9db6f8486880374ebc74a2d9 languageName: node linkType: hard @@ -4256,17 +4360,7 @@ browserlist@latest: languageName: node linkType: 
hard -"foreground-child@npm:^2.0.0": - version: 2.0.0 - resolution: "foreground-child@npm:2.0.0" - dependencies: - cross-spawn: ^7.0.0 - signal-exit: ^3.0.2 - checksum: f77ec9aff621abd6b754cb59e690743e7639328301fbea6ff09df27d2befaf7dd5b77cec51c32323d73a81a7d91caaf9413990d305cbe3d873eec4fe58960956 - languageName: node - linkType: hard - -"foreground-child@npm:^3.1.0": +"foreground-child@npm:^3.1.0, foreground-child@npm:^3.1.1": version: 3.1.1 resolution: "foreground-child@npm:3.1.1" dependencies: @@ -4325,19 +4419,26 @@ browserlist@latest: languageName: node linkType: hard -"function.prototype.name@npm:^1.1.5": - version: 1.1.5 - resolution: "function.prototype.name@npm:1.1.5" +"function-bind@npm:^1.1.2": + version: 1.1.2 + resolution: "function-bind@npm:1.1.2" + checksum: 2b0ff4ce708d99715ad14a6d1f894e2a83242e4a52ccfcefaee5e40050562e5f6dafc1adbb4ce2d4ab47279a45dc736ab91ea5042d843c3c092820dfe032efb1 + languageName: node + linkType: hard + +"function.prototype.name@npm:^1.1.6": + version: 1.1.6 + resolution: "function.prototype.name@npm:1.1.6" dependencies: call-bind: ^1.0.2 - define-properties: ^1.1.3 - es-abstract: ^1.19.0 - functions-have-names: ^1.2.2 - checksum: acd21d733a9b649c2c442f067567743214af5fa248dbeee69d8278ce7df3329ea5abac572be9f7470b4ec1cd4d8f1040e3c5caccf98ebf2bf861a0deab735c27 + define-properties: ^1.2.0 + es-abstract: ^1.22.1 + functions-have-names: ^1.2.3 + checksum: 7a3f9bd98adab09a07f6e1f03da03d3f7c26abbdeaeee15223f6c04a9fb5674792bdf5e689dac19b97ac71de6aad2027ba3048a9b883aa1b3173eed6ab07f479 languageName: node linkType: hard -"functions-have-names@npm:^1.2.2, functions-have-names@npm:^1.2.3": +"functions-have-names@npm:^1.2.3": version: 1.2.3 resolution: "functions-have-names@npm:1.2.3" checksum: c3f1f5ba20f4e962efb71344ce0a40722163e85bee2101ce25f88214e78182d2d2476aa85ef37950c579eb6cf6ee811c17b3101bb84004bb75655f3e33f3fdb5 @@ -4401,6 +4502,18 @@ browserlist@latest: languageName: node linkType: hard +"get-intrinsic@npm:^1.2.2": + version: 1.2.2 + resolution: "get-intrinsic@npm:1.2.2" + dependencies: + function-bind: ^1.1.2 + has-proto: ^1.0.1 + has-symbols: ^1.0.3 + hasown: ^2.0.0 + checksum: 447ff0724df26829908dc033b62732359596fcf66027bc131ab37984afb33842d9cd458fd6cecadfe7eac22fd8a54b349799ed334cf2726025c921c7250e7417 + languageName: node + linkType: hard + "get-port@npm:^4.2.0": version: 4.2.0 resolution: "get-port@npm:4.2.0" @@ -4514,6 +4627,15 @@ browserlist@latest: languageName: node linkType: hard +"globals@npm:^13.24.0": + version: 13.24.0 + resolution: "globals@npm:13.24.0" + dependencies: + type-fest: ^0.20.2 + checksum: 56066ef058f6867c04ff203b8a44c15b038346a62efbc3060052a1016be9f56f4cf0b2cd45b74b22b81e521a889fc7786c73691b0549c2f3a6e825b3d394f43c + languageName: node + linkType: hard + "globalthis@npm:^1.0.3": version: 1.0.3 resolution: "globalthis@npm:1.0.3" @@ -4546,6 +4668,13 @@ browserlist@latest: languageName: node linkType: hard +"hammerjs@npm:^2.0.8": + version: 2.0.8 + resolution: "hammerjs@npm:2.0.8" + checksum: b092da7d1565a165d7edb53ef0ce212837a8b11f897aa3cf81a7818b66686b0ab3f4747fbce8fc8a41d1376594639ce3a054b0fd4889ca8b5b136a29ca500e27 + languageName: node + linkType: hard + "has-bigints@npm:^1.0.1, has-bigints@npm:^1.0.2": version: 1.0.2 resolution: "has-bigints@npm:1.0.2" @@ -4615,10 +4744,19 @@ browserlist@latest: languageName: node linkType: hard -"highcharts@npm:11.1.0": - version: 11.1.0 - resolution: "highcharts@npm:11.1.0" - checksum: 
f9b8cdc38b3b41bcc4c3a2331d9b1c769400639e2d0094484a0f5274aaba619551b95b442a69f7f4e47c2c8445681e3651f6036207fe1928a1a982f5278ae85e +"hasown@npm:^2.0.0": + version: 2.0.0 + resolution: "hasown@npm:2.0.0" + dependencies: + function-bind: ^1.1.2 + checksum: 6151c75ca12554565098641c98a40f4cc86b85b0fd5b6fe92360967e4605a4f9610f7757260b4e8098dd1c2ce7f4b095f2006fe72a570e3b6d2d28de0298c176 + languageName: node + linkType: hard + +"highcharts@npm:11.4.0": + version: 11.4.0 + resolution: "highcharts@npm:11.4.0" + checksum: 873e6619148d346223f7a98e3d23c1d58975ef4143d67d57ef88898c967495519b76b47c1f546c48535362bf4542cbe4f9f3423cc4339db152454f86e7887ddf languageName: node linkType: hard @@ -4636,17 +4774,17 @@ browserlist@latest: languageName: node linkType: hard -"html-validate@npm:8.5.0": - version: 8.5.0 - resolution: "html-validate@npm:8.5.0" +"html-validate@npm:8.18.1": + version: 8.18.1 + resolution: "html-validate@npm:8.18.1" dependencies: "@babel/code-frame": ^7.10.0 "@html-validate/stylish": ^4.1.0 - "@sidvind/better-ajv-errors": ^2.0.0 + "@sidvind/better-ajv-errors": 2.1.3 ajv: ^8.0.0 - deepmerge: ^4.2.0 + deepmerge: 4.3.1 glob: ^10.0.0 - ignore: ^5.0.0 + ignore: 5.3.1 kleur: ^4.1.0 minimist: ^1.2.0 prompts: ^2.0.0 @@ -4655,7 +4793,7 @@ browserlist@latest: jest: ^27.1 || ^28.1.3 || ^29.0.3 jest-diff: ^27.1 || ^28.1.3 || ^29.0.3 jest-snapshot: ^27.1 || ^28.1.3 || ^29.0.3 - vitest: ^0.34 + vitest: ^0.34 || ^1 peerDependenciesMeta: jest: optional: true @@ -4667,7 +4805,7 @@ browserlist@latest: optional: true bin: html-validate: bin/html-validate.js - checksum: 38ef4c832effd22d0dfcce89fcc81e085327ede335b362661235ce009b03a8335099abe292cba6a366b083824d32802524f9368ec55e0be5aa9110aab437af17 + checksum: 53479bf75bcb6ad748a6543583de6a26bfb55d85c0ae793bd6619c0079795f482c01b4168a7dea2584219f31b8a05c3ea2a0d5ebfd639099caf623263d3ac536 languageName: node linkType: hard @@ -4786,7 +4924,14 @@ browserlist@latest: languageName: node linkType: hard -"ignore@npm:^5.0.0, ignore@npm:^5.1.1, ignore@npm:^5.2.0": +"ignore@npm:5.3.1": + version: 5.3.1 + resolution: "ignore@npm:5.3.1" + checksum: 71d7bb4c1dbe020f915fd881108cbe85a0db3d636a0ea3ba911393c53946711d13a9b1143c7e70db06d571a5822c0a324a6bcde5c9904e7ca5047f01f1bf8cd3 + languageName: node + linkType: hard + +"ignore@npm:^5.1.1, ignore@npm:^5.2.0": version: 5.2.0 resolution: "ignore@npm:5.2.0" checksum: 6b1f926792d614f64c6c83da3a1f9c83f6196c2839aa41e1e32dd7b8d174cef2e329d75caabb62cb61ce9dc432f75e67d07d122a037312db7caa73166a1bdb77 @@ -4855,17 +5000,6 @@ browserlist@latest: languageName: node linkType: hard -"internal-slot@npm:^1.0.4": - version: 1.0.4 - resolution: "internal-slot@npm:1.0.4" - dependencies: - get-intrinsic: ^1.1.3 - has: ^1.0.3 - side-channel: ^1.0.4 - checksum: 8974588d06bab4f675573a3b52975370facf6486df51bc0567a982c7024fa29495f10b76c0d4dc742dd951d1b72024fdc1e31bb0bedf1678dc7aacacaf5a4f73 - languageName: node - linkType: hard - "internal-slot@npm:^1.0.5": version: 1.0.5 resolution: "internal-slot@npm:1.0.5" @@ -4948,6 +5082,15 @@ browserlist@latest: languageName: node linkType: hard +"is-builtin-module@npm:^3.2.1": + version: 3.2.1 + resolution: "is-builtin-module@npm:3.2.1" + dependencies: + builtin-modules: ^3.3.0 + checksum: e8f0ffc19a98240bda9c7ada84d846486365af88d14616e737d280d378695c8c448a621dcafc8332dbf0fcd0a17b0763b845400709963fa9151ddffece90ae88 + languageName: node + linkType: hard + "is-callable@npm:^1.1.3, is-callable@npm:^1.2.7": version: 1.2.7 resolution: "is-callable@npm:1.2.7" @@ -4962,15 +5105,6 @@ browserlist@latest: languageName: node 
linkType: hard -"is-core-module@npm:^2.11.0": - version: 2.11.0 - resolution: "is-core-module@npm:2.11.0" - dependencies: - has: ^1.0.3 - checksum: f96fd490c6b48eb4f6d10ba815c6ef13f410b0ba6f7eb8577af51697de523e5f2cd9de1c441b51d27251bf0e4aebc936545e33a5d26d5d51f28d25698d4a8bab - languageName: node - linkType: hard - "is-core-module@npm:^2.12.0, is-core-module@npm:^2.12.1": version: 2.12.1 resolution: "is-core-module@npm:2.12.1" @@ -4989,21 +5123,21 @@ browserlist@latest: languageName: node linkType: hard -"is-core-module@npm:^2.3.0, is-core-module@npm:^2.8.1": - version: 2.9.0 - resolution: "is-core-module@npm:2.9.0" +"is-core-module@npm:^2.13.1": + version: 2.13.1 + resolution: "is-core-module@npm:2.13.1" dependencies: - has: ^1.0.3 - checksum: b27034318b4b462f1c8f1dfb1b32baecd651d891a4e2d1922135daeff4141dfced2b82b07aef83ef54275c4a3526aa38da859223664d0868ca24182badb784ce + hasown: ^2.0.0 + checksum: 256559ee8a9488af90e4bad16f5583c6d59e92f0742e9e8bb4331e758521ee86b810b93bae44f390766ffbc518a0488b18d9dab7da9a5ff997d499efc9403f7c languageName: node linkType: hard -"is-core-module@npm:^2.9.0": - version: 2.10.0 - resolution: "is-core-module@npm:2.10.0" +"is-core-module@npm:^2.3.0, is-core-module@npm:^2.8.1": + version: 2.9.0 + resolution: "is-core-module@npm:2.9.0" dependencies: has: ^1.0.3 - checksum: 0f3f77811f430af3256fa7bbc806f9639534b140f8ee69476f632c3e1eb4e28a38be0b9d1b8ecf596179c841b53576129279df95e7051d694dac4ceb6f967593 + checksum: b27034318b4b462f1c8f1dfb1b32baecd651d891a4e2d1922135daeff4141dfced2b82b07aef83ef54275c4a3526aa38da859223664d0868ca24182badb784ce languageName: node linkType: hard @@ -5150,6 +5284,15 @@ browserlist@latest: languageName: node linkType: hard +"is-typed-array@npm:^1.1.12": + version: 1.1.12 + resolution: "is-typed-array@npm:1.1.12" + dependencies: + which-typed-array: ^1.1.11 + checksum: 4c89c4a3be07186caddadf92197b17fda663a9d259ea0d44a85f171558270d36059d1c386d34a12cba22dfade5aba497ce22778e866adc9406098c8fc4771796 + languageName: node + linkType: hard + "is-weakref@npm:^1.0.2": version: 1.0.2 resolution: "is-weakref@npm:1.0.2" @@ -5589,19 +5732,19 @@ browserlist@latest: languageName: node linkType: hard -"luxon@npm:3.4.3": - version: 3.4.3 - resolution: "luxon@npm:3.4.3" - checksum: 3eade81506224d038ed24035a0cd0dd4887848d7eba9361dce9ad8ef81380596a68153240be3988721f9690c624fb449fcf8fd8c3fc0681a6a8496faf48e92a3 +"luxon@npm:3.4.4": + version: 3.4.4 + resolution: "luxon@npm:3.4.4" + checksum: 36c1f99c4796ee4bfddf7dc94fa87815add43ebc44c8934c924946260a58512f0fd2743a629302885df7f35ccbd2d13f178c15df046d0e3b6eb71db178f1c60c languageName: node linkType: hard -"magic-string@npm:^0.30.0": - version: 0.30.0 - resolution: "magic-string@npm:0.30.0" +"magic-string@npm:^0.30.7": + version: 0.30.7 + resolution: "magic-string@npm:0.30.7" dependencies: - "@jridgewell/sourcemap-codec": ^1.4.13 - checksum: 7bdf22e27334d8a393858a16f5f840af63a7c05848c000fd714da5aa5eefa09a1bc01d8469362f25cc5c4a14ec01b46557b7fff8751365522acddf21e57c488d + "@jridgewell/sourcemap-codec": ^1.4.15 + checksum: bdf102e36a44d1728ec61b69d655caba3f66ca58898e292f6debe57dc30896bd37908bfe3464a7464a435831a9e44aa905cebd681e21c2f44bbe4dddf225619f languageName: node linkType: hard @@ -5792,16 +5935,23 @@ browserlist@latest: languageName: node linkType: hard -"moment-timezone@npm:0.5.43": - version: 0.5.43 - resolution: "moment-timezone@npm:0.5.43" +"moment-timezone@npm:0.5.45": + version: 0.5.45 + resolution: "moment-timezone@npm:0.5.45" dependencies: moment: ^2.29.4 - checksum: 
8075c897ed8a044f992ef26fe8cdbcad80caf974251db424cae157473cca03be2830de8c74d99341b76edae59f148c9d9d19c1c1d9363259085688ec1cf508d0 + checksum: a22e9f983fbe1a01757ce30685bce92e3f6efa692eb682afd47b82da3ff960b3c8c2c3883ec6715c124bc985a342b57cba1f6ba25a1c8b4c7ad766db3cd5e1d0 + languageName: node + linkType: hard + +"moment@npm:2.30.1": + version: 2.30.1 + resolution: "moment@npm:2.30.1" + checksum: 859236bab1e88c3e5802afcf797fc801acdbd0ee509d34ea3df6eea21eb6bcc2abd4ae4e4e64aa7c986aa6cba563c6e62806218e6412a765010712e5fa121ba6 languageName: node linkType: hard -"moment@npm:2.29.4, moment@npm:^2.29.4": +"moment@npm:^2.29.4": version: 2.29.4 resolution: "moment@npm:2.29.4" checksum: 0ec3f9c2bcba38dc2451b1daed5daded747f17610b92427bebe1d08d48d8b7bdd8d9197500b072d14e326dd0ccf3e326b9e3d07c5895d3d49e39b6803b76e80e @@ -5913,6 +6063,25 @@ browserlist@latest: languageName: node linkType: hard +"msgpackr@npm:^1.9.9": + version: 1.10.1 + resolution: "msgpackr@npm:1.10.1" + dependencies: + msgpackr-extract: ^3.0.2 + dependenciesMeta: + msgpackr-extract: + optional: true + checksum: e422d18b01051598b23701eebeb4b9e2c686b9c7826b20f564724837ba2b5cd4af74c91a549eaeaf8186645cc95e8196274a4a19442aa3286ac611b98069c194 + languageName: node + linkType: hard + +"muggle-string@npm:^0.4.0": + version: 0.4.1 + resolution: "muggle-string@npm:0.4.1" + checksum: 85fe1766d18d43cf22b6da7d047203a65b2e2b1ccfac505b699c2a459644f95ebb3c854a96db5be559eea0e213f6ee32b986b8c2f73c48e6c89e1fd829616532 + languageName: node + linkType: hard + "murmurhash-js@npm:1.0.0": version: 1.0.0 resolution: "murmurhash-js@npm:1.0.0" @@ -5920,9 +6089,9 @@ browserlist@latest: languageName: node linkType: hard -"naive-ui@npm:2.35.0": - version: 2.35.0 - resolution: "naive-ui@npm:2.35.0" +"naive-ui@npm:2.38.1": + version: 2.38.1 + resolution: "naive-ui@npm:2.38.1" dependencies: "@css-render/plugin-bem": ^0.15.12 "@css-render/vue3-ssr": ^0.15.12 @@ -5931,38 +6100,30 @@ browserlist@latest: "@types/lodash-es": ^4.17.9 async-validator: ^4.2.5 css-render: ^0.15.12 + csstype: ^3.1.3 date-fns: ^2.30.0 date-fns-tz: ^2.0.0 evtd: ^0.2.4 highlight.js: ^11.8.0 lodash: ^4.17.21 lodash-es: ^4.17.21 - seemly: ^0.3.6 + seemly: ^0.3.8 treemate: ^0.3.11 vdirs: ^0.1.8 vooks: ^0.2.12 - vueuc: ^0.4.51 + vueuc: ^0.4.58 peerDependencies: vue: ^3.0.0 - checksum: 53239b8cbe5092f719ac8975c76fd92cfc4b380975ad968843c8bbd0d5f0cd697573fde09480c058cb988d16acfb44757fd59af5049c9e7b388ed22ee49516d4 + checksum: 88a8f981dec2ebcdfe0f06d9123d46069e22f881e3286441d6396ea80ee56079d7f93e731321d2320156196e442df77a9ae45f8599b98b144af458d12b29d88c languageName: node linkType: hard -"nanoid@npm:^3.3.1": - version: 3.3.3 - resolution: "nanoid@npm:3.3.3" +"nanoid@npm:^3.3.7": + version: 3.3.7 + resolution: "nanoid@npm:3.3.7" bin: nanoid: bin/nanoid.cjs - checksum: ada019402a07464a694553c61d2dca8a4353645a7d92f2830f0d487fedff403678a0bee5323a46522752b2eab95a0bc3da98b6cccaa7c0c55cd9975130e6d6f0 - languageName: node - linkType: hard - -"nanoid@npm:^3.3.6": - version: 3.3.6 - resolution: "nanoid@npm:3.3.6" - bin: - nanoid: bin/nanoid.cjs - checksum: 7d0eda657002738aa5206107bd0580aead6c95c460ef1bdd0b1a87a9c7ae6277ac2e9b945306aaa5b32c6dcb7feaf462d0f552e7f8b5718abfc6ead5c94a71b3 + checksum: d36c427e530713e4ac6567d488b489a36582ef89da1d6d4e3b87eded11eb10d7042a877958c6f104929809b2ab0bafa17652b076cdf84324aa75b30b722204f2 languageName: node linkType: hard @@ -6144,10 +6305,10 @@ browserlist@latest: languageName: node linkType: hard -"object-inspect@npm:^1.12.2, object-inspect@npm:^1.12.3": - version: 1.12.3 - 
resolution: "object-inspect@npm:1.12.3" - checksum: dabfd824d97a5f407e6d5d24810d888859f6be394d8b733a77442b277e0808860555176719c5905e765e3743a7cada6b8b0a3b85e5331c530fd418cc8ae991db +"object-inspect@npm:^1.13.1": + version: 1.13.1 + resolution: "object-inspect@npm:1.13.1" + checksum: 7d9fa9221de3311dcb5c7c307ee5dc011cdd31dc43624b7c184b3840514e118e05ef0002be5388304c416c0eb592feb46e983db12577fc47e47d5752fbbfb61f languageName: node linkType: hard @@ -6177,37 +6338,37 @@ browserlist@latest: languageName: node linkType: hard -"object.fromentries@npm:^2.0.6": - version: 2.0.6 - resolution: "object.fromentries@npm:2.0.6" +"object.fromentries@npm:^2.0.7": + version: 2.0.7 + resolution: "object.fromentries@npm:2.0.7" dependencies: call-bind: ^1.0.2 - define-properties: ^1.1.4 - es-abstract: ^1.20.4 - checksum: 453c6d694180c0c30df451b60eaf27a5b9bca3fb43c37908fd2b78af895803dc631242bcf05582173afa40d8d0e9c96e16e8874b39471aa53f3ac1f98a085d85 + define-properties: ^1.2.0 + es-abstract: ^1.22.1 + checksum: 7341ce246e248b39a431b87a9ddd331ff52a454deb79afebc95609f94b1f8238966cf21f52188f2a353f0fdf83294f32f1ebf1f7826aae915ebad21fd0678065 languageName: node linkType: hard -"object.groupby@npm:^1.0.0": - version: 1.0.0 - resolution: "object.groupby@npm:1.0.0" +"object.groupby@npm:^1.0.1": + version: 1.0.1 + resolution: "object.groupby@npm:1.0.1" dependencies: call-bind: ^1.0.2 define-properties: ^1.2.0 - es-abstract: ^1.21.2 + es-abstract: ^1.22.1 get-intrinsic: ^1.2.1 - checksum: 64b00b287d57580111c958e7ff375c9b61811fa356f2cf0d35372d43cab61965701f00fac66c19fd8f49c4dfa28744bee6822379c69a73648ad03e09fcdeae70 + checksum: d7959d6eaaba358b1608066fc67ac97f23ce6f573dc8fc661f68c52be165266fcb02937076aedb0e42722fdda0bdc0bbf74778196ac04868178888e9fd3b78b5 languageName: node linkType: hard -"object.values@npm:^1.1.6": - version: 1.1.6 - resolution: "object.values@npm:1.1.6" +"object.values@npm:^1.1.7": + version: 1.1.7 + resolution: "object.values@npm:1.1.7" dependencies: call-bind: ^1.0.2 - define-properties: ^1.1.4 - es-abstract: ^1.20.4 - checksum: f6fff9fd817c24cfd8107f50fb33061d81cd11bacc4e3dbb3852e9ff7692fde4dbce823d4333ea27cd9637ef1b6690df5fbb61f1ed314fa2959598dc3ae23d8e + define-properties: ^1.2.0 + es-abstract: ^1.22.1 + checksum: f3e4ae4f21eb1cc7cebb6ce036d4c67b36e1c750428d7b7623c56a0db90edced63d08af8a316d81dfb7c41a3a5fa81b05b7cc9426e98d7da986b1682460f0777 languageName: node linkType: hard @@ -6284,27 +6445,27 @@ browserlist@latest: languageName: node linkType: hard -"parcel@npm:2.10.0": - version: 2.10.0 - resolution: "parcel@npm:2.10.0" +"parcel@npm:2.12.0": + version: 2.12.0 + resolution: "parcel@npm:2.12.0" dependencies: - "@parcel/config-default": 2.10.0 - "@parcel/core": 2.10.0 - "@parcel/diagnostic": 2.10.0 - "@parcel/events": 2.10.0 - "@parcel/fs": 2.10.0 - "@parcel/logger": 2.10.0 - "@parcel/package-manager": 2.10.0 - "@parcel/reporter-cli": 2.10.0 - "@parcel/reporter-dev-server": 2.10.0 - "@parcel/reporter-tracer": 2.10.0 - "@parcel/utils": 2.10.0 + "@parcel/config-default": 2.12.0 + "@parcel/core": 2.12.0 + "@parcel/diagnostic": 2.12.0 + "@parcel/events": 2.12.0 + "@parcel/fs": 2.12.0 + "@parcel/logger": 2.12.0 + "@parcel/package-manager": 2.12.0 + "@parcel/reporter-cli": 2.12.0 + "@parcel/reporter-dev-server": 2.12.0 + "@parcel/reporter-tracer": 2.12.0 + "@parcel/utils": 2.12.0 chalk: ^4.1.0 commander: ^7.0.0 get-port: ^4.2.0 bin: parcel: lib/bin.js - checksum: fe25ddcf2d5cdd133b8acdcb532b030dc14c9fb75df0ab53a745ee56171ffaf526f1a4ce58d35d63a89f245ca6bcba0ce7736564cdc68912c779651e1c5eca07 + checksum: 
d8e6cb690a26999e4b9be0f433d5b72060fdfbb22a9aae26b4705f7eaf3983906ba719e41a5ed102ca617135823931a6559d08a11fb48cdfea7ac333e9aebaef languageName: node linkType: hard @@ -6423,13 +6584,13 @@ browserlist@latest: languageName: node linkType: hard -"postcss-selector-parser@npm:^6.0.13": - version: 6.0.13 - resolution: "postcss-selector-parser@npm:6.0.13" +"postcss-selector-parser@npm:^6.0.15": + version: 6.0.15 + resolution: "postcss-selector-parser@npm:6.0.15" dependencies: cssesc: ^3.0.0 util-deprecate: ^1.0.2 - checksum: f89163338a1ce3b8ece8e9055cd5a3165e79a15e1c408e18de5ad8f87796b61ec2d48a2902d179ae0c4b5de10fccd3a325a4e660596549b040bc5ad1b465f096 + checksum: 57decb94152111004f15e27b9c61131eb50ee10a3288e7fcf424cebbb4aba82c2817517ae718f8b5d704ee9e02a638d4a2acff8f47685c295a33ecee4fd31055 languageName: node linkType: hard @@ -6440,25 +6601,25 @@ browserlist@latest: languageName: node linkType: hard -"postcss@npm:^8.1.10": - version: 8.4.12 - resolution: "postcss@npm:8.4.12" +"postcss@npm:^8.4.27": + version: 8.4.33 + resolution: "postcss@npm:8.4.33" dependencies: - nanoid: ^3.3.1 + nanoid: ^3.3.7 picocolors: ^1.0.0 source-map-js: ^1.0.2 - checksum: 248e3d0f9bbb8efaafcfda7f91627a29bdc9a19f456896886330beb28c5abea0e14c7901b35191928602e2eccbed496b1e94097d27a0b2a980854cd00c7a835f + checksum: 6f98b2af4b76632a3de20c4f47bf0e984a1ce1a531cf11adcb0b1d63a6cbda0aae4165e578b66c32ca4879038e3eaad386a6be725a8fb4429c78e3c1ab858fe9 languageName: node linkType: hard -"postcss@npm:^8.4.27": - version: 8.4.27 - resolution: "postcss@npm:8.4.27" +"postcss@npm:^8.4.35": + version: 8.4.35 + resolution: "postcss@npm:8.4.35" dependencies: - nanoid: ^3.3.6 + nanoid: ^3.3.7 picocolors: ^1.0.0 source-map-js: ^1.0.2 - checksum: 1cdd0c298849df6cd65f7e646a3ba36870a37b65f55fd59d1a165539c263e9b4872a402bf4ed1ca1bc31f58b68b2835545e33ea1a23b161a1f8aa6d5ded81e78 + checksum: cf3c3124d3912a507603f6d9a49b3783f741075e9aa73eb592a6dd9194f9edab9d20a8875d16d137d4f779fe7b6fbd1f5727e39bfd1c3003724980ee4995e1da languageName: node linkType: hard @@ -6745,25 +6906,14 @@ browserlist@latest: languageName: node linkType: hard -"regexp.prototype.flags@npm:^1.4.3": - version: 1.4.3 - resolution: "regexp.prototype.flags@npm:1.4.3" - dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.3 - functions-have-names: ^1.2.2 - checksum: 51228bae732592adb3ededd5e15426be25f289e9c4ef15212f4da73f4ec3919b6140806374b8894036a86020d054a8d2657d3fee6bb9b4d35d8939c20030b7a6 - languageName: node - linkType: hard - -"regexp.prototype.flags@npm:^1.5.0": - version: 1.5.0 - resolution: "regexp.prototype.flags@npm:1.5.0" +"regexp.prototype.flags@npm:^1.5.1": + version: 1.5.1 + resolution: "regexp.prototype.flags@npm:1.5.1" dependencies: call-bind: ^1.0.2 define-properties: ^1.2.0 - functions-have-names: ^1.2.3 - checksum: c541687cdbdfff1b9a07f6e44879f82c66bbf07665f9a7544c5fd16acdb3ec8d1436caab01662d2fbcad403f3499d49ab0b77fbc7ef29ef961d98cc4bc9755b4 + set-function-name: ^2.0.0 + checksum: 869edff00288442f8d7fa4c9327f91d85f3b3acf8cbbef9ea7a220345cf23e9241b6def9263d2c1ebcf3a316b0aa52ad26a43a84aa02baca3381717b3e307f47 languageName: node linkType: hard @@ -6815,29 +6965,29 @@ browserlist@latest: languageName: node linkType: hard -"resolve@npm:^1.22.1": - version: 1.22.1 - resolution: "resolve@npm:1.22.1" +"resolve@npm:^1.22.2": + version: 1.22.3 + resolution: "resolve@npm:1.22.3" dependencies: - is-core-module: ^2.9.0 + is-core-module: ^2.12.0 path-parse: ^1.0.7 supports-preserve-symlinks-flag: ^1.0.0 bin: resolve: bin/resolve - checksum: 
07af5fc1e81aa1d866cbc9e9460fbb67318a10fa3c4deadc35c3ad8a898ee9a71a86a65e4755ac3195e0ea0cfbe201eb323ebe655ce90526fd61917313a34e4e + checksum: fb834b81348428cb545ff1b828a72ea28feb5a97c026a1cf40aa1008352c72811ff4d4e71f2035273dc536dcfcae20c13604ba6283c612d70fa0b6e44519c374 languageName: node linkType: hard -"resolve@npm:^1.22.2": - version: 1.22.3 - resolution: "resolve@npm:1.22.3" +"resolve@npm:^1.22.4": + version: 1.22.8 + resolution: "resolve@npm:1.22.8" dependencies: - is-core-module: ^2.12.0 + is-core-module: ^2.13.0 path-parse: ^1.0.7 supports-preserve-symlinks-flag: ^1.0.0 bin: resolve: bin/resolve - checksum: fb834b81348428cb545ff1b828a72ea28feb5a97c026a1cf40aa1008352c72811ff4d4e71f2035273dc536dcfcae20c13604ba6283c612d70fa0b6e44519c374 + checksum: f8a26958aa572c9b064562750b52131a37c29d072478ea32e129063e2da7f83e31f7f11e7087a18225a8561cfe8d2f0df9dbea7c9d331a897571c0a2527dbb4c languageName: node linkType: hard @@ -6854,29 +7004,29 @@ browserlist@latest: languageName: node linkType: hard -"resolve@patch:resolve@^1.22.1#~builtin": - version: 1.22.1 - resolution: "resolve@patch:resolve@npm%3A1.22.1#~builtin::version=1.22.1&hash=07638b" +"resolve@patch:resolve@^1.22.2#~builtin": + version: 1.22.3 + resolution: "resolve@patch:resolve@npm%3A1.22.3#~builtin::version=1.22.3&hash=07638b" dependencies: - is-core-module: ^2.9.0 + is-core-module: ^2.12.0 path-parse: ^1.0.7 supports-preserve-symlinks-flag: ^1.0.0 bin: resolve: bin/resolve - checksum: 5656f4d0bedcf8eb52685c1abdf8fbe73a1603bb1160a24d716e27a57f6cecbe2432ff9c89c2bd57542c3a7b9d14b1882b73bfe2e9d7849c9a4c0b8b39f02b8b + checksum: ad59734723b596d0891321c951592ed9015a77ce84907f89c9d9307dd0c06e11a67906a3e628c4cae143d3e44898603478af0ddeb2bba3f229a9373efe342665 languageName: node linkType: hard -"resolve@patch:resolve@^1.22.2#~builtin": - version: 1.22.3 - resolution: "resolve@patch:resolve@npm%3A1.22.3#~builtin::version=1.22.3&hash=07638b" +"resolve@patch:resolve@^1.22.4#~builtin": + version: 1.22.8 + resolution: "resolve@patch:resolve@npm%3A1.22.8#~builtin::version=1.22.8&hash=07638b" dependencies: - is-core-module: ^2.12.0 + is-core-module: ^2.13.0 path-parse: ^1.0.7 supports-preserve-symlinks-flag: ^1.0.0 bin: resolve: bin/resolve - checksum: ad59734723b596d0891321c951592ed9015a77ce84907f89c9d9307dd0c06e11a67906a3e628c4cae143d3e44898603478af0ddeb2bba3f229a9373efe342665 + checksum: 5479b7d431cacd5185f8db64bfcb7286ae5e31eb299f4c4f404ad8aa6098b77599563ac4257cb2c37a42f59dfc06a1bec2bcf283bb448f319e37f0feb9a09847 languageName: node linkType: hard @@ -6913,8 +7063,8 @@ browserlist@latest: linkType: hard "rollup@npm:^3.27.1": - version: 3.28.0 - resolution: "rollup@npm:3.28.0" + version: 3.29.4 + resolution: "rollup@npm:3.29.4" dependencies: fsevents: ~2.3.2 dependenciesMeta: @@ -6922,7 +7072,7 @@ browserlist@latest: optional: true bin: rollup: dist/bin/rollup - checksum: 6ded4a0d3ca531d68e82897d5eebaa9d085014a062620bc328f2859ccf78d6a148a51ed53f1275a5f89b55cc6d7b1440b7cee44e5a9e3a51442f809b4b26f727 + checksum: 8bb20a39c8d91130825159c3823eccf4dc2295c9a0a5c4ed851a5bf2167dbf24d9a29f23461a54c955e5506395e6cc188eafc8ab0e20399d7489fb33793b184e languageName: node linkType: hard @@ -6930,40 +7080,45 @@ browserlist@latest: version: 0.0.0-use.local resolution: "root-workspace-0b6124@workspace:." 
dependencies: - "@fullcalendar/bootstrap5": 6.1.9 - "@fullcalendar/core": 6.1.9 - "@fullcalendar/daygrid": 6.1.9 - "@fullcalendar/icalendar": 6.1.9 - "@fullcalendar/interaction": 6.1.9 - "@fullcalendar/list": 6.1.9 - "@fullcalendar/luxon3": 6.1.9 - "@fullcalendar/timegrid": 6.1.9 - "@fullcalendar/vue3": 6.1.9 - "@parcel/optimizer-data-url": 2.10.0 - "@parcel/transformer-inline-string": 2.10.0 - "@parcel/transformer-sass": 2.10.0 + "@fullcalendar/bootstrap5": 6.1.11 + "@fullcalendar/core": 6.1.11 + "@fullcalendar/daygrid": 6.1.11 + "@fullcalendar/icalendar": 6.1.11 + "@fullcalendar/interaction": 6.1.11 + "@fullcalendar/list": 6.1.11 + "@fullcalendar/luxon3": 6.1.11 + "@fullcalendar/timegrid": 6.1.11 + "@fullcalendar/vue3": 6.1.11 + "@kurkle/color": 0.3.1 + "@parcel/optimizer-data-url": 2.12.0 + "@parcel/transformer-inline-string": 2.12.0 + "@parcel/transformer-sass": 2.12.0 "@popperjs/core": 2.11.8 - "@rollup/pluginutils": 5.0.5 + "@rollup/pluginutils": 5.1.0 "@twuni/emojify": 1.0.2 - "@vitejs/plugin-vue": 4.4.0 - bootstrap: 5.3.2 - bootstrap-icons: 1.11.1 + "@vitejs/plugin-vue": 4.6.2 + "@vue/language-plugin-pug": 2.0.7 + bootstrap: 5.3.3 + bootstrap-icons: 1.11.3 browser-fs-access: 0.35.0 browserlist: latest - c8: 8.0.1 - caniuse-lite: 1.0.30001538 - d3: 7.8.5 - eslint: 8.51.0 + c8: 9.1.0 + caniuse-lite: 1.0.30001603 + chart.js: ^4.5.1 + chartjs-plugin-autocolors: 0.3.1 + chartjs-plugin-zoom: 2.2.0 + d3: 7.9.0 + eslint: 8.57.0 eslint-config-standard: 17.1.0 eslint-plugin-cypress: 2.15.1 - eslint-plugin-import: 2.28.1 - eslint-plugin-n: 16.2.0 + eslint-plugin-import: 2.29.1 + eslint-plugin-n: 16.6.2 eslint-plugin-node: 11.1.0 eslint-plugin-promise: 6.1.1 - eslint-plugin-vue: 9.17.0 + eslint-plugin-vue: 9.24.0 file-saver: 2.0.5 - highcharts: 11.1.0 - html-validate: 8.5.0 + highcharts: 11.4.0 + html-validate: 8.18.1 ical.js: 1.5.0 jquery: 3.7.1 jquery-migrate: 3.4.1 @@ -6971,28 +7126,28 @@ browserlist@latest: list.js: 2.3.1 lodash: 4.17.21 lodash-es: 4.17.21 - luxon: 3.4.3 - moment: 2.29.4 - moment-timezone: 0.5.43 + luxon: 3.4.4 + moment: 2.30.1 + moment-timezone: 0.5.45 ms: 2.1.3 murmurhash-js: 1.0.0 - naive-ui: 2.35.0 - parcel: 2.10.0 + naive-ui: 2.38.1 + parcel: 2.12.0 pinia: 2.1.7 pinia-plugin-persist: 1.0.0 pug: 3.0.2 - sass: 1.69.4 + sass: 1.72.0 seedrandom: 3.0.5 select2: 4.1.0-rc.0 select2-bootstrap-5-theme: 1.3.0 send: 0.18.0 shepherd.js: 11.2.0 slugify: 1.6.6 - sortablejs: 1.15.0 + sortablejs: 1.15.2 vanillajs-datepicker: 1.3.4 - vite: 4.4.11 - vue: 3.3.4 - vue-router: 4.2.5 + vite: 4.5.3 + vue: 3.4.21 + vue-router: 4.3.0 zxcvbn: 4.4.2 languageName: unknown linkType: soft @@ -7013,15 +7168,15 @@ browserlist@latest: languageName: node linkType: hard -"safe-array-concat@npm:^1.0.0": - version: 1.0.0 - resolution: "safe-array-concat@npm:1.0.0" +"safe-array-concat@npm:^1.0.1": + version: 1.0.1 + resolution: "safe-array-concat@npm:1.0.1" dependencies: call-bind: ^1.0.2 - get-intrinsic: ^1.2.0 + get-intrinsic: ^1.2.1 has-symbols: ^1.0.3 isarray: ^2.0.5 - checksum: f43cb98fe3b566327d0c09284de2b15fb85ae964a89495c1b1a5d50c7c8ed484190f4e5e71aacc167e16231940079b326f2c0807aea633d47cc7322f40a6b57f + checksum: 001ecf1d8af398251cbfabaf30ed66e3855127fbceee178179524b24160b49d15442f94ed6c0db0b2e796da76bb05b73bf3cc241490ec9c2b741b41d33058581 languageName: node linkType: hard @@ -7057,16 +7212,16 @@ browserlist@latest: languageName: node linkType: hard -"sass@npm:1.69.4": - version: 1.69.4 - resolution: "sass@npm:1.69.4" +"sass@npm:1.72.0": + version: 1.72.0 + resolution: "sass@npm:1.72.0" 
   dependencies:
     chokidar: ">=3.0.0 <4.0.0"
     immutable: ^4.0.0
     source-map-js: ">=0.6.2 <2.0.0"
   bin:
     sass: sass.js
-  checksum: ed5558445b59dfa711e094f804e4a360544dd916c069ee211e1c687446146721d91d4304f33fe5df3966a10de96eba43369beb2e14f0881c285424e5e44cf360
+  checksum: f420079c7d51660b7256ee52463c1499ede36f7fd5c8ef50c687451777ad641509001454dea45244073cedd7c00e7a3bc1c362e55206ac6686171b994edb41e4
   languageName: node
   linkType: hard

@@ -7097,6 +7252,13 @@ browserlist@latest:
   languageName: node
   linkType: hard

+"seemly@npm:^0.3.8":
+  version: 0.3.8
+  resolution: "seemly@npm:0.3.8"
+  checksum: 98171fd4d9e3a03f49f695885499883c85cc00b8d88bc4a12576d5069b46ebe269d2dfc58a7e6cec8887bf2b2511d074376eb837c14b476918f9a8706ed5977a
+  languageName: node
+  linkType: hard
+
 "select2-bootstrap-5-theme@npm:1.3.0":
   version: 1.3.0
   resolution: "select2-bootstrap-5-theme@npm:1.3.0"
@@ -7151,7 +7313,7 @@ browserlist@latest:
   languageName: node
   linkType: hard

-"semver@npm:^7.5.2, semver@npm:^7.5.4":
+"semver@npm:^7.5.2":
   version: 7.5.4
   resolution: "semver@npm:7.5.4"
   dependencies:
@@ -7173,6 +7335,17 @@ browserlist@latest:
   languageName: node
   linkType: hard

+"semver@npm:^7.6.0":
+  version: 7.6.0
+  resolution: "semver@npm:7.6.0"
+  dependencies:
+    lru-cache: ^6.0.0
+  bin:
+    semver: bin/semver.js
+  checksum: 7427f05b70786c696640edc29fdd4bc33b2acf3bbe1740b955029044f80575fc664e1a512e4113c3af21e767154a94b4aa214bf6cd6e42a1f6dba5914e0b208c
+  languageName: node
+  linkType: hard
+
 "send@npm:0.18.0":
   version: 0.18.0
   resolution: "send@npm:0.18.0"
@@ -7201,6 +7374,29 @@ browserlist@latest:
   languageName: node
   linkType: hard

+"set-function-length@npm:^1.1.1":
+  version: 1.1.1
+  resolution: "set-function-length@npm:1.1.1"
+  dependencies:
+    define-data-property: ^1.1.1
+    get-intrinsic: ^1.2.1
+    gopd: ^1.0.1
+    has-property-descriptors: ^1.0.0
+  checksum: c131d7569cd7e110cafdfbfbb0557249b538477624dfac4fc18c376d879672fa52563b74029ca01f8f4583a8acb35bb1e873d573a24edb80d978a7ee607c6e06
+  languageName: node
+  linkType: hard
+
+"set-function-name@npm:^2.0.0":
+  version: 2.0.1
+  resolution: "set-function-name@npm:2.0.1"
+  dependencies:
+    define-data-property: ^1.0.1
+    functions-have-names: ^1.2.3
+    has-property-descriptors: ^1.0.0
+  checksum: 4975d17d90c40168eee2c7c9c59d023429f0a1690a89d75656306481ece0c3c1fb1ebcc0150ea546d1913e35fbd037bace91372c69e543e51fc5d1f31a9fa126
+  languageName: node
+  linkType: hard
+
 "setprototypeof@npm:1.2.0":
   version: 1.2.0
   resolution: "setprototypeof@npm:1.2.0"
@@ -7245,7 +7441,7 @@ browserlist@latest:
   languageName: node
   linkType: hard

-"signal-exit@npm:^3.0.2, signal-exit@npm:^3.0.7":
+"signal-exit@npm:^3.0.7":
   version: 3.0.7
   resolution: "signal-exit@npm:3.0.7"
   checksum: a2f098f247adc367dffc27845853e9959b9e88b01cb301658cfe4194352d8d2bb32e18467c786a7fe15f1d44b233ea35633d076d5e737870b7139949d1ab6318
   languageName: node
   linkType: hard

@@ -7301,10 +7497,10 @@ browserlist@latest:
   languageName: node
   linkType: hard

-"sortablejs@npm:1.15.0":
-  version: 1.15.0
-  resolution: "sortablejs@npm:1.15.0"
-  checksum: bb82223a663484640d317cad510ac987f26b7a443631040407224de1be069afcc6c39048b6d8527f10f269e33595e8128d7de2fac23517c8260470f77f932d55
+"sortablejs@npm:1.15.2":
+  version: 1.15.2
+  resolution: "sortablejs@npm:1.15.2"
+  checksum: 36b20b144ff5fd2d078aed0eba3349aaef5691e4830ba9a28d69ca023d4583ca15e5eacb3c09c1d9924675388400d1219def1121e514badfb0f41463cc844da7
   languageName: node
   linkType: hard

@@ -7381,36 +7577,36 @@ browserlist@latest:
   languageName: node
   linkType: hard

-"string.prototype.trim@npm:^1.2.7":
-  version: 1.2.7
-  resolution: "string.prototype.trim@npm:1.2.7"
+"string.prototype.trim@npm:^1.2.8": + version: 1.2.8 + resolution: "string.prototype.trim@npm:1.2.8" dependencies: call-bind: ^1.0.2 - define-properties: ^1.1.4 - es-abstract: ^1.20.4 - checksum: 05b7b2d6af63648e70e44c4a8d10d8cc457536df78b55b9d6230918bde75c5987f6b8604438c4c8652eb55e4fc9725d2912789eb4ec457d6995f3495af190c09 + define-properties: ^1.2.0 + es-abstract: ^1.22.1 + checksum: 49eb1a862a53aba73c3fb6c2a53f5463173cb1f4512374b623bcd6b43ad49dd559a06fb5789bdec771a40fc4d2a564411c0a75d35fb27e76bbe738c211ecff07 languageName: node linkType: hard -"string.prototype.trimend@npm:^1.0.6": - version: 1.0.6 - resolution: "string.prototype.trimend@npm:1.0.6" +"string.prototype.trimend@npm:^1.0.7": + version: 1.0.7 + resolution: "string.prototype.trimend@npm:1.0.7" dependencies: call-bind: ^1.0.2 - define-properties: ^1.1.4 - es-abstract: ^1.20.4 - checksum: 0fdc34645a639bd35179b5a08227a353b88dc089adf438f46be8a7c197fc3f22f8514c1c9be4629b3cd29c281582730a8cbbad6466c60f76b5f99cf2addb132e + define-properties: ^1.2.0 + es-abstract: ^1.22.1 + checksum: 2375516272fd1ba75992f4c4aa88a7b5f3c7a9ca308d963bcd5645adf689eba6f8a04ebab80c33e30ec0aefc6554181a3a8416015c38da0aa118e60ec896310c languageName: node linkType: hard -"string.prototype.trimstart@npm:^1.0.6": - version: 1.0.6 - resolution: "string.prototype.trimstart@npm:1.0.6" +"string.prototype.trimstart@npm:^1.0.7": + version: 1.0.7 + resolution: "string.prototype.trimstart@npm:1.0.7" dependencies: call-bind: ^1.0.2 - define-properties: ^1.1.4 - es-abstract: ^1.20.4 - checksum: 89080feef416621e6ef1279588994305477a7a91648d9436490d56010a1f7adc39167cddac7ce0b9884b8cdbef086987c4dcb2960209f2af8bac0d23ceff4f41 + define-properties: ^1.2.0 + es-abstract: ^1.22.1 + checksum: 13d0c2cb0d5ff9e926fa0bec559158b062eed2b68cd5be777ffba782c96b2b492944e47057274e064549b94dd27cf81f48b27a31fee8af5b574cff253e7eb613 languageName: node linkType: hard @@ -7580,15 +7776,15 @@ browserlist@latest: languageName: node linkType: hard -"tsconfig-paths@npm:^3.14.2": - version: 3.14.2 - resolution: "tsconfig-paths@npm:3.14.2" +"tsconfig-paths@npm:^3.15.0": + version: 3.15.0 + resolution: "tsconfig-paths@npm:3.15.0" dependencies: "@types/json5": ^0.0.29 json5: ^1.0.2 minimist: ^1.2.6 strip-bom: ^3.0.0 - checksum: a6162eaa1aed680537f93621b82399c7856afd10ec299867b13a0675e981acac4e0ec00896860480efc59fc10fd0b16fdc928c0b885865b52be62cadac692447 + checksum: 59f35407a390d9482b320451f52a411a256a130ff0e7543d18c6f20afab29ac19fbe55c360a93d6476213cc335a4d76ce90f67df54c4e9037f7d240920832201 languageName: node linkType: hard @@ -7744,9 +7940,9 @@ browserlist@latest: languageName: node linkType: hard -"vite@npm:4.4.11": - version: 4.4.11 - resolution: "vite@npm:4.4.11" +"vite@npm:4.5.3": + version: 4.5.3 + resolution: "vite@npm:4.5.3" dependencies: esbuild: ^0.18.10 fsevents: ~2.3.2 @@ -7780,7 +7976,7 @@ browserlist@latest: optional: true bin: vite: bin/vite.js - checksum: c22145c8385343a629cd546054b9da6eee60327540102bdfd1ad897fd2e78e0763ce6a18a9d84fdefde9da8fd2427d3bec9eb2697b47cf4068c7b4b52f7e3e6a + checksum: fd3f512ce48ca2a1fe60ad0376283b832de9272725fdbc65064ae9248f792de87b0f27a89573115e23e26784800daca329f8a9234d298ba6f60e808a9c63883c languageName: node linkType: hard @@ -7791,6 +7987,36 @@ browserlist@latest: languageName: node linkType: hard +"volar-service-html@npm:0.0.34": + version: 0.0.34 + resolution: "volar-service-html@npm:0.0.34" + dependencies: + vscode-html-languageservice: ^5.1.0 + vscode-languageserver-textdocument: ^1.0.11 + vscode-uri: ^3.0.8 + peerDependencies: + 
"@volar/language-service": ~2.1.0 + peerDependenciesMeta: + "@volar/language-service": + optional: true + checksum: 83b50cd805680c77b5632e9534b23cddb85bf7e0cd425624d474981d173ddf07a66fcce6348f675c9d5c2551df9ae1e58206c2ed1c32052f8a70940fb7f5fe50 + languageName: node + linkType: hard + +"volar-service-pug@npm:0.0.34": + version: 0.0.34 + resolution: "volar-service-pug@npm:0.0.34" + dependencies: + "@volar/language-service": ~2.1.0 + pug-lexer: ^5.0.1 + pug-parser: ^6.0.0 + volar-service-html: 0.0.34 + vscode-html-languageservice: ^5.1.0 + vscode-languageserver-textdocument: ^1.0.11 + checksum: 4691aa1c8ea9039e1b5ce4218445309575c2cb4bc08ad5341a8af6f0db1a60711f26cc905e124c3485cc780eb58b895332fbb6a2ccf427a9d0e08012f2c5ad4a + languageName: node + linkType: hard + "vooks@npm:^0.2.12, vooks@npm:^0.2.4": version: 0.2.12 resolution: "vooks@npm:0.2.12" @@ -7802,6 +8028,56 @@ browserlist@latest: languageName: node linkType: hard +"vscode-html-languageservice@npm:^5.1.0": + version: 5.1.2 + resolution: "vscode-html-languageservice@npm:5.1.2" + dependencies: + "@vscode/l10n": ^0.0.18 + vscode-languageserver-textdocument: ^1.0.11 + vscode-languageserver-types: ^3.17.5 + vscode-uri: ^3.0.8 + checksum: 3a2a5ee5ad4ea429e85f4fb8f45da5b47d50541784d703fc9ccd009f68426034a48be6c04f8c420dc7236de07df93ccc28873da3395db5f5626fe169f18f1ac6 + languageName: node + linkType: hard + +"vscode-jsonrpc@npm:8.2.0": + version: 8.2.0 + resolution: "vscode-jsonrpc@npm:8.2.0" + checksum: f302a01e59272adc1ae6494581fa31c15499f9278df76366e3b97b2236c7c53ebfc71efbace9041cfd2caa7f91675b9e56f2407871a1b3c7f760a2e2ee61484a + languageName: node + linkType: hard + +"vscode-languageserver-protocol@npm:^3.17.5": + version: 3.17.5 + resolution: "vscode-languageserver-protocol@npm:3.17.5" + dependencies: + vscode-jsonrpc: 8.2.0 + vscode-languageserver-types: 3.17.5 + checksum: dfb42d276df5dfea728267885b99872ecff62f6c20448b8539fae71bb196b420f5351c5aca7c1047bf8fb1f89fa94a961dce2bc5bf7e726198f4be0bb86a1e71 + languageName: node + linkType: hard + +"vscode-languageserver-textdocument@npm:^1.0.11": + version: 1.0.11 + resolution: "vscode-languageserver-textdocument@npm:1.0.11" + checksum: ea7cdc9d4ffaae5952071fa11d17d714215a76444e6936c9359f94b9ba3222a52a55edb5bd5928bd3e9712b900a9f175bb3565ec1c8923234fe3bd327584bafb + languageName: node + linkType: hard + +"vscode-languageserver-types@npm:3.17.5, vscode-languageserver-types@npm:^3.17.5": + version: 3.17.5 + resolution: "vscode-languageserver-types@npm:3.17.5" + checksum: 79b420e7576398d396579ca3a461c9ed70e78db4403cd28bbdf4d3ed2b66a2b4114031172e51fad49f0baa60a2180132d7cb2ea35aa3157d7af3c325528210ac + languageName: node + linkType: hard + +"vscode-uri@npm:^3.0.8": + version: 3.0.8 + resolution: "vscode-uri@npm:3.0.8" + checksum: 514249126850c0a41a7d8c3c2836cab35983b9dc1938b903cfa253b9e33974c1416d62a00111385adcfa2b98df456437ab704f709a2ecca76a90134ef5eb4832 + languageName: node + linkType: hard + "vue-demi@npm:>=0.14.5": version: 0.14.5 resolution: "vue-demi@npm:0.14.5" @@ -7834,9 +8110,9 @@ browserlist@latest: languageName: node linkType: hard -"vue-eslint-parser@npm:^9.3.1": - version: 9.3.1 - resolution: "vue-eslint-parser@npm:9.3.1" +"vue-eslint-parser@npm:^9.4.2": + version: 9.4.2 + resolution: "vue-eslint-parser@npm:9.4.2" dependencies: debug: ^4.3.4 eslint-scope: ^7.1.1 @@ -7847,37 +8123,42 @@ browserlist@latest: semver: ^7.3.6 peerDependencies: eslint: ">=6.0.0" - checksum: 
6d1476b45fcc5b456a1e5c0f33ec695cf1d392ca6113250d5e3441e6cf3b2a0ec28a9455699363641dfb7c48358f215db07856c98385a31ace9bc58196f4156e + checksum: 67f14c8ea19b578077a878864a5ec438ab4c597381923c9814fac39b3772da8654ac2a543467b5880607f694131f8ff34b87bd24c10bbc5f99fa2fcac49ff2e6 languageName: node linkType: hard -"vue-router@npm:4.2.5": - version: 4.2.5 - resolution: "vue-router@npm:4.2.5" +"vue-router@npm:4.3.0": + version: 4.3.0 + resolution: "vue-router@npm:4.3.0" dependencies: - "@vue/devtools-api": ^6.5.0 + "@vue/devtools-api": ^6.5.1 peerDependencies: vue: ^3.2.0 - checksum: 2449db4f3a1b3f0ccd16a3788000e47f0e26ca7035b6adf48ebd51d189eb2bad6c39664476cfad9c2ca22988032f5190c99970718495aa2a5c5595d50c8f71b9 + checksum: 0059261d39c8a6f61d3cdf4b74cfcd6a109062e0562f2db5a387cdf4d1b186dfdd2dddcacbf83ce2842d7c3ec9a63d8a6d427c4cec1db61372f4a06048496354 languageName: node linkType: hard -"vue@npm:3.3.4": - version: 3.3.4 - resolution: "vue@npm:3.3.4" +"vue@npm:3.4.21": + version: 3.4.21 + resolution: "vue@npm:3.4.21" dependencies: - "@vue/compiler-dom": 3.3.4 - "@vue/compiler-sfc": 3.3.4 - "@vue/runtime-dom": 3.3.4 - "@vue/server-renderer": 3.3.4 - "@vue/shared": 3.3.4 - checksum: 58b6c62a66a375ce5df460fcb7ba41b37c8637c635faf06ef472ae4197f412cf9ad83586cd8e3f66c486404fbe8550e694f90ff724a571d1ba78830791099c59 + "@vue/compiler-dom": 3.4.21 + "@vue/compiler-sfc": 3.4.21 + "@vue/runtime-dom": 3.4.21 + "@vue/server-renderer": 3.4.21 + "@vue/shared": 3.4.21 + peerDependencies: + typescript: "*" + peerDependenciesMeta: + typescript: + optional: true + checksum: 3c477982a0a9aadfa512eb625b67f35809f123e98a268ace52e3ee738b23a9b8d9461cfc1f2b314fb098047ab3aab50f8beea657a2d3ebe5aae0e02aa4f903d2 languageName: node linkType: hard -"vueuc@npm:^0.4.51": - version: 0.4.51 - resolution: "vueuc@npm:0.4.51" +"vueuc@npm:^0.4.58": + version: 0.4.58 + resolution: "vueuc@npm:0.4.58" dependencies: "@css-render/vue3-ssr": ^0.15.10 "@juggle/resize-observer": ^3.3.1 @@ -7888,7 +8169,7 @@ browserlist@latest: vooks: ^0.2.4 peerDependencies: vue: ^3.0.11 - checksum: 7969659facd8a1f7b7676bf5629d00a4830a648c279f3632c711ceedf8f7d7f6c8f61546a2dec74e2f2ff91e9185dcd76136bef7ae6eeac38bb1c6129840cfac + checksum: fb0b9a69be553ccbdc314eec22433d99022ef065d6e6add4b1177ebada6de6d05b4ece36af4ee37a750687215ec966880c17d6b6dd2d0ea38a7958f584da74b9 languageName: node linkType: hard @@ -7912,30 +8193,16 @@ browserlist@latest: languageName: node linkType: hard -"which-typed-array@npm:^1.1.10": - version: 1.1.11 - resolution: "which-typed-array@npm:1.1.11" - dependencies: - available-typed-arrays: ^1.0.5 - call-bind: ^1.0.2 - for-each: ^0.3.3 - gopd: ^1.0.1 - has-tostringtag: ^1.0.0 - checksum: 711ffc8ef891ca6597b19539075ec3e08bb9b4c2ca1f78887e3c07a977ab91ac1421940505a197758fb5939aa9524976d0a5bbcac34d07ed6faa75cedbb17206 - languageName: node - linkType: hard - -"which-typed-array@npm:^1.1.9": - version: 1.1.9 - resolution: "which-typed-array@npm:1.1.9" +"which-typed-array@npm:^1.1.11, which-typed-array@npm:^1.1.13": + version: 1.1.13 + resolution: "which-typed-array@npm:1.1.13" dependencies: available-typed-arrays: ^1.0.5 - call-bind: ^1.0.2 + call-bind: ^1.0.4 for-each: ^0.3.3 gopd: ^1.0.1 has-tostringtag: ^1.0.0 - is-typed-array: ^1.1.10 - checksum: fe0178ca44c57699ca2c0e657b64eaa8d2db2372a4e2851184f568f98c478ae3dc3fdb5f7e46c384487046b0cf9e23241423242b277e03e8ba3dabc7c84c98ef + checksum: 3828a0d5d72c800e369d447e54c7620742a4cc0c9baf1b5e8c17e9b6ff90d8d861a3a6dd4800f1953dbf80e5e5cec954a289e5b4a223e3bee4aeb1f8c5f33309 languageName: node linkType: hard