diff --git a/.flox/.gitignore b/.flox/.gitignore
new file mode 100644
index 0000000000..15d71a169f
--- /dev/null
+++ b/.flox/.gitignore
@@ -0,0 +1,4 @@
+run/
+cache/
+lib/
+log/
diff --git a/.flox/env.json b/.flox/env.json
new file mode 100644
index 0000000000..91fcc84944
--- /dev/null
+++ b/.flox/env.json
@@ -0,0 +1,4 @@
+{
+ "name": "axe-core-daisy",
+ "version": 1
+}
diff --git a/.flox/env/manifest.lock b/.flox/env/manifest.lock
new file mode 100644
index 0000000000..d9470fac54
--- /dev/null
+++ b/.flox/env/manifest.lock
@@ -0,0 +1,154 @@
+{
+ "lockfile-version": 1,
+ "manifest": {
+ "schema-version": "1.11.0",
+ "install": {
+ "nodejs_24": {
+ "pkg-path": "nodejs_24",
+ "outputs": "all"
+ }
+ },
+ "vars": {
+ "CLICOLOR": "1",
+ "FLOX_DISABLE_METRICS": "true",
+ "FLX_VERBOSE": "${FLOX_VERBOSE:-}"
+ },
+ "hook": {
+ "on-activate": "[[ ! -z \"${FLX_VERBOSE}\" ]] && echo \"##################################################\";\n[[ ! -z \"${FLX_VERBOSE}\" ]] && echo \"########## FLOX ACTIVATE...\";\n\n# alias lss='ls -alshF --color=auto'\nlss () {\n command ls -alshF --color=auto \"$@\"\n}\n\n# PWD=$(pwd);\n# [[ ! -z \"${FLX_VERBOSE}\" ]] && echo \"-----> PWD: ${PWD}\";\n# unset PWD\n\n#FLOX_ENV_PROJECT\n#_FLOX_ENV_LOG_DIR\n#FLOX_CACHE_DIR=\"$(dirname ${FLOX_ENV_CACHE})\"\n#FLOX_CACHE_DIR=\"$(realpath ${FLOX_ENV_CACHE})\"\nFLOX_CACHE_DIR=\"${FLOX_ENV_CACHE}\"\n[[ ! -z \"${FLX_VERBOSE}\" ]] && echo \"-----> FLOX_CACHE_DIR: ${FLOX_CACHE_DIR}\";\n\nif [[ ! -z \"${FLX_VERBOSE}\" ]]; then\n\necho \"=====> NODE VERSION: $(node --version)\";\nWHICH_NODE=$(which node);\necho \".....> NODE WHICH: ${WHICH_NODE}\";\nlss \"${WHICH_NODE}\";\nunset WHICH_NODE\n\necho \"=====> NPM VERSION (init): $(npm --version)\";\nWHICH_NPM=$(which npm);\necho \".....> NPM WHICH: ${WHICH_NPM}\";\nlss \"${WHICH_NPM}\";\nunset WHICH_NPM\n\necho \"-----> NPM CONFIG PREFIX (init): $(npm config get prefix)\";\necho \"-----> NPM_CONFIG_PREFIX (init): ${NPM_CONFIG_PREFIX}\";\n\necho \"-----> NPM CONFIG CACHE (init): $(npm config get cache)\"\necho \"-----> NPM_CONFIG_CACHE (init): ${NPM_CONFIG_CACHE}\";\n\nfi\n\n# envName=\"${_FLOX_ACTIVE_ENVIRONMENTS##:*}\";\n#echo \"${envName}\";\n#export NPM_CONFIG_PREFIX=\"/tmp/${envName}-npm\";\n\nexport NPM_CONFIG_PREFIX=\"${FLOX_CACHE_DIR}/NPM_PREFIX\";\nrm -rf \"$NPM_CONFIG_PREFIX\";\nmkdir -p \"$NPM_CONFIG_PREFIX\";\nif [[ ! -z \"${FLX_VERBOSE}\" ]]; then\necho \"-----> NPM CONFIG PREFIX: $(npm config get prefix)\";\necho \"-----> NPM_CONFIG_PREFIX: ${NPM_CONFIG_PREFIX}\";\nlss \"$NPM_CONFIG_PREFIX\";\nlss \"$NPM_CONFIG_PREFIX/bin\";\nlss \"$NPM_CONFIG_PREFIX/lib\";\nlss \"$NPM_CONFIG_PREFIX/lib/node_modules\";\nfi\n\nexport NPM_CONFIG_CACHE=\"${FLOX_CACHE_DIR}/NPM_CACHE\";\nrm -rf \"$NPM_CONFIG_CACHE\";\nmkdir -p \"$NPM_CONFIG_CACHE\";\nif [[ ! 
-z \"${FLX_VERBOSE}\" ]]; then\necho \"-----> NPM CONFIG CACHE: $(npm config get cache)\"\necho \"-----> NPM_CONFIG_CACHE: ${NPM_CONFIG_CACHE}\";\nlss \"$NPM_CONFIG_CACHE\";\nfi\n\nunset FLOX_CACHE_DIR\n\n[[ ! -z \"${FLX_VERBOSE}\" ]] && echo \"-----> NODE_PATH (init): ${NODE_PATH}\";\nexport NODE_PATH=\"$NPM_CONFIG_PREFIX/lib/node_modules${NODE_PATH:+:$NODE_PATH}\";\n[[ ! -z \"${FLX_VERBOSE}\" ]] && echo \"-----> NODE_PATH: ${NODE_PATH}\";\n\nexport PATH=\"$NPM_CONFIG_PREFIX/bin:$PATH\";\n\nnpm install --ignore-scripts --foreground-scripts -g sfw\n\n[[ ! -z \"${FLX_VERBOSE}\" ]] && ls -alshFR --color=auto \"$NPM_CONFIG_PREFIX/lib/node_modules/sfw/.sfw-cache\"\n\nsfw npm install -g npm --foreground-scripts 1>/dev/null\n#sfw npm install -g npm-check-updates --foreground-scripts 1>/dev/null\n\n[[ ! -z \"${FLX_VERBOSE}\" ]] && ls -alshFR --color=auto \"$NPM_CONFIG_PREFIX/lib/node_modules/sfw/.sfw-cache\"\n"
+ },
+ "profile": {
+ "common": "[[ ! -z \"${FLX_VERBOSE}\" ]] && echo \"##################################################\";\n[[ ! -z \"${FLX_VERBOSE}\" ]] && echo \"########## FLOX PROFILE COMMON (${SHELL})...\";\n\nexport PATH=\"$NPM_CONFIG_PREFIX/bin:$PATH\";\n\nalias lss='ls -alshF --color=auto'\nalias flx='flox activate'\nalias flxx='FLOX_VERBOSE=1 flox activate'\n\n# PWD=$(pwd);\n# [[ ! -z \"${FLX_VERBOSE}\" ]] && echo \"-----> PWD: ${PWD}\";\n# unset PWD\n\nif [[ ! -z \"${FLX_VERBOSE}\" ]]; then\n\necho \"=====> NODE VERSION: $(node --version)\";\nWHICH_NODE=$(which node);\necho \".....> NODE WHICH: ${WHICH_NODE}\";\nlss \"${WHICH_NODE}\";\nunset WHICH_NODE\n\necho \"=====> NPM VERSION: $(npm --version)\";\nWHICH_NPM=$(which npm);\necho \".....> NPM WHICH: ${WHICH_NPM}\";\nlss \"${WHICH_NPM}\";\nunset WHICH_NPM\n\necho \"-----> NPM CONFIG PREFIX: $(npm config get prefix)\";\necho \"-----> NPM_CONFIG_PREFIX: ${NPM_CONFIG_PREFIX}\";\n\necho \"-----> NPM CONFIG CACHE: $(npm config get cache)\"\necho \"-----> NPM_CONFIG_CACHE: ${NPM_CONFIG_CACHE}\";\n\necho \"=====> GIT VERSION: $(git --version)\";\nWHICH_GIT=$(which git);\necho \".....> GIT WHICH: ${WHICH_GIT}\";\nlss \"${WHICH_GIT}\";\nunset WHICH_GIT\n\nENV=$(env);\necho \"-----> ENV: ${ENV}\";\necho \"||||||||||||||||||||||||||\";\n\necho \"-----> DEBUG: ${DEBUG}\";\n\necho \"-----> USER: ${USER}\";\necho \"-----> USERNAME: ${USERNAME}\";\n\necho \"-----> NODE_ENV: ${NODE_ENV}\";\n\ngit branch\ngit status\n\nfi\n"
+ },
+ "options": {
+ "systems": [
+ "aarch64-darwin",
+ "aarch64-linux",
+ "x86_64-darwin",
+ "x86_64-linux"
+ ]
+ }
+ },
+ "packages": [
+ {
+ "attr_path": "nodejs_24",
+ "broken": false,
+ "derivation": "/nix/store/g2yfx00n1lp9i8ka7nbl11fawc0yfm7n-nodejs-24.14.0.drv",
+ "description": "Event-driven I/O framework for the V8 JavaScript engine",
+ "install_id": "nodejs_24",
+ "license": "MIT",
+ "locked_url": "https://github.com/flox/nixpkgs?rev=68d8aa3d661f0e6bd5862291b5bb263b2a6595c9",
+ "name": "nodejs-24.14.0",
+ "pname": "nodejs_24",
+ "rev": "68d8aa3d661f0e6bd5862291b5bb263b2a6595c9",
+ "rev_count": 975402,
+ "rev_date": "2026-04-05T21:03:29Z",
+ "scrape_date": "2026-04-07T06:03:56.007227Z",
+ "stabilities": [
+ "unstable"
+ ],
+ "unfree": false,
+ "version": "24.14.0",
+ "outputs_to_install": [
+ "out",
+ "out",
+ "out"
+ ],
+ "outputs": {
+ "out": "/nix/store/d5hw52iialz64hvfzfp9qm1gr2fp43kx-nodejs-24.14.0"
+ },
+ "system": "aarch64-darwin",
+ "group": "toplevel",
+ "priority": 5
+ },
+ {
+ "attr_path": "nodejs_24",
+ "broken": false,
+ "derivation": "/nix/store/jj8rw2gnisil7dw10xi2qdinad9jqp7s-nodejs-24.14.0.drv",
+ "description": "Event-driven I/O framework for the V8 JavaScript engine",
+ "install_id": "nodejs_24",
+ "license": "MIT",
+ "locked_url": "https://github.com/flox/nixpkgs?rev=68d8aa3d661f0e6bd5862291b5bb263b2a6595c9",
+ "name": "nodejs-24.14.0",
+ "pname": "nodejs_24",
+ "rev": "68d8aa3d661f0e6bd5862291b5bb263b2a6595c9",
+ "rev_count": 975402,
+ "rev_date": "2026-04-05T21:03:29Z",
+ "scrape_date": "2026-04-07T06:35:37.585271Z",
+ "stabilities": [
+ "unstable"
+ ],
+ "unfree": false,
+ "version": "24.14.0",
+ "outputs_to_install": [
+ "out",
+ "out"
+ ],
+ "outputs": {
+ "out": "/nix/store/vs8zaq8fvbbjqfg4bzn424kn55sj7s7y-nodejs-24.14.0"
+ },
+ "system": "aarch64-linux",
+ "group": "toplevel",
+ "priority": 5
+ },
+ {
+ "attr_path": "nodejs_24",
+ "broken": false,
+ "derivation": "/nix/store/l9ksrfa960964gqf23yfr7a66a2n18c1-nodejs-24.14.0.drv",
+ "description": "Event-driven I/O framework for the V8 JavaScript engine",
+ "install_id": "nodejs_24",
+ "license": "MIT",
+ "locked_url": "https://github.com/flox/nixpkgs?rev=68d8aa3d661f0e6bd5862291b5bb263b2a6595c9",
+ "name": "nodejs-24.14.0",
+ "pname": "nodejs_24",
+ "rev": "68d8aa3d661f0e6bd5862291b5bb263b2a6595c9",
+ "rev_count": 975402,
+ "rev_date": "2026-04-05T21:03:29Z",
+ "scrape_date": "2026-04-07T07:05:24.161249Z",
+ "stabilities": [
+ "unstable"
+ ],
+ "unfree": false,
+ "version": "24.14.0",
+ "outputs_to_install": [
+ "out",
+ "out"
+ ],
+ "outputs": {
+ "out": "/nix/store/70ypbaj273w2kk0nrs4vijizyqgbg48v-nodejs-24.14.0"
+ },
+ "system": "x86_64-darwin",
+ "group": "toplevel",
+ "priority": 5
+ },
+ {
+ "attr_path": "nodejs_24",
+ "broken": false,
+ "derivation": "/nix/store/15s1kk2n6vg180wg04wj2wgnl19f03v7-nodejs-24.14.0.drv",
+ "description": "Event-driven I/O framework for the V8 JavaScript engine",
+ "install_id": "nodejs_24",
+ "license": "MIT",
+ "locked_url": "https://github.com/flox/nixpkgs?rev=68d8aa3d661f0e6bd5862291b5bb263b2a6595c9",
+ "name": "nodejs-24.14.0",
+ "pname": "nodejs_24",
+ "rev": "68d8aa3d661f0e6bd5862291b5bb263b2a6595c9",
+ "rev_count": 975402,
+ "rev_date": "2026-04-05T21:03:29Z",
+ "scrape_date": "2026-04-07T07:39:46.274930Z",
+ "stabilities": [
+ "unstable"
+ ],
+ "unfree": false,
+ "version": "24.14.0",
+ "outputs_to_install": [
+ "out",
+ "out"
+ ],
+ "outputs": {
+ "out": "/nix/store/v2210ar51jddnrcx46m53ivv9palc26m-nodejs-24.14.0"
+ },
+ "system": "x86_64-linux",
+ "group": "toplevel",
+ "priority": 5
+ }
+ ]
+}
diff --git a/.flox/env/manifest.toml b/.flox/env/manifest.toml
new file mode 100644
index 0000000000..7d36beac93
--- /dev/null
+++ b/.flox/env/manifest.toml
@@ -0,0 +1,203 @@
+schema-version = "1.11.0"
+
+# flox config --set-bool set_prompt false
+# ==> .zshrc
+# Warp-ify Flox subshell prompt:
+# function git_branch_name()
+# {
+# branch=$({ git symbolic-ref -q HEAD || git name-rev --name-only --no-undefined --always HEAD; } 2>/dev/null | sed 's/refs\/heads\///')
+# if [[ $branch == "" ]];
+# then
+# :
+# else
+# #echo "[$branch]"
+# echo '('$branch') '
+# fi
+# }
+#
+# function flox_prompt()
+# {
+# FLX_ENVS="${FLOX_PROMPT_ENVIRONMENTS:-}"
+# if [[ -z "${FLX_ENVS}" ]];
+# then
+# :
+# else
+# echo 'FLX ['$FLX_ENVS'] '
+# fi
+# }
+# setopt prompt_subst
+# autoload -U colors && colors
+# export PS1='%{$fg[red]%}$(flox_prompt)%{$reset_color%}%n@%m %{$fg[green]%}%~%{$reset_color%} %{$fg[magenta]%}$(git_branch_name)%{$reset_color%}%# '
+# PROMPT="${PROMPT}"$'\n'
+# printf '\eP$f{"hook": "SourcedRcFileForWarp", "value": { "shell": "zsh" }}\x9c'
+
+[install]
+nodejs_24.pkg-path = "nodejs_24"
+nodejs_24.outputs = "all"
+
+[vars]
+FLOX_DISABLE_METRICS="true"
+CLICOLOR="1"
+
+#set -euo pipefail (set -u in particular ==> undefined vars early exit)
+# FLX_VERBOSE becomes fallback when FLOX_VERBOSE is empty or unset
+FLX_VERBOSE="${FLOX_VERBOSE:-}"
+# FLX_VERBOSE becomes fallback only when FLOX_VERBOSE is unset, not empty
+# FLX_VERBOSE="${FLOX_VERBOSE-}"
+
+# if [[ ! -v FLX_VERBOSE ]]; then
+# # unset
+# elif [[ -z "${FLX_VERBOSE}" ]]; then
+# # empty
+# else
+# #   neither unset nor empty
+# fi
+
+[hook]
+on-activate = '''
+[[ ! -z "${FLX_VERBOSE}" ]] && echo "##################################################";
+[[ ! -z "${FLX_VERBOSE}" ]] && echo "########## FLOX ACTIVATE...";
+
+# alias lss='ls -alshF --color=auto'
+lss () {
+ command ls -alshF --color=auto "$@"
+}
+
+# PWD=$(pwd);
+# [[ ! -z "${FLX_VERBOSE}" ]] && echo "-----> PWD: ${PWD}";
+# unset PWD
+
+#FLOX_ENV_PROJECT
+#_FLOX_ENV_LOG_DIR
+#FLOX_CACHE_DIR="$(dirname ${FLOX_ENV_CACHE})"
+#FLOX_CACHE_DIR="$(realpath ${FLOX_ENV_CACHE})"
+FLOX_CACHE_DIR="${FLOX_ENV_CACHE}"
+[[ ! -z "${FLX_VERBOSE}" ]] && echo "-----> FLOX_CACHE_DIR: ${FLOX_CACHE_DIR}";
+
+if [[ ! -z "${FLX_VERBOSE}" ]]; then
+
+echo "=====> NODE VERSION: $(node --version)";
+WHICH_NODE=$(which node);
+echo ".....> NODE WHICH: ${WHICH_NODE}";
+lss "${WHICH_NODE}";
+unset WHICH_NODE
+
+echo "=====> NPM VERSION (init): $(npm --version)";
+WHICH_NPM=$(which npm);
+echo ".....> NPM WHICH: ${WHICH_NPM}";
+lss "${WHICH_NPM}";
+unset WHICH_NPM
+
+echo "-----> NPM CONFIG PREFIX (init): $(npm config get prefix)";
+echo "-----> NPM_CONFIG_PREFIX (init): ${NPM_CONFIG_PREFIX}";
+
+echo "-----> NPM CONFIG CACHE (init): $(npm config get cache)"
+echo "-----> NPM_CONFIG_CACHE (init): ${NPM_CONFIG_CACHE}";
+
+fi
+
+# envName="${_FLOX_ACTIVE_ENVIRONMENTS##:*}";
+#echo "${envName}";
+#export NPM_CONFIG_PREFIX="/tmp/${envName}-npm";
+
+export NPM_CONFIG_PREFIX="${FLOX_CACHE_DIR}/NPM_PREFIX";
+rm -rf "$NPM_CONFIG_PREFIX";
+mkdir -p "$NPM_CONFIG_PREFIX";
+if [[ ! -z "${FLX_VERBOSE}" ]]; then
+echo "-----> NPM CONFIG PREFIX: $(npm config get prefix)";
+echo "-----> NPM_CONFIG_PREFIX: ${NPM_CONFIG_PREFIX}";
+lss "$NPM_CONFIG_PREFIX";
+lss "$NPM_CONFIG_PREFIX/bin";
+lss "$NPM_CONFIG_PREFIX/lib";
+lss "$NPM_CONFIG_PREFIX/lib/node_modules";
+fi
+
+export NPM_CONFIG_CACHE="${FLOX_CACHE_DIR}/NPM_CACHE";
+rm -rf "$NPM_CONFIG_CACHE";
+mkdir -p "$NPM_CONFIG_CACHE";
+if [[ ! -z "${FLX_VERBOSE}" ]]; then
+echo "-----> NPM CONFIG CACHE: $(npm config get cache)"
+echo "-----> NPM_CONFIG_CACHE: ${NPM_CONFIG_CACHE}";
+lss "$NPM_CONFIG_CACHE";
+fi
+
+unset FLOX_CACHE_DIR
+
+[[ ! -z "${FLX_VERBOSE}" ]] && echo "-----> NODE_PATH (init): ${NODE_PATH}";
+export NODE_PATH="$NPM_CONFIG_PREFIX/lib/node_modules${NODE_PATH:+:$NODE_PATH}";
+[[ ! -z "${FLX_VERBOSE}" ]] && echo "-----> NODE_PATH: ${NODE_PATH}";
+
+export PATH="$NPM_CONFIG_PREFIX/bin:$PATH";
+
+npm install --ignore-scripts --foreground-scripts -g sfw
+
+[[ ! -z "${FLX_VERBOSE}" ]] && ls -alshFR --color=auto "$NPM_CONFIG_PREFIX/lib/node_modules/sfw/.sfw-cache"
+
+sfw npm install -g npm --foreground-scripts 1>/dev/null
+#sfw npm install -g npm-check-updates --foreground-scripts 1>/dev/null
+
+[[ ! -z "${FLX_VERBOSE}" ]] && ls -alshFR --color=auto "$NPM_CONFIG_PREFIX/lib/node_modules/sfw/.sfw-cache"
+'''
+
+[profile]
+common = '''
+[[ ! -z "${FLX_VERBOSE}" ]] && echo "##################################################";
+[[ ! -z "${FLX_VERBOSE}" ]] && echo "########## FLOX PROFILE COMMON (${SHELL})...";
+
+export PATH="$NPM_CONFIG_PREFIX/bin:$PATH";
+
+alias lss='ls -alshF --color=auto'
+alias flx='flox activate'
+alias flxx='FLOX_VERBOSE=1 flox activate'
+
+# PWD=$(pwd);
+# [[ ! -z "${FLX_VERBOSE}" ]] && echo "-----> PWD: ${PWD}";
+# unset PWD
+
+if [[ ! -z "${FLX_VERBOSE}" ]]; then
+
+echo "=====> NODE VERSION: $(node --version)";
+WHICH_NODE=$(which node);
+echo ".....> NODE WHICH: ${WHICH_NODE}";
+lss "${WHICH_NODE}";
+unset WHICH_NODE
+
+echo "=====> NPM VERSION: $(npm --version)";
+WHICH_NPM=$(which npm);
+echo ".....> NPM WHICH: ${WHICH_NPM}";
+lss "${WHICH_NPM}";
+unset WHICH_NPM
+
+echo "-----> NPM CONFIG PREFIX: $(npm config get prefix)";
+echo "-----> NPM_CONFIG_PREFIX: ${NPM_CONFIG_PREFIX}";
+
+echo "-----> NPM CONFIG CACHE: $(npm config get cache)"
+echo "-----> NPM_CONFIG_CACHE: ${NPM_CONFIG_CACHE}";
+
+echo "=====> GIT VERSION: $(git --version)";
+WHICH_GIT=$(which git);
+echo ".....> GIT WHICH: ${WHICH_GIT}";
+lss "${WHICH_GIT}";
+unset WHICH_GIT
+
+ENV=$(env);
+echo "-----> ENV: ${ENV}";
+echo "||||||||||||||||||||||||||";
+
+echo "-----> DEBUG: ${DEBUG}";
+
+echo "-----> USER: ${USER}";
+echo "-----> USERNAME: ${USERNAME}";
+
+echo "-----> NODE_ENV: ${NODE_ENV}";
+
+git branch
+git status
+
+fi
+'''
+
+[services]
+
+[options]
+systems = ["aarch64-darwin", "aarch64-linux", "x86_64-darwin", "x86_64-linux"]
diff --git a/.github/actions/install-deps/action.yml b/.github/actions/install-deps/action.yml
index b7d220b7f6..1579309452 100644
--- a/.github/actions/install-deps/action.yml
+++ b/.github/actions/install-deps/action.yml
@@ -2,22 +2,22 @@ name: 'Install Dependencies'
description: 'Install OS and Project dependencies'
inputs:
- node-version:
- description: 'Node.js version to install'
- required: false
+ # node-version:
+ # description: 'Node.js version to install'
+ # required: false
start-xvfb:
description: 'If provided, this is the display number to run xvfb on. Should be in `:N` format, e.g., `:99`.'
required: false
- nightly:
- description: 'If true, installs the nightly versions of browsers.'
- required: false
+ # nightly:
+ # description: 'If true, installs the nightly versions of browsers.'
+ # required: false
outputs:
chrome-path:
description: 'Path to the installed Chrome binary'
value: ${{ steps.setup-chrome.outputs.chrome-path }}
- firefox-path:
- description: 'Path to the installed Firefox binary'
- value: ${{ steps.setup-firefox.outputs.firefox-path }}
+ # firefox-path:
+ # description: 'Path to the installed Firefox binary'
+ # value: ${{ steps.setup-firefox.outputs.firefox-path }}
chromedriver-path:
description: 'Path to the installed ChromeDriver binary'
value: ${{ steps.setup-chrome.outputs.chromedriver-path }}
@@ -27,25 +27,25 @@ outputs:
chromedriver-version:
description: 'Version of the installed ChromeDriver binary'
value: ${{ steps.setup-chrome.outputs.chromedriver-version }}
- firefox-version:
- description: 'Version of the installed Firefox binary'
- value: ${{ steps.setup-firefox.outputs.firefox-version }}
+ # firefox-version:
+ # description: 'Version of the installed Firefox binary'
+ # value: ${{ steps.setup-firefox.outputs.firefox-version }}
runs:
using: 'composite'
steps:
- - name: Setup Node.js
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
- with:
- registry-url: 'https://registry.npmjs.org'
- node-version: ${{ inputs.node-version }}
- node-version-file: ${{ inputs.node-version == '' && '.nvmrc' || '' }}
- cache: npm
- - name: Fix Chrome Sandbox Permissions
- shell: bash
- run: |
- sudo chown root:root /opt/google/chrome/chrome-sandbox
- sudo chmod 4755 /opt/google/chrome/chrome-sandbox
+ # - name: Setup Node.js
+ # uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+ # with:
+ # registry-url: 'https://registry.npmjs.org'
+ # node-version: ${{ inputs.node-version }}
+ # node-version-file: ${{ inputs.node-version == '' && '.nvmrc' || '' }}
+ # cache: npm
+ # - name: Fix Chrome Sandbox Permissions
+ # shell: bash
+ # run: |
+ # sudo chown root:root /opt/google/chrome/chrome-sandbox
+ # sudo chmod 4755 /opt/google/chrome/chrome-sandbox
- name: Install Xvfb
shell: bash
if: ${{ inputs.start-xvfb }}
@@ -59,14 +59,14 @@ runs:
chrome-version: ${{ inputs.nightly == 'true' && 'beta' || 'stable' }}
install-chromedriver: true
install-dependencies: true
- - name: Install Firefox
- id: setup-firefox
- uses: browser-actions/setup-firefox@5914774dda97099441f02628f8d46411fcfbd208 # v1.7.0
- with:
- firefox-version: ${{ inputs.nightly == 'true' && 'latest-nightly' || 'latest' }}
- - name: Install Project Dependencies
- shell: bash
- run: npm ci
+ # - name: Install Firefox
+ # id: setup-firefox
+ # uses: browser-actions/setup-firefox@5914774dda97099441f02628f8d46411fcfbd208 # v1.7.0
+ # with:
+ # firefox-version: ${{ inputs.nightly == 'true' && 'latest-nightly' || 'latest' }}
+ # - name: Install Project Dependencies
+ # shell: bash
+ # run: npm ci
- name: Start Xvfb
if: ${{ inputs.start-xvfb }}
env:
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index fda5a9dec2..d5fa63fb75 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -3,195 +3,206 @@
name: Deploy
on:
- # Run on push and not `workflow_run` after tests finish.
- # Specifically because `workflow_run` only runs from the context
- # of the default branch, regardless of which branch triggered the tests.
- # That means no non-default branches could deploy.
push:
branches:
- - master
- - develop
+ - xxxxxxxxxxxxxxxxxxxxxxxxx
+jobs:
+ skip:
+ if: ${{ false }}
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo OK
-concurrency:
- group: deploy/${{ github.ref_name }}
- cancel-in-progress: false
+# on:
+# # Run on push and not `workflow_run` after tests finish.
+# # Specifically because `workflow_run` only runs from the context
+# # of the default branch, regardless of which branch triggered the tests.
+# # That means no non-default branches could deploy.
+# push:
+# branches:
+# - master
+# - develop
-permissions:
- contents: read
+# concurrency:
+# group: deploy/${{ github.ref_name }}
+# cancel-in-progress: false
-jobs:
- # Since we can't run against `workflow_run`, we have to
- # wait for for the Tests to succeed first before any
- # processing can happen.
- wait-for-tests:
- name: Wait for Tests to Pass
- if: github.repository_owner == 'dequelabs'
- runs-on: ubuntu-24.04
- permissions:
- contents: read
- actions: read
- statuses: read
- timeout-minutes: 15
- steps:
- - &checkout
- name: Checkout repository
- timeout-minutes: 2
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- with:
- persist-credentials: false
- - name: Wait for Tests workflow to complete
- timeout-minutes: 13
- env:
- SHA: ${{ github.sha }}
- REPOSITORY: ${{ github.repository }}
- BRANCH: ${{ github.ref_name }}
- WORKFLOW_NAME: Tests
- DEBUG: ${{ runner.debug == '1' }}
- # One minute less than the job timeout to allow for the script to do cleanup work.
- TIMEOUT_MINUTES: 12
- GH_TOKEN: ${{ github.token }}
- run: ./.github/bin/wait-for-workflow-success.sh
- deploy-next:
- name: Deploy "next" to npm
- needs: wait-for-tests
- if: ${{ github.ref_name == 'develop' }}
- environment:
- name: registry.npmjs.org
- permissions:
- contents: read
- id-token: write # Required for OIDC
- runs-on: ubuntu-24.04
- outputs:
- version: ${{ steps.determine-version.outputs.version }}
- packageName: ${{ steps.determine-version.outputs.name }}
- steps:
- - *checkout
- - &setup-node
- name: Setup NodeJS
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
- with:
- registry-url: 'https://registry.npmjs.org'
- node-version-file: .nvmrc
- cache: npm
- - &install-project-deps
- name: Install Project Dependencies
- shell: bash
- run: npm ci
- - &build
- name: Build
- run: |
- npm run prepare
- npm run build
- - name: Determine prerelease version
- id: determine-version
- run: ./.github/bin/determine-version.sh
- - name: Bump version
- env:
- NEW_VERSION: ${{ steps.determine-version.outputs.version }}
- run: npm version "$NEW_VERSION" --no-git-tag-version --ignore-scripts
- - &validate-package
- name: Validate package is consumable
- env:
- # Ref: https://docs.github.com/en/actions/reference/workflows-and-actions/contexts#runner-context
- # Linting shows this context might be invalid, but it shouldn't be per docs.
- # Probably something missing in the schema.
- DEBUG: ${{ runner.debug == '1' }}
- run: node .github/bin/validate-package.mjs
- - name: Publish "next" version to npm
- run: npm publish --tag=next
- validate-next-deploy:
- name: Validate Next Deployment
- needs: deploy-next
- runs-on: ubuntu-24.04
- steps:
- - *checkout
- - *setup-node
- # In theory since this is a new job now, by the time
- # this would kick off the package should be available.
- # But, to be safe in case of delays in propagation,
- # we'll implement a retry mechanism.
- - name: Wait for package to be available on npm
- env:
- VERSION: ${{ needs.deploy-next.outputs.version }}
- PACKAGE_NAME: ${{ needs.deploy-next.outputs.packageName }}
- run: ./.github/bin/wait-for-npm-ready.sh
- - name: Validate installation of "next" version
- env:
- PACKAGE_NAME: ${{ needs.deploy-next.outputs.packageName }}
- VERSION: ${{ needs.deploy-next.outputs.version }}
- run: ./.github/bin/validate-npm-deploy.sh
- prod-hold:
- name: Await approval to deploy to production
- needs: wait-for-tests
- if: ${{ github.ref_name == 'master' }}
- environment:
- name: production-hold
- runs-on: ubuntu-24.04
- steps:
- - name: Awaiting approval to deploy to production
- run: echo "Approval granted to proceed to production deployment."
- prod-deploy:
- name: Deploy stable to npm
- needs: prod-hold
- if: ${{ needs.prod-hold.result == 'success' }}
- environment:
- name: registry.npmjs.org
- permissions:
- contents: read
- id-token: write # Required for OIDC
- outputs:
- version: ${{ steps.get-data.outputs.version }}
- packageName: ${{ steps.get-data.outputs.name }}
- runs-on: ubuntu-24.04
- steps:
- - *checkout
- - *setup-node
- - *install-project-deps
- - *build
- - *validate-package
- - name: Publish stable version to npm
- run: npm publish
- - name: Get published package data
- id: get-data
- run: |
- VERSION=$(npm pkg get version | tr -d '"')
- NAME=$(npm pkg get name | tr -d '"')
- echo "version=$VERSION" >> $GITHUB_OUTPUT
- echo "name=$NAME" >> $GITHUB_OUTPUT
- create-github-release:
- name: Create GitHub Release
- needs: prod-deploy
- runs-on: ubuntu-24.04
- permissions:
- contents: write # Required to create releases
- steps:
- - *checkout
- - name: Install Release Helper
- run: go install gopkg.in/aktau/github-release.v0@latest
- - name: Download Release Script
- run: curl https://raw.githubusercontent.com/dequelabs/attest-release-scripts/develop/src/node-github-release.sh -s -o ./node-github-release.sh
- - name: Make Release Script Executable
- run: chmod +x ./node-github-release.sh
- - name: Create GitHub Release
- run: ./node-github-release.sh
- validate-deploy:
- name: Validate Deployment
- needs: prod-deploy
- runs-on: ubuntu-24.04
- steps:
- - *checkout
- - *setup-node
- # In theory since this is a new job now, by the time
- # this would kick off the package should be available.
- # But, to be safe in case of delays in propagation,
- # we'll implement a retry mechanism.
- - name: Wait for package to be available on npm
- env:
- VERSION: ${{ needs.prod-deploy.outputs.version }}
- PACKAGE_NAME: ${{ needs.prod-deploy.outputs.packageName }}
- run: ./.github/bin/wait-for-npm-ready.sh
- - name: Validate installation of stable version
- env:
- PACKAGE_NAME: ${{ needs.prod-deploy.outputs.packageName }}
- VERSION: ${{ needs.prod-deploy.outputs.version }}
- run: ./.github/bin/validate-npm-deploy.sh
+# permissions:
+# contents: read
+
+# jobs:
+# # Since we can't run against `workflow_run`, we have to
+# # wait for the Tests to succeed first before any
+# # processing can happen.
+# wait-for-tests:
+# name: Wait for Tests to Pass
+# if: github.repository_owner == 'dequelabs'
+# runs-on: ubuntu-24.04
+# permissions:
+# contents: read
+# actions: read
+# statuses: read
+# timeout-minutes: 15
+# steps:
+# - &checkout
+# name: Checkout repository
+# timeout-minutes: 2
+# uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+# with:
+# persist-credentials: false
+# - name: Wait for Tests workflow to complete
+# timeout-minutes: 13
+# env:
+# SHA: ${{ github.sha }}
+# REPOSITORY: ${{ github.repository }}
+# BRANCH: ${{ github.ref_name }}
+# WORKFLOW_NAME: Tests
+# DEBUG: ${{ runner.debug == '1' }}
+# # One minute less than the job timeout to allow for the script to do cleanup work.
+# TIMEOUT_MINUTES: 12
+# GH_TOKEN: ${{ github.token }}
+# run: ./.github/bin/wait-for-workflow-success.sh
+# deploy-next:
+# name: Deploy "next" to npm
+# needs: wait-for-tests
+# if: ${{ github.ref_name == 'develop' }}
+# environment:
+# name: registry.npmjs.org
+# permissions:
+# contents: read
+# id-token: write # Required for OIDC
+# runs-on: ubuntu-24.04
+# outputs:
+# version: ${{ steps.determine-version.outputs.version }}
+# packageName: ${{ steps.determine-version.outputs.name }}
+# steps:
+# - *checkout
+# - &setup-node
+# name: Setup NodeJS
+# uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+# with:
+# registry-url: 'https://registry.npmjs.org'
+# node-version-file: .nvmrc
+# cache: npm
+# - &install-project-deps
+# name: Install Project Dependencies
+# shell: bash
+# run: npm ci
+# - &build
+# name: Build
+# run: |
+# npm run prepare
+# npm run build
+# - name: Determine prerelease version
+# id: determine-version
+# run: ./.github/bin/determine-version.sh
+# - name: Bump version
+# env:
+# NEW_VERSION: ${{ steps.determine-version.outputs.version }}
+# run: npm version "$NEW_VERSION" --no-git-tag-version --ignore-scripts
+# - &validate-package
+# name: Validate package is consumable
+# env:
+# # Ref: https://docs.github.com/en/actions/reference/workflows-and-actions/contexts#runner-context
+# # Linting shows this context might be invalid, but it shouldn't be per docs.
+# # Probably something missing in the schema.
+# DEBUG: ${{ runner.debug == '1' }}
+# run: node .github/bin/validate-package.mjs
+# - name: Publish "next" version to npm
+# run: npm publish --tag=next
+# validate-next-deploy:
+# name: Validate Next Deployment
+# needs: deploy-next
+# runs-on: ubuntu-24.04
+# steps:
+# - *checkout
+# - *setup-node
+# # In theory since this is a new job now, by the time
+# # this would kick off the package should be available.
+# # But, to be safe in case of delays in propagation,
+# # we'll implement a retry mechanism.
+# - name: Wait for package to be available on npm
+# env:
+# VERSION: ${{ needs.deploy-next.outputs.version }}
+# PACKAGE_NAME: ${{ needs.deploy-next.outputs.packageName }}
+# run: ./.github/bin/wait-for-npm-ready.sh
+# - name: Validate installation of "next" version
+# env:
+# PACKAGE_NAME: ${{ needs.deploy-next.outputs.packageName }}
+# VERSION: ${{ needs.deploy-next.outputs.version }}
+# run: ./.github/bin/validate-npm-deploy.sh
+# prod-hold:
+# name: Await approval to deploy to production
+# needs: wait-for-tests
+# if: ${{ github.ref_name == 'master' }}
+# environment:
+# name: production-hold
+# runs-on: ubuntu-24.04
+# steps:
+# - name: Awaiting approval to deploy to production
+# run: echo "Approval granted to proceed to production deployment."
+# prod-deploy:
+# name: Deploy stable to npm
+# needs: prod-hold
+# if: ${{ needs.prod-hold.result == 'success' }}
+# environment:
+# name: registry.npmjs.org
+# permissions:
+# contents: read
+# id-token: write # Required for OIDC
+# outputs:
+# version: ${{ steps.get-data.outputs.version }}
+# packageName: ${{ steps.get-data.outputs.name }}
+# runs-on: ubuntu-24.04
+# steps:
+# - *checkout
+# - *setup-node
+# - *install-project-deps
+# - *build
+# - *validate-package
+# - name: Publish stable version to npm
+# run: npm publish
+# - name: Get published package data
+# id: get-data
+# run: |
+# VERSION=$(npm pkg get version | tr -d '"')
+# NAME=$(npm pkg get name | tr -d '"')
+# echo "version=$VERSION" >> $GITHUB_OUTPUT
+# echo "name=$NAME" >> $GITHUB_OUTPUT
+# create-github-release:
+# name: Create GitHub Release
+# needs: prod-deploy
+# runs-on: ubuntu-24.04
+# permissions:
+# contents: write # Required to create releases
+# steps:
+# - *checkout
+# - name: Install Release Helper
+# run: go install gopkg.in/aktau/github-release.v0@latest
+# - name: Download Release Script
+# run: curl https://raw.githubusercontent.com/dequelabs/attest-release-scripts/develop/src/node-github-release.sh -s -o ./node-github-release.sh
+# - name: Make Release Script Executable
+# run: chmod +x ./node-github-release.sh
+# - name: Create GitHub Release
+# run: ./node-github-release.sh
+# validate-deploy:
+# name: Validate Deployment
+# needs: prod-deploy
+# runs-on: ubuntu-24.04
+# steps:
+# - *checkout
+# - *setup-node
+# # In theory since this is a new job now, by the time
+# # this would kick off the package should be available.
+# # But, to be safe in case of delays in propagation,
+# # we'll implement a retry mechanism.
+# - name: Wait for package to be available on npm
+# env:
+# VERSION: ${{ needs.prod-deploy.outputs.version }}
+# PACKAGE_NAME: ${{ needs.prod-deploy.outputs.packageName }}
+# run: ./.github/bin/wait-for-npm-ready.sh
+# - name: Validate installation of stable version
+# env:
+# PACKAGE_NAME: ${{ needs.prod-deploy.outputs.packageName }}
+# VERSION: ${{ needs.prod-deploy.outputs.version }}
+# run: ./.github/bin/validate-npm-deploy.sh
diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml
index 545beb920f..80df8fa9d0 100644
--- a/.github/workflows/format.yml
+++ b/.github/workflows/format.yml
@@ -1,34 +1,45 @@
name: Formatter
on:
- pull_request:
+ push:
branches:
- - develop
-
+ - xxxxxxxxxxxxxxxxxxxxxxxxx
jobs:
- prettier:
- # This conditional prevents running the job on PRs from forks; won't
- # have permissions to commit changes, so the job would fail if it ran.
- # PRs from forks will instead rely on failing the fmt_check job in test.yml
- if: github.event.pull_request.head.repo.full_name == github.repository
+ skip:
+ if: ${{ false }}
runs-on: ubuntu-latest
- timeout-minutes: 5
steps:
- - uses: actions/checkout@v6
- with:
- ref: ${{ github.event.pull_request.head.ref }}
- - name: Install dependencies
- run: npm ci
- - uses: actions/setup-node@v6
- with:
- node-version-file: .nvmrc
- cache: 'npm'
- # Workflows are not allowed to edit workflows. As result, we need to prevent Prettier from formatting them.
- - name: Prevent workflows from being formatted
- run: echo ".github" >> .prettierignore && cat .prettierignore
- - run: npm run fmt
- # Prevent the prettierignore change from being committed.
- - run: git checkout .prettierignore
- - uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # tag=v5
- with:
- commit_message: ':robot: Automated formatting fixes'
+ - run: echo OK
+
+# on:
+# pull_request:
+# branches:
+# - develop
+
+# jobs:
+# prettier:
+# # This conditional prevents running the job on PRs from forks; won't
+# # have permissions to commit changes, so the job would fail if it ran.
+# # PRs from forks will instead rely on failing the fmt_check job in test.yml
+# if: ${{ false }} # github.event.pull_request.head.repo.full_name == github.repository
+# runs-on: ubuntu-latest
+# timeout-minutes: 5
+# steps:
+# - uses: actions/checkout@v6
+# with:
+# ref: ${{ github.event.pull_request.head.ref }}
+# - name: Install dependencies
+# run: npm ci
+# - uses: actions/setup-node@v6
+# with:
+# node-version-file: .nvmrc
+# cache: 'npm'
+# # Workflows are not allowed to edit workflows. As result, we need to prevent Prettier from formatting them.
+# - name: Prevent workflows from being formatted
+# run: echo ".github" >> .prettierignore && cat .prettierignore
+# - run: npm run fmt
+# # Prevent the prettierignore change from being committed.
+# - run: git checkout .prettierignore
+# - uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # tag=v5
+# with:
+# commit_message: ':robot: Automated formatting fixes'
diff --git a/.github/workflows/label-extension-linter-issues.yml b/.github/workflows/label-extension-linter-issues.yml
index fee190e945..5fe42d2ea6 100644
--- a/.github/workflows/label-extension-linter-issues.yml
+++ b/.github/workflows/label-extension-linter-issues.yml
@@ -1,24 +1,35 @@
name: Add extension and linting labels to associated opened issues
on:
- issues:
- types: [opened]
-
+ push:
+ branches:
+ - xxxxxxxxxxxxxxxxxxxxxxxxx
jobs:
- label_extension_linting_issues:
+ skip:
+ if: ${{ false }}
runs-on: ubuntu-latest
- permissions:
- issues: write
steps:
- - name: Label Extension Issues
- uses: andymckay/labeler@e6c4322d0397f3240f0e7e30a33b5c5df2d39e90 # tag=1
- if: contains(toJson(github.event.issue.body), '### Product\n\naxe Extension\n\n')
- with:
- add-labels: 'extension'
- repo-token: ${{ secrets.GITHUB_TOKEN }}
- - name: Label Linting Issues
- uses: andymckay/labeler@e6c4322d0397f3240f0e7e30a33b5c5df2d39e90 # tag=1
- if: contains(toJson(github.event.issue.body), '### Product\n\naxe Linter\n\n')
- with:
- add-labels: 'linting'
- repo-token: ${{ secrets.GITHUB_TOKEN }}
+ - run: echo OK
+
+# on:
+# issues:
+# types: [opened]
+
+# jobs:
+# label_extension_linting_issues:
+# runs-on: ubuntu-latest
+# permissions:
+# issues: write
+# steps:
+# - name: Label Extension Issues
+# uses: andymckay/labeler@e6c4322d0397f3240f0e7e30a33b5c5df2d39e90 # tag=1
+# if: contains(toJson(github.event.issue.body), '### Product\n\naxe Extension\n\n')
+# with:
+# add-labels: 'extension'
+# repo-token: ${{ secrets.GITHUB_TOKEN }}
+# - name: Label Linting Issues
+# uses: andymckay/labeler@e6c4322d0397f3240f0e7e30a33b5c5df2d39e90 # tag=1
+# if: contains(toJson(github.event.issue.body), '### Product\n\naxe Linter\n\n')
+# with:
+# add-labels: 'linting'
+# repo-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/nightly-tests.yml b/.github/workflows/nightly-tests.yml
index ab4cf3fd79..f46ff9a861 100644
--- a/.github/workflows/nightly-tests.yml
+++ b/.github/workflows/nightly-tests.yml
@@ -1,81 +1,92 @@
name: Nightly Tests
on:
- schedule:
- # Runs every day at 2:17 AM UTC
- # Schedules should try to be offset from common times
- # to avoid high contention times on GitHub runners.
- - cron: '17 2 * * *'
- workflow_dispatch:
+ push:
+ branches:
+ - xxxxxxxxxxxxxxxxxxxxxxxxx
+jobs:
+ skip:
+ if: ${{ false }}
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo OK
-env:
- CHROME_DEVEL_SANDBOX: /opt/google/chrome/chrome-sandbox
+# on:
+# schedule:
+# # Runs every day at 2:17 AM UTC
+# # Schedules should try to be offset from common times
+# # to avoid high contention times on GitHub runners.
+# - cron: '17 2 * * *'
+# workflow_dispatch:
-permissions:
- contents: read
+# env:
+# CHROME_DEVEL_SANDBOX: /opt/google/chrome/chrome-sandbox
-jobs:
- browsers:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- env:
- DISPLAY: ':99'
- steps:
- - &checkout
- name: Checkout repository
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- with:
- persist-credentials: false
- - name: Install Dependencies
- id: install-deps
- uses: ./.github/actions/install-deps
- with:
- nightly: 'true'
- start-xvfb: ${{ env.DISPLAY }}
- - &build
- name: Build
- id: build
- run: |
- npm run prepare
- npm run build
- - name: Run Firefox Nightly Browser Tests
- env:
- FIREFOX_NIGHTLY_BIN: ${{ steps.install-deps.outputs.firefox-path }}
- run: npm run test -- --browsers FirefoxNightly
- - name: Run Chrome Beta Browser Tests
- if: ${{ !cancelled() && steps.build.conclusion == 'success' }}
- env:
- CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
- CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
- run: npm run test -- --browsers Chrome
- act:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- steps:
- - *checkout
- - &install-deps
- name: Install Deps
- id: install-deps
- uses: ./.github/actions/install-deps
- - *build
- - name: Install Latest WCAG ACT Rules
- run: npm install w3c/wcag-act-rules#main
- - name: Run ACT Tests
- env:
- CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
- CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
- run: npm run test:act
- aria-practices:
- runs-on: ubuntu-24.04
- timeout-minutes: 7
- steps:
- - *checkout
- - *install-deps
- - *build
- - name: Install Latest W3C Aria Practices
- run: npm install w3c/aria-practices#main
- - name: Run ARIA Practices Tests
- env:
- CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
- CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
- run: npm run test:apg
+# permissions:
+# contents: read
+
+# jobs:
+# browsers:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# env:
+# DISPLAY: ':99'
+# steps:
+# - &checkout
+# name: Checkout repository
+# uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+# with:
+# persist-credentials: false
+# - name: Install Dependencies
+# id: install-deps
+# uses: ./.github/actions/install-deps
+# with:
+# nightly: 'true'
+# start-xvfb: ${{ env.DISPLAY }}
+# - &build
+# name: Build
+# id: build
+# run: |
+# npm run prepare
+# npm run build
+# - name: Run Firefox Nightly Browser Tests
+# env:
+# FIREFOX_NIGHTLY_BIN: ${{ steps.install-deps.outputs.firefox-path }}
+# run: npm run test -- --browsers FirefoxNightly
+# - name: Run Chrome Beta Browser Tests
+# if: ${{ !cancelled() && steps.build.conclusion == 'success' }}
+# env:
+# CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
+# CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
+# run: npm run test -- --browsers Chrome
+# act:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# steps:
+# - *checkout
+# - &install-deps
+# name: Install Deps
+# id: install-deps
+# uses: ./.github/actions/install-deps
+# - *build
+# - name: Install Latest WCAG ACT Rules
+# run: npm install w3c/wcag-act-rules#main
+# - name: Run ACT Tests
+# env:
+# CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
+# CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
+# run: npm run test:act
+# aria-practices:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 7
+# steps:
+# - *checkout
+# - *install-deps
+# - *build
+# - name: Install Latest W3C Aria Practices
+# run: npm install w3c/aria-practices#main
+# - name: Run ARIA Practices Tests
+# env:
+# CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
+# CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
+# run: npm run test:apg
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 0000000000..3ced88332c
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,72 @@
+#
+# https://docs.npmjs.com/trusted-publishers
+#
+
+#GIT_COMMITTER_DATE="$(git show v4.11.1_DAISY --format=%aD | head -1)" git tag -a "v4.11.1-canary.2" v4.11.1_DAISY -m "release tag v4.11.1-canary.2" && git push --tags origin v4.11.1_DAISY && git --no-pager tag --list --format='%(refname) %(taggerdate)'
+
+#git tag -d v4.11.1-canary.2 && git push origin :refs/tags/v4.11.1-canary.2
+
+name: Publish Package
+
+#on:
+# release:
+# types: [published]
+
+on:
+ push:
+ #branches: [v4.11.1_DAISY]
+ tags:
+ # https://github.com/daisy/ace/settings/rules
+ - 'v*' # excludes slash /
+
+permissions:
+ id-token: write # Required for OIDC
+ contents: read
+
+jobs:
+ publish:
+ #if: "github.event_name == 'push' && contains(github.event.head_commit.message, 'release tag')"
+ #if: "contains(github.event.head_commit.message, 'release tag')"
+ runs-on: ubuntu-24.04
+ timeout-minutes: 15
+ env:
+ DISPLAY: :99
+ steps:
+ - uses: actions/checkout@v6
+ - uses: actions/setup-node@v6
+ with:
+ node-version: '24'
+ registry-url: 'https://registry.npmjs.org'
+ # cache: npm
+ - run: node --version ; npm --version
+ - run: npm --global install npm@^11
+ - run: npm --version
+ - name: NPM INSTALL
+ run: rm -rf node_modules ; HUSKY=0 HUSKY_SKIP_HOOKS=0 HUSKY_SKIP_INSTALL=0 npm ci --ignore-scripts ; (npm audit || echo OK) ; (npm outdated || echo OK) ; ((npm exec --no --offline -- taze --fail-on-outdated --all --force --include-locked --concurrency 1 --loglevel debug --cwd . && npm exec --no --offline -- taze major --fail-on-outdated --all --force --include-locked --concurrency 1 --loglevel debug --cwd .) || echo OK)
+ shell: bash
+ - name: BUILD
+ run: npm run build
+ shell: bash
+ - &install-deps-with-xvfb
+ name: Install Deps
+ uses: ./.github/actions/install-deps
+ id: install-deps
+ with:
+ start-xvfb: ${{ env.DISPLAY }}
+ - name: CHROME TESTS
+ env:
+ CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
+ CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
+ #run: env | grep -i CHROME ; npm run test:integration:chromeheadless ; npm run test:integration:chrome ; npm run test -- --browsers ChromeNoSandbox ; npm run test -- --browsers ChromeHeadlessNoSandbox
+ run: env | grep -i CHROME ; npm run test:integration:chrome ; npm run test -- --browsers ChromeNoSandbox
+ # - name: HUSKY
+ # run: (npx husky || echo OK)
+ # shell: bash
+ # - name: CHROME TESTS
+ # run: rm -rf ~/.browser-driver-manager ; npx browser-driver-manager install chromedriver --verbose ; cat ~/.browser-driver-manager/.env ; source ~/.browser-driver-manager/.env ; env | grep -i CHROME ; echo $CHROME_TEST_PATH ; export CHROME_BIN="${CHROME_TEST_PATH}" ; echo $CHROMEDRIVER_TEST_PATH ; export CHROMEDRIVER_BIN="${CHROMEDRIVER_TEST_PATH}" ; npm run test:chromeheadless
+ # shell: bash
+ - name: PACKAGE PUBLISH
+ #env:
+ # NPM_CONFIG_PROVENANCE: true
+ shell: bash
+ run: npm publish --access public --tag=latest .
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index ad707b7f5a..f61cfa4329 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -1,47 +1,58 @@
name: Create release candidate
-on: [workflow_dispatch]
-
+on:
+ push:
+ branches:
+ - xxxxxxxxxxxxxxxxxxxxxxxxx
jobs:
- create_release:
- name: Create release
+ skip:
+ if: ${{ false }}
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v6
- with:
- fetch-depth: 0
- - uses: actions/setup-node@v6
- with:
- node-version-file: .nvmrc
- cache: 'npm'
- - name: Run release script and open PR
- run: |
- git config user.name "API Team CI User"
- git config user.email "aciattestteamci@deque.com"
-
- Branch="release-$(date +%Y-%m-%d)"
- echo "Branch: $Branch"
- git checkout -b "$Branch"
-
- npm ci
- npm run release
-
- git push origin "$Branch" --force
-
- Version=$(jq -r .version ./package.json)
- echo "Version: $Version"
-
- # Get the additions to the changelog as the commit body and generate the PR url
- ReleaseNotes=$(
- git show \
- --no-color \
- --no-prefix \
- --output-indicator-new=! CHANGELOG.md | egrep '^!' | awk -F'^[!]' '{print $2}' | sed -e 's/\n/$0A/g'
- )
-
- echo "$ReleaseNotes" >> /tmp/pr.txt
- echo "" >> /tmp/pr.txt
- echo "This PR was opened by a robot :robot: :tada:" >> /tmp/pr.txt
- gh pr create --title "chore(release): v$Version" --body-file "/tmp/pr.txt" --base master
- env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ - run: echo OK
+
+# on: [workflow_dispatch]
+
+# jobs:
+# create_release:
+# name: Create release
+# runs-on: ubuntu-latest
+# steps:
+# - uses: actions/checkout@v6
+# with:
+# fetch-depth: 0
+# - uses: actions/setup-node@v6
+# with:
+# node-version-file: .nvmrc
+# cache: 'npm'
+# - name: Run release script and open PR
+# run: |
+# git config user.name "API Team CI User"
+# git config user.email "aciattestteamci@deque.com"
+
+# Branch="release-$(date +%Y-%m-%d)"
+# echo "Branch: $Branch"
+# git checkout -b "$Branch"
+
+# npm ci
+# npm run release
+
+# git push origin "$Branch" --force
+
+# Version=$(jq -r .version ./package.json)
+# echo "Version: $Version"
+
+# # Get the additions to the changelog as the commit body and generate the PR url
+# ReleaseNotes=$(
+# git show \
+# --no-color \
+# --no-prefix \
+# --output-indicator-new=! CHANGELOG.md | egrep '^!' | awk -F'^[!]' '{print $2}' | sed -e 's/\n/$0A/g'
+# )
+
+# echo "$ReleaseNotes" >> /tmp/pr.txt
+# echo "" >> /tmp/pr.txt
+# echo "This PR was opened by a robot :robot: :tada:" >> /tmp/pr.txt
+# gh pr create --title "chore(release): v$Version" --body-file "/tmp/pr.txt" --base master
+# env:
+# GITHUB_TOKEN: ${{ secrets.PAT }}
diff --git a/.github/workflows/semantic-pr-title.yml b/.github/workflows/semantic-pr-title.yml
index 181cd23986..b4524c3c26 100644
--- a/.github/workflows/semantic-pr-title.yml
+++ b/.github/workflows/semantic-pr-title.yml
@@ -1,15 +1,27 @@
name: Semantic PR title
on:
- pull_request:
- types:
- - opened
- - reopened
- - edited
- - synchronize
-
+ push:
+ branches:
+ - xxxxxxxxxxxxxxxxxxxxxxxxx
jobs:
- semantic-pr-title:
+ skip:
+ if: ${{ false }}
runs-on: ubuntu-latest
steps:
- - uses: dequelabs/semantic-pr-title@v1
+ - run: echo OK
+
+# on:
+# pull_request:
+# types:
+# - opened
+# - reopened
+# - edited
+# - synchronize
+
+# jobs:
+# semantic-pr-title:
+# if: ${{ false }}
+# runs-on: ubuntu-latest
+# steps:
+# - uses: dequelabs/semantic-pr-title@v1
diff --git a/.github/workflows/sync-master-develop.yml b/.github/workflows/sync-master-develop.yml
index 52116c2c9e..1aed2cf869 100644
--- a/.github/workflows/sync-master-develop.yml
+++ b/.github/workflows/sync-master-develop.yml
@@ -1,14 +1,26 @@
name: Sync master/develop branches
+
on:
push:
branches:
- - master
+ - xxxxxxxxxxxxxxxxxxxxxxxxx
jobs:
- create_sync_pull_request:
+ skip:
+ if: ${{ false }}
runs-on: ubuntu-latest
steps:
- - uses: dequelabs/action-sync-branches@v1
- with:
- github-token: ${{ secrets.GITHUB_TOKEN }}
- pr-title: 'chore: merge master into develop'
- pr-reviewers: straker,WilcoFiers,stephenmathieson
+ - run: echo OK
+
+# on:
+# push:
+# branches:
+# - master
+# jobs:
+# create_sync_pull_request:
+# runs-on: ubuntu-latest
+# steps:
+# - uses: dequelabs/action-sync-branches@v1
+# with:
+# github-token: ${{ secrets.GITHUB_TOKEN }}
+# pr-title: 'chore: merge master into develop'
+# pr-reviewers: straker,WilcoFiers,stephenmathieson
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 7988fcc0c5..9a62097ab8 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -1,249 +1,260 @@
name: Tests
on:
- pull_request:
push:
branches:
- - master
- - develop
- - release-*
+ - xxxxxxxxxxxxxxxxxxxxxxxxx
+jobs:
+ skip:
+ if: ${{ false }}
+ runs-on: ubuntu-latest
+ steps:
+ - run: echo OK
-# We want to group to the workflow for each branch.
-# Non-push events will be cancelled if a new one is started.
-# Push events will run sequentially. This helps ensure that
-# the `next` tag isn't out of sync.
-concurrency:
- group: ${{ github.workflow }}-${{ github.ref_name }}
- cancel-in-progress: ${{ github.event_name == 'pull_request' }}
+# on:
+# pull_request:
+# push:
+# branches:
+# - master
+# - develop
+# - release-*
-permissions: {}
+# # We want to group to the workflow for each branch.
+# # Non-push events will be cancelled if a new one is started.
+# # Push events will run sequentially. This helps ensure that
+# # the `next` tag isn't out of sync.
+# concurrency:
+# group: ${{ github.workflow }}-${{ github.ref_name }}
+# cancel-in-progress: ${{ github.event_name == 'pull_request' }}
-env:
- CHROME_DEVEL_SANDBOX: /opt/google/chrome/chrome-sandbox
+# permissions: {}
-jobs:
- lint:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- steps:
- - &checkout
- name: Checkout repository
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- with:
- persist-credentials: false
- - &setup-node
- name: Set up Node.js
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
- with:
- node-version-file: .nvmrc
- cache: 'npm'
- - &install-deps-directly
- name: Install Dependencies
- run: npm ci
- - name: Run ESLint
- run: npm run eslint
-
- fmt_check:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- steps:
- - *checkout
- - *setup-node
- - *install-deps-directly
- - run: npm run fmt:check
-
- build:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- steps:
- - *checkout
- - *setup-node
- - *install-deps-directly
- - &build
- name: Build
- run: |
- npm run prepare
- npm run build
- - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
- with:
- name: axe-core
- path: axe.js
- retention-days: 1
-
- test_chrome:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- env:
- DISPLAY: :99
- steps:
- - *checkout
- - &install-deps-with-xvfb
- name: Install Deps
- uses: ./.github/actions/install-deps
- id: install-deps
- with:
- start-xvfb: ${{ env.DISPLAY }}
- - *build
- - name: Run Tests Against Chrome
- env:
- CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
- CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
- run: npm run test -- --browsers Chrome
- - name: Run Chrome Integration Tests
- env:
- CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
- CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
- run: npm run test:integration:chrome
-
- test_firefox:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- env:
- DISPLAY: :99
- steps:
- - *checkout
- - *install-deps-with-xvfb
- - *build
- - name: Run Tests Against Firefox
- env:
- FIREFOX_BIN: ${{ steps.install-deps.outputs.firefox-path }}
- run: npm run test -- --browsers Firefox
- - name: Run Firefox Integration Tests
- env:
- FIREFOX_BIN: ${{ steps.install-deps.outputs.firefox-path }}
- run: npm run test:integration:firefox
-
- # Run examples under `doc/examples`
- test_examples:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- steps:
- - *checkout
- - &install-deps
- name: Install Deps
- id: install-deps
- uses: ./.github/actions/install-deps
- - *build
- - name: Run Tests Against Examples
- run: npm run test:examples
-
- test_act:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- needs: build
- steps:
- - *checkout
- - *install-deps
- - &restore-axe-build
- name: Restore axe build
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
- with:
- name: axe-core
- - name: Run ACT Tests
- env:
- CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
- CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
- run: npm run test:act
-
- test_aria_practices:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- needs: build
- steps:
- - *checkout
- - *install-deps
- - *restore-axe-build
- - name: Run ARIA Practices Tests
- env:
- CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
- CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
- run: npm run test:apg
-
- test_locales:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- needs: build
- steps:
- - *checkout
- - *install-deps
- - *restore-axe-build
- - name: Run Locale Tests
- run: npm run test:locales
-
- test_virtual_rules:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- needs: build
- steps:
- - *checkout
- - *install-deps
- - *restore-axe-build
- - name: Run Virtual Rules Tests
- run: npm run test:virtual-rules
-
- test_jsdom:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- needs: build
- steps:
- - *checkout
- - *install-deps
- - *restore-axe-build
- - name: Run jsdom Tests
- run: npm run test:jsdom
-
- build_api_docs:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- steps:
- - *checkout
- - *install-deps
- - name: Run API Docs Build
- run: npm run api-docs
-
- test_rule_help_version:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- if: ${{ github.ref_name == 'master' }}
- steps:
- - *checkout
- - *install-deps
- - name: Run Rule Help Version Tests
- run: npm run test:rule-help-version
-
- sri-validate:
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- needs: build
- # Run on master and RC branches along with PRs targeting those branches.
- if: ${{ github.ref_name == 'master' || startsWith(github.ref_name, 'release-') || github.event.pull_request.base.ref == 'master' || startsWith(github.event.pull_request.base.ref, 'release-') }}
- steps:
- - *checkout
- - *install-deps
- - *restore-axe-build
- - name: Validate Subresource Integrity
- run: npm run sri-validate
-
- test_node:
- # The package can't be built on Node 6 anymore, but should still run there.
- # So we need to pull in a previous build artifact.
- needs: build
- strategy:
- matrix:
- node:
- - 6
- - 18
- - 20
- - 22
- - 24
- runs-on: ubuntu-24.04
- timeout-minutes: 10
- steps:
- - *checkout
- - name: Set up Node.js
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
- with:
- node-version: ${{ matrix.node }}
- - *restore-axe-build
- - name: Run Node.js Tests
- run: npm run test:node
+# env:
+# CHROME_DEVEL_SANDBOX: /opt/google/chrome/chrome-sandbox
+
+# jobs:
+# lint:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# steps:
+# - &checkout
+# name: Checkout repository
+# uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+# with:
+# persist-credentials: false
+# - &setup-node
+# name: Set up Node.js
+# uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+# with:
+# node-version-file: .nvmrc
+# cache: 'npm'
+# - &install-deps-directly
+# name: Install Dependencies
+# run: npm ci
+# - name: Run ESLint
+# run: npm run eslint
+
+# fmt_check:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# steps:
+# - *checkout
+# - *setup-node
+# - *install-deps-directly
+# - run: npm run fmt:check
+
+# build:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# steps:
+# - *checkout
+# - *setup-node
+# - *install-deps-directly
+# - &build
+# name: Build
+# run: |
+# npm run prepare
+# npm run build
+# - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+# with:
+# name: axe-core
+# path: axe.js
+# retention-days: 1
+
+# test_chrome:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# env:
+# DISPLAY: :99
+# steps:
+# - *checkout
+# - &install-deps-with-xvfb
+# name: Install Deps
+# uses: ./.github/actions/install-deps
+# id: install-deps
+# with:
+# start-xvfb: ${{ env.DISPLAY }}
+# - *build
+# - name: Run Tests Against Chrome
+# env:
+# CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
+# CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
+# run: npm run test -- --browsers Chrome
+# - name: Run Chrome Integration Tests
+# env:
+# CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
+# CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
+# run: npm run test:integration:chrome
+
+# test_firefox:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# env:
+# DISPLAY: :99
+# steps:
+# - *checkout
+# - *install-deps-with-xvfb
+# - *build
+# - name: Run Tests Against Firefox
+# env:
+# FIREFOX_BIN: ${{ steps.install-deps.outputs.firefox-path }}
+# run: npm run test -- --browsers Firefox
+# - name: Run Firefox Integration Tests
+# env:
+# FIREFOX_BIN: ${{ steps.install-deps.outputs.firefox-path }}
+# run: npm run test:integration:firefox
+
+# # Run examples under `doc/examples`
+# test_examples:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# steps:
+# - *checkout
+# - &install-deps
+# name: Install Deps
+# id: install-deps
+# uses: ./.github/actions/install-deps
+# - *build
+# - name: Run Tests Against Examples
+# run: npm run test:examples
+
+# test_act:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# needs: build
+# steps:
+# - *checkout
+# - *install-deps
+# - &restore-axe-build
+# name: Restore axe build
+# uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
+# with:
+# name: axe-core
+# - name: Run ACT Tests
+# env:
+# CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
+# CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
+# run: npm run test:act
+
+# test_aria_practices:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# needs: build
+# steps:
+# - *checkout
+# - *install-deps
+# - *restore-axe-build
+# - name: Run ARIA Practices Tests
+# env:
+# CHROME_BIN: ${{ steps.install-deps.outputs.chrome-path }}
+# CHROMEDRIVER_BIN: ${{ steps.install-deps.outputs.chromedriver-path }}
+# run: npm run test:apg
+
+# test_locales:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# needs: build
+# steps:
+# - *checkout
+# - *install-deps
+# - *restore-axe-build
+# - name: Run Locale Tests
+# run: npm run test:locales
+
+# test_virtual_rules:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# needs: build
+# steps:
+# - *checkout
+# - *install-deps
+# - *restore-axe-build
+# - name: Run Virtual Rules Tests
+# run: npm run test:virtual-rules
+
+# test_jsdom:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# needs: build
+# steps:
+# - *checkout
+# - *install-deps
+# - *restore-axe-build
+# - name: Run jsdom Tests
+# run: npm run test:jsdom
+
+# build_api_docs:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# steps:
+# - *checkout
+# - *install-deps
+# - name: Run API Docs Build
+# run: npm run api-docs
+
+# test_rule_help_version:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# if: ${{ github.ref_name == 'master' }}
+# steps:
+# - *checkout
+# - *install-deps
+# - name: Run Rule Help Version Tests
+# run: npm run test:rule-help-version
+
+# sri-validate:
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# needs: build
+# # Run on master and RC branches along with PRs targeting those branches.
+# if: ${{ github.ref_name == 'master' || startsWith(github.ref_name, 'release-') || github.event.pull_request.base.ref == 'master' || startsWith(github.event.pull_request.base.ref, 'release-') }}
+# steps:
+# - *checkout
+# - *install-deps
+# - *restore-axe-build
+# - name: Validate Subresource Integrity
+# run: npm run sri-validate
+
+# test_node:
+# # The package can't be built on Node 6 anymore, but should still run there.
+# # So we need to pull in a previous build artifact.
+# needs: build
+# strategy:
+# matrix:
+# node:
+# - 6
+# - 18
+# - 20
+# - 22
+# - 24
+# runs-on: ubuntu-24.04
+# timeout-minutes: 10
+# steps:
+# - *checkout
+# - name: Set up Node.js
+# uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+# with:
+# node-version: ${{ matrix.node }}
+# - *restore-axe-build
+# - name: Run Node.js Tests
+# run: npm run test:node
diff --git a/.github/workflows/update-generated-files.yaml b/.github/workflows/update-generated-files.yaml
index 1968e89c50..a67f46c2b3 100644
--- a/.github/workflows/update-generated-files.yaml
+++ b/.github/workflows/update-generated-files.yaml
@@ -3,70 +3,81 @@ name: Update generated files
on:
push:
branches:
- - develop
-
-env:
- BRANCH_NAME: sync-generated-files
-
+ - xxxxxxxxxxxxxxxxxxxxxxxxx
jobs:
- update_generated_files:
+ skip:
+ if: ${{ false }}
runs-on: ubuntu-latest
steps:
- - name: Checkout
- uses: actions/checkout@v6
- with:
- fetch-depth: 0
- - uses: actions/setup-node@v6
- with:
- node-version-file: .nvmrc
- cache: 'npm'
+ - run: echo OK
+
+# on:
+# push:
+# branches:
+# - develop
+
+# env:
+# BRANCH_NAME: sync-generated-files
+
+# jobs:
+# update_generated_files:
+# runs-on: ubuntu-latest
+# steps:
+# - name: Checkout
+# uses: actions/checkout@v6
+# with:
+# fetch-depth: 0
+# - uses: actions/setup-node@v6
+# with:
+# node-version-file: .nvmrc
+# cache: 'npm'
- - name: Build
- run: |
- npm ci
- npm run build
+# - name: Build
+# run: |
+# npm ci
+# npm run build
- - name: Check for changes
- id: changes
- run: |
- changes=$(git status --porcelain)
- # see https://unix.stackexchange.com/a/509498
- echo $changes | grep . && echo "Changes detected" || echo "No changes"
- echo "changes=$changes" >> "$GITHUB_OUTPUT"
+# - name: Check for changes
+# id: changes
+# run: |
+# changes=$(git status --porcelain)
+# # see https://unix.stackexchange.com/a/509498
+# echo $changes | grep . && echo "Changes detected" || echo "No changes"
+# echo "changes=$changes" >> "$GITHUB_OUTPUT"
- - name: Check branch exists
- id: branchExists
- if: steps.changes.outputs.changes
- run: |
- exists=$(git ls-remote --heads origin $BRANCH_NAME)
- echo $exists | grep . && echo "Branch '$BRANCH_NAME' already exists on remote" || echo "Branch does not exists in remote"
- echo "exists=$exists" >> "$GITHUB_OUTPUT"
+# - name: Check branch exists
+# id: branchExists
+# if: steps.changes.outputs.changes
+# run: |
+# exists=$(git ls-remote --heads origin $BRANCH_NAME)
+# echo $exists | grep . && echo "Branch '$BRANCH_NAME' already exists on remote" || echo "Branch does not exist on remote"
+# echo "exists=$exists" >> "$GITHUB_OUTPUT"
- - name: Create pull request
- if: ${{ steps.changes.outputs.changes && !steps.branchExists.outputs.exists }}
- run: |
- git status
- git config user.name github-actions
- git config user.email github-actions@github.com
- git add .
- git checkout -b $BRANCH_NAME
- git commit -m "chore: sync generated files"
- git push origin $BRANCH_NAME
- gh pr create --base develop --head $BRANCH_NAME --title "chore: sync generated files" --body ""
- env:
- GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+# - name: Create pull request
+# if: ${{ steps.changes.outputs.changes && !steps.branchExists.outputs.exists }}
+# run: |
+# git status
+# git config user.name github-actions
+# git config user.email github-actions@github.com
+# git add .
+# git checkout -b $BRANCH_NAME
+# git commit -m "chore: sync generated files"
+# git push origin $BRANCH_NAME
+# gh pr create --base develop --head $BRANCH_NAME --title "chore: sync generated files" --body ""
+# env:
+# GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- - name: Update pull request
- if: ${{ steps.changes.outputs.changes && steps.branchExists.outputs.exists }}
- run: |
- git reset HEAD --hard
- git checkout $BRANCH_NAME
- npm run build
- git status
- git config user.name github-actions
- git config user.email github-actions@github.com
- git add .
- git commit -m "chore: sync generated files"
- git push origin $BRANCH_NAME
- env:
- GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+# - name: Update pull request
+# if: ${{ steps.changes.outputs.changes && steps.branchExists.outputs.exists }}
+# run: |
+# git reset HEAD --hard
+# git checkout $BRANCH_NAME
+# npm run build
+# git status
+# git config user.name github-actions
+# git config user.email github-actions@github.com
+# git add .
+# git commit -m "chore: sync generated files"
+# git push origin $BRANCH_NAME
+# env:
+# GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
index 3331c6fdbb..76363204b3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -28,3 +28,12 @@ typings/axe-core/axe-core-tests.js
# doc
doc/rule-descriptions.*.md
+
+.history
+
+.flox/run
+.flox/cache
+.flox/lib
+.flox/log
+.flox/NPM_PREFIX
+.flox/NPM_CACHE
diff --git a/README.md b/README.md
index 9be7bd46e2..c56e55d8de 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,54 @@
+DEV: `HUSKY=0 npm i` (otherwise `"prepare": "husky"` in `w3c/aria-practices` fails https://github.com/w3c/aria-practices/blob/84b921a0c6646d2ddabaa94d918e165a1093daeb/package.json#L27 )
+
+- `rm -rf node_modules/ && rm -f package-lock.json && HUSKY=0 HUSKY_SKIP_HOOKS=0 HUSKY_SKIP_INSTALL=0 sfw npm install --foreground-scripts --ignore-scripts`
+- `npm audit`
+- `npm outdated`
+- `((npm exec --no --offline -- taze --fail-on-outdated --all --force --include-locked --concurrency 10 --loglevel debug --cwd . && npm exec --no --offline -- taze major --fail-on-outdated --all --force --include-locked --concurrency 10 --loglevel debug --cwd .) || echo OK)`
+- `npm run build`
+- `npx husky`
+- `rm -rf ~/.browser-driver-manager && npx browser-driver-manager install chromedriver --verbose`
+- `cat ~/.browser-driver-manager/.env`
+- `source ~/.browser-driver-manager/.env`
+- `env | grep -i CHROME`
+- `echo $CHROME_TEST_PATH`
+- `export CHROME_BIN="${CHROME_TEST_PATH}"`
+- `echo $CHROMEDRIVER_TEST_PATH`
+- `export CHROMEDRIVER_BIN="${CHROMEDRIVER_TEST_PATH}"`
+- `killall -9 "Google Chrome"`
+- `rm -rf ~/"Library/Application Support/Google"`
+- `npm run test:chromeheadless`
+- `killall -9 "Google Chrome"`
+- `rm -rf ~/"Library/Application Support/Google"`
+- `npm run test:chrome`
+- `killall -9 "Google Chrome"`
+- `rm -rf ~/"Library/Application Support/Google"`
+
+`--ignore-scripts` must be used, see:
+aria-practices@0.0.0 prepare
+fsevents@2.3.2 install
+pre-commit@1.2.2 install
+spawn-sync@1.0.15 postinstall
+act-tools@1.0.0 postinstall
+act-tools@1.0.0 prebuild
+act-tools@1.0.0 build
+act-tools@1.0.0 prepare
+==> husky install!! (not skipped by `export HUSKY=0; export HUSKY_SKIP_HOOKS=0; export HUSKY_SKIP_INSTALL=0;` ???)
+https://github.com/act-rules/act-tools/blob/31ea4ae3553f1d4be885edf7568e8461b04a927a/package.json#L21C17-L21C22
+
+`npm cache clean --force` and/or `rm -f .git/hooks/pre-commit` might be necessary.
+
+// _ `cd node_modules && cd aria-practices && HUSKY=0 HUSKY_SKIP_HOOKS=0 HUSKY_SKIP_INSTALL=0 npm run prepare && cd ../..`
+// _ `cd node_modules && cd aria-practices && export HUSKY=0; export HUSKY_SKIP_HOOKS=0; export HUSKY_SKIP_INSTALL=0; npm run prepare --foreground-scripts; cd ../..`
+
+`~/.browser-driver-manager/.env`
+==>
+
+```
+CHROME_TEST_PATH="/Users/U/.browser-driver-manager/chrome/mac_arm-145.0.7632.46/chrome-mac-arm64/Google Chrome for Testing.app/Contents/MacOS/Google Chrome for Testing"
+CHROMEDRIVER_TEST_PATH="/Users/U/.browser-driver-manager/chromedriver/mac_arm-145.0.7632.46/chromedriver-mac-arm64/chromedriver"
+CHROME_TEST_VERSION="145.0.7632.46"
+```
+
# axe-core
[](LICENSE)
diff --git a/bower.json b/bower.json
index d1d2ba54d0..9a83964196 100644
--- a/bower.json
+++ b/bower.json
@@ -1,6 +1,6 @@
{
"name": "axe-core",
- "version": "4.11.1",
+ "version": "4.11.1-canary.2",
"deprecated": true,
"contributors": [
{
diff --git a/build/configure.js b/build/configure.js
index a8a1da734f..03ee29b8ad 100644
--- a/build/configure.js
+++ b/build/configure.js
@@ -10,10 +10,8 @@ var { encode } = require('html-entities');
var packageJSON = require('../package.json');
var doTRegex = /\{\{.+?\}\}/g;
-var axeVersion = packageJSON.version.substring(
- 0,
- packageJSON.version.lastIndexOf('.')
-);
+var _v = packageJSON.version.replace(/-\w+\.\w+$/, '');
+var axeVersion = _v.substring(0, _v.lastIndexOf('.'));
var descriptionTableHeader =
'| Rule ID | Description | Impact | Tags | Issue Type | [ACT Rules](https://www.w3.org/WAI/standards-guidelines/act/rules/) |\n| :------- | :------- | :------- | :------- | :------- | :------- |\n';
diff --git a/build/tasks/update-help.js b/build/tasks/update-help.js
index 417a94048c..0e947549fb 100644
--- a/build/tasks/update-help.js
+++ b/build/tasks/update-help.js
@@ -9,7 +9,7 @@ module.exports = function (grunt) {
var options = this.options({
version: '1.0.0'
});
- var v = options.version.split('.');
+ var v = options.version.replace(/-\w+\.\w+$/, '').split('.');
v.pop();
var baseUrl =
'https://dequeuniversity.com/rules/axe/' + v.join('.') + '/';
diff --git a/build/tasks/validate.js b/build/tasks/validate.js
index ea601806ce..ab99d820c8 100644
--- a/build/tasks/validate.js
+++ b/build/tasks/validate.js
@@ -310,6 +310,7 @@ function validateRule({ tags, metadata }) {
const miscTags = ['ACT', 'experimental', 'review-item', 'deprecated'];
const categories = [
+ 'epub',
'aria',
'color',
'forms',
diff --git a/doc/examples/qunit/Gruntfile.js b/doc/examples/qunit/Gruntfile.js
index 19c11eedf5..a2a8f6dc6d 100644
--- a/doc/examples/qunit/Gruntfile.js
+++ b/doc/examples/qunit/Gruntfile.js
@@ -5,7 +5,7 @@ module.exports = function (grunt) {
grunt.initConfig({
qunit: {
- all: ['test/**/*.html'],
+ all: ['test/**/*.html', 'test/**/*__.xhtml'],
options: {
puppeteer: {
args: ['--disable-web-security', '--allow-file-access-from-files']
diff --git a/doc/rule-descriptions.md b/doc/rule-descriptions.md
index 6858eb6300..12e787941e 100644
--- a/doc/rule-descriptions.md
+++ b/doc/rule-descriptions.md
@@ -87,9 +87,10 @@
These rules are disabled by default, until WCAG 2.2 is more widely adopted and required.
-| Rule ID | Description | Impact | Tags | Issue Type | [ACT Rules](https://www.w3.org/WAI/standards-guidelines/act/rules/) |
-| :------------------------------------------------------------------------------------------------ | :-------------------------------------------------- | :------ | :--------------------------------------------- | :------------------------- | :------------------------------------------------------------------ |
-| [target-size](https://dequeuniversity.com/rules/axe/4.11/target-size?application=RuleDescription) | Ensure touch targets have sufficient size and space | Serious | cat.sensory-and-visual-cues, wcag22aa, wcag258 | failure, needs review | |
+| Rule ID | Description | Impact | Tags | Issue Type | [ACT Rules](https://www.w3.org/WAI/standards-guidelines/act/rules/) |
+| :-------------------------------------------------------------------------------------------------------- | :-------------------------------------------------- | :------- | :--------------------------------------------- | :------------------------- | :------------------------------------------------------------------ |
+| [pagebreak-label](https://dequeuniversity.com/rules/axe/4.11/pagebreak-label?application=RuleDescription) | Ensure page markers have an accessible label | Moderate | cat.epub | failure, needs review | |
+| [target-size](https://dequeuniversity.com/rules/axe/4.11/target-size?application=RuleDescription) | Ensure touch targets have sufficient size and space | Serious | cat.sensory-and-visual-cues, wcag22aa, wcag258 | failure, needs review | |
## Best Practices Rules
@@ -104,6 +105,7 @@ Rules that do not necessarily conform to WCAG success criterion but are industry
| [aria-treeitem-name](https://dequeuniversity.com/rules/axe/4.11/aria-treeitem-name?application=RuleDescription) | Ensure every ARIA treeitem node has an accessible name | Serious | cat.aria, best-practice | failure, needs review | |
| [empty-heading](https://dequeuniversity.com/rules/axe/4.11/empty-heading?application=RuleDescription) | Ensure headings have discernible text | Minor | cat.name-role-value, best-practice | failure, needs review | [ffd0e9](https://act-rules.github.io/rules/ffd0e9) |
| [empty-table-header](https://dequeuniversity.com/rules/axe/4.11/empty-table-header?application=RuleDescription) | Ensure table headers have discernible text | Minor | cat.name-role-value, best-practice | failure, needs review | |
+| [epub-type-has-matching-role](https://dequeuniversity.com/rules/axe/4.11/epub-type-has-matching-role?application=RuleDescription) | Ensure the element has an ARIA role matching its epub:type | Moderate | cat.aria, best-practice | failure | |
| [frame-tested](https://dequeuniversity.com/rules/axe/4.11/frame-tested?application=RuleDescription) | Ensure <iframe> and <frame> elements contain the axe-core script | Critical | cat.structure, best-practice, review-item | failure, needs review | |
| [heading-order](https://dequeuniversity.com/rules/axe/4.11/heading-order?application=RuleDescription) | Ensure the order of headings is semantically correct | Moderate | cat.semantics, best-practice | failure, needs review | |
| [image-redundant-alt](https://dequeuniversity.com/rules/axe/4.11/image-redundant-alt?application=RuleDescription) | Ensure image alternative is not repeated as text | Minor | cat.text-alternatives, best-practice | failure | |
diff --git a/lib/checks/aria/aria-prohibited-attr-evaluate.js b/lib/checks/aria/aria-prohibited-attr-evaluate.js
index bee8b99a7b..3e4d9c73c5 100644
--- a/lib/checks/aria/aria-prohibited-attr-evaluate.js
+++ b/lib/checks/aria/aria-prohibited-attr-evaluate.js
@@ -36,10 +36,10 @@ export default function ariaProhibitedAttrEvaluate(
const { nodeName } = virtualNode.props;
const role = getRole(virtualNode, {
chromium: true,
+ dpub: true,
// this check allows fallback roles. For example, `
` is legal.
fallback: true
});
-
const prohibitedList = listProhibitedAttrs(
virtualNode,
role,
diff --git a/lib/checks/aria/aria-required-children-evaluate.js b/lib/checks/aria/aria-required-children-evaluate.js
index 1e7512d960..f2662ec75c 100644
--- a/lib/checks/aria/aria-required-children-evaluate.js
+++ b/lib/checks/aria/aria-required-children-evaluate.js
@@ -75,6 +75,8 @@ export default function ariaRequiredChildrenEvaluate(
*/
function getOwnedRoles(virtualNode, required) {
let vNode;
+ const parentRole = getRole(virtualNode, { dpub: true });
+
const ownedRoles = [];
const ownedVirtual = getOwnedVirtual(virtualNode);
while ((vNode = ownedVirtual.shift())) {
@@ -85,7 +87,8 @@ function getOwnedRoles(virtualNode, required) {
continue;
}
- const role = getRole(vNode, { noPresentational: true });
+ const role = getRole(vNode, { noPresentational: true, dpub: true });
+
const globalAriaAttr = getGlobalAriaAttr(vNode);
const hasGlobalAriaOrFocusable = !!globalAriaAttr || isFocusable(vNode);
@@ -96,7 +99,9 @@ function getOwnedRoles(virtualNode, required) {
if (
(!role && !hasGlobalAriaOrFocusable) ||
(['group', 'rowgroup'].includes(role) &&
- required.some(requiredRole => requiredRole === role))
+ required.some(requiredRole => requiredRole === role)) ||
+ (['list'].includes(role) &&
+ ['doc-bibliography', 'doc-endnotes'].includes(parentRole))
) {
ownedVirtual.push(...vNode.children);
} else if (role || hasGlobalAriaOrFocusable) {
diff --git a/lib/checks/aria/matching-aria-role-evaluate.js b/lib/checks/aria/matching-aria-role-evaluate.js
new file mode 100644
index 0000000000..ca1d7316bc
--- /dev/null
+++ b/lib/checks/aria/matching-aria-role-evaluate.js
@@ -0,0 +1,190 @@
+import { tokenList } from '../../core/utils';
+import standards from '../../standards';
+import { getRole } from '../../commons/aria';
+import matchesSelector from '../../core/utils/element-matches';
+
+function matchingAriaRoleEvaluate(node) {
+ // https://idpf.github.io/epub-guides/epub-aria-authoring/#sec-mappings
+ // https://www.w3.org/TR/dpub-aam-1.0/#mapping_role_table
+ // https://w3c.github.io/publ-cg/guides/aria-mapping.html#mapping-table
+ const mappings = new Map([
+ ['abstract', 'doc-abstract'],
+ ['acknowledgments', 'doc-acknowledgments'],
+ ['afterword', 'doc-afterword'],
+ // ['answer', '??'],
+ // ['answers', '??'],
+ ['appendix', 'doc-appendix'],
+ // ['assessment', '??'],
+ // ['assessments', '??'],
+ // ['backmatter', '??'],
+ // ['balloon', '??'],
+ // ['backlink', 'doc-backlink'], // ??
+ ['biblioentry', 'doc-biblioentry'],
+ ['bibliography', 'doc-bibliography'],
+ ['biblioref', 'doc-biblioref'],
+ // ['bodymatter', '??'],
+ // ['bridgehead', '??'],
+ // ['case-study', '??'],
+ ['chapter', 'doc-chapter'],
+ ['colophon', 'doc-colophon'],
+ // ['concluding-sentence', '??'],
+ ['conclusion', 'doc-conclusion'],
+ // ['contributors', '??'],
+ // ['copyright-page', '??'],
+ // ['cover', '??'],
+ // ['cover-image', 'doc-cover'], // ??
+ // ['covertitle', '??'],
+ ['credit', 'doc-credit'],
+ ['credits', 'doc-credits'],
+ ['dedication', 'doc-dedication'],
+ // ['division', '??'],
+ ['endnote', 'doc-endnote'],
+ ['endnotes', 'doc-endnotes'],
+ ['epigraph', 'doc-epigraph'],
+ ['epilogue', 'doc-epilogue'],
+ ['errata', 'doc-errata'],
+ // ['example', 'doc-example'],
+ // ['feedback', '??'],
+ ['figure', 'figure'], // ARIA
+ // ['fill-in-the-blank-problem', '??'],
+ ['footnote', 'doc-footnote'],
+ // ['footnotes', '??'],
+ ['foreword', 'doc-foreword'],
+ // ['frontmatter', '??'],
+ // ['fulltitle', '??'],
+ // ['general-problem', '??'],
+ ['glossary', 'doc-glossary'],
+ ['glossdef', 'definition'], // ARIA
+ ['glossref', 'doc-glossref'],
+ ['glossterm', 'term'], // ARIA
+ // ['halftitle', '??'],
+ // ['halftitlepage', '??'],
+ // ['imprimatur', '??'],
+ // ['imprint', '??'],
+ ['help', 'doc-tip'], // ??
+ ['index', 'doc-index'],
+ // ['index-editor-note', '??'],
+ // ['index-entry', '??'],
+ // ['index-entry-list', '??'],
+ // ['index-group', '??'],
+ // ['index-headnotes', '??'],
+ // ['index-legend', '??'],
+ // ['index-locator', '??'],
+ // ['index-locator-list', '??'],
+ // ['index-locator-range', '??'],
+ // ['index-term', '??'],
+ // ['index-term-categories', '??'],
+ // ['index-term-category', '??'],
+ // ['index-xref-preferred', '??'],
+ // ['index-xref-related', '??'],
+ ['introduction', 'doc-introduction'],
+ // ['keyword', '??'],
+ // ['keywords', '??'],
+ // ['label', '??'],
+ // ['landmarks', 'directory'], // ARIA (SKIPPED! NavDoc)
+ // ['learning-objective', '??'],
+ // ['learning-objectives', '??'],
+ // ['learning-outcome', '??'],
+ // ['learning-outcomes', '??'],
+ // ['learning-resource', '??'],
+ // ['learning-resources', '??'],
+ // ['learning-standard', '??'],
+ // ['learning-standards', '??'],
+ ['list', 'list'], // ARIA
+ ['list-item', 'listitem'], // ARIA
+ // ['loa', '??'],
+ // ['loi', '??'],
+ // ['lot', '??'],
+ // ['lov', '??'],
+ // ['match-problem', '??'],
+ // ['multiple-choice-problem', '??'],
+ ['noteref', 'doc-noteref'],
+ ['notice', 'doc-notice'],
+ // ['ordinal', '??'],
+ // ['other-credits', '??'],
+ ['page-list', 'doc-pagelist'],
+ ['pagebreak', 'doc-pagebreak'],
+ // ['panel', '??'],
+ // ['panel-group', '??'],
+ ['part', 'doc-part'],
+ // ['practice', '??'],
+ // ['practices', '??'],
+ // ['preamble', '??'],
+ ['preface', 'doc-preface'],
+ ['prologue', 'doc-prologue'],
+ ['pullquote', 'doc-pullquote'],
+ ['qna', 'doc-qna'],
+ // ['question', '??'],
+ ['referrer', 'doc-backlink'],
+ // ['revision-history', '??'],
+ // ['seriespage', '??'],
+ // ['sound-area', '??'],
+ // ['subchapter', '??'],
+ ['subtitle', 'doc-subtitle'],
+ ['table', 'table'],
+ ['table-cell', 'cell'],
+ ['table-row', 'row'],
+ // ['text-area', '??'],
+ ['tip', 'doc-tip'],
+ // ['title', '??'],
+ // ['titlepage', '??'],
+ ['toc', 'doc-toc']
+ // ['toc-brief', '??'],
+ // ['topic-sentence', '??'],
+ // ['true-false-problem', '??'],
+ // ['volume', '??'],
+ ]);
+
+ const hasXmlEpubType = node.hasAttributeNS(
+ 'http://www.idpf.org/2007/ops',
+ 'type'
+ );
+ if (
+ hasXmlEpubType ||
+ node.hasAttribute('epub:type') // for unit tests that are not XML-aware due to fixture.innerHTML
+ ) {
+ // abort if descendant of landmarks nav (nav with epub:type=landmarks)
+ if (
+ (hasXmlEpubType && matchesSelector(node, 'nav[*|type~="landmarks"] *')) ||
+ matchesSelector(node, 'nav[epub\\:type~="landmarks"] *')
+ ) {
+ // console.log('BREAKPOINT');
+ // throw new Error('BREAKPOINT');
+ return true;
+ }
+
+ // iterate for each epub:type value
+ var types = tokenList(
+ hasXmlEpubType
+ ? node.getAttributeNS('http://www.idpf.org/2007/ops', 'type')
+ : node.getAttribute('epub:type')
+ );
+ for (const type of types) {
+ // If there is a 1-1 mapping, check that the role is set (best practice)
+ if (mappings.has(type)) {
+ // Note: using axe’s `getRole` util returns the effective role of the element
+ // (either explicitly set with the role attribute or implicit)
+ // So this works for types mapping to core ARIA roles (eg. glossref/glossterm).
+ const mappedRole = mappings.get(type);
+ const role = getRole(node, { dpub: true });
+ const roleDefinition = standards.ariaRoles[mappedRole];
+ if (!roleDefinition || roleDefinition.deprecated) {
+ return true;
+ }
+ // if (mappedRole !== role) {
+ // console.log('BREAKPOINT: ', type, mappedRole, role);
+ // // throw new Error('BREAKPOINT');
+ // }
+ return mappedRole === role;
+ } else {
+ // e.g. cover, landmarks
+ // console.log('BREAKPOINT: ', type);
+ // throw new Error('BREAKPOINT');
+ }
+ }
+ }
+
+ return true;
+}
+
+export default matchingAriaRoleEvaluate;
diff --git a/lib/checks/aria/matching-aria-role.json b/lib/checks/aria/matching-aria-role.json
new file mode 100644
index 0000000000..fb78acbe75
--- /dev/null
+++ b/lib/checks/aria/matching-aria-role.json
@@ -0,0 +1,11 @@
+{
+ "id": "matching-aria-role",
+ "evaluate": "matching-aria-role-evaluate",
+ "metadata": {
+ "impact": "minor",
+ "messages": {
+ "pass": "Element has an ARIA role matching its epub:type",
+ "fail": "Element has no ARIA role matching its epub:type"
+ }
+ }
+}
diff --git a/lib/checks/landmarks/landmark-is-unique-after.js b/lib/checks/landmarks/landmark-is-unique-after.js
index fd4eb2ec1f..71628c861c 100644
--- a/lib/checks/landmarks/landmark-is-unique-after.js
+++ b/lib/checks/landmarks/landmark-is-unique-after.js
@@ -1,11 +1,20 @@
function landmarkIsUniqueAfter(results) {
const uniqueLandmarks = [];
+ // console.log("landmarkIsUniqueAfter results: ", JSON.stringify(results, null, 4));
+
// filter out landmark elements that share the same role and accessible text
// so every non-unique landmark isn't reported as a failure (just the first)
- return results.filter(currentResult => {
- const findMatch = someResult => {
+ var filtered = results.filter(currentResult => {
+ if (!currentResult.data) {
+ // console.log('landmarkIsUniqueAfterlandmarkIsUniqueAfter NO DATA???!!!');
+ return false;
+ }
+
+ var findMatch = someResult => {
return (
+ // currentResult.data.isLandmark &&
+ // someResult.data.isLandmark &&
currentResult.data.role === someResult.data.role &&
currentResult.data.accessibleText === someResult.data.accessibleText
);
@@ -22,6 +31,9 @@ function landmarkIsUniqueAfter(results) {
currentResult.relatedNodes = [];
return true;
});
+
+ // console.log("landmarkIsUniqueAfter filtered: ", JSON.stringify(filtered, null, 4));
+ return filtered;
}
export default landmarkIsUniqueAfter;
diff --git a/lib/checks/landmarks/landmark-is-unique-evaluate.js b/lib/checks/landmarks/landmark-is-unique-evaluate.js
index f8375fed5f..0a6e9151d7 100644
--- a/lib/checks/landmarks/landmark-is-unique-evaluate.js
+++ b/lib/checks/landmarks/landmark-is-unique-evaluate.js
@@ -1,11 +1,40 @@
-import { getRole } from '../../commons/aria';
+import { getRole } from '../../commons/aria'; // getRoleType
import { accessibleTextVirtual } from '../../commons/text';
+// import { getAriaRolesByType } from '../../commons/standards';
function landmarkIsUniqueEvaluate(node, options, virtualNode) {
- const role = getRole(node);
- let accessibleText = accessibleTextVirtual(virtualNode);
+ var role = getRole(node, { dpub: true }); // fallback: true
+ if (!role) {
+ // this.data({ role: '', accessibleText: '', isLandmark: null });
+ // console.log('landmarkIsUniqueEvaluate landmarkIsUniqueEvaluate landmarkIsUniqueEvaluate NO ROLE???!!!');
+ return false;
+ }
+
+ // var landmarks = getAriaRolesByType('landmark');
+ // var roleType = getRoleType(role);
+ // var isLandmark =
+ // roleType === 'landmark' ||
+ // landmarks.includes(roleType) ||
+ // landmarks.includes(role);
+
+ // if (!isLandmark) {
+ // // this.data({ role: '', accessibleText: '', isLandmark: null });
+ // return false;
+ // }
+ // throw new Error('BREAK');
+
+ var accessibleText = accessibleTextVirtual(virtualNode);
+
+ // console.log('\n\n ))))) ', virtualNode.props ? virtualNode.props.nodeName : '!virtualNode.props', role, roleType, JSON.stringify(landmarks), isLandmark, " [[" + accessibleText + "]]")
+
accessibleText = accessibleText ? accessibleText.toLowerCase() : null;
- this.data({ role: role, accessibleText: accessibleText });
+
+ this.data({
+ role: role,
+ accessibleText: accessibleText
+ // isLandmark: isLandmark
+ });
+
this.relatedNodes([node]);
return true;
diff --git a/lib/checks/lists/listitem-evaluate.js b/lib/checks/lists/listitem-evaluate.js
index dde5dcb4f4..13bb2c6278 100644
--- a/lib/checks/lists/listitem-evaluate.js
+++ b/lib/checks/lists/listitem-evaluate.js
@@ -1,4 +1,8 @@
-import { isValidRole, getExplicitRole } from '../../commons/aria';
+import {
+ getExplicitRole,
+ getSuperClassRole,
+ isValidRole
+} from '../../commons/aria';
export default function listitemEvaluate(node, options, virtualNode) {
const { parent } = virtualNode;
@@ -15,6 +19,11 @@ export default function listitemEvaluate(node, options, virtualNode) {
}
if (parentRole && isValidRole(parentRole)) {
+ const sup = getSuperClassRole(parentRole);
+ if (sup && sup.includes('list')) {
+ return true;
+ }
+
this.data({
messageKey: 'roleNotValid'
});
diff --git a/lib/checks/lists/only-listitems-evaluate.js b/lib/checks/lists/only-listitems-evaluate.js
index 93ab74adfa..ff9d27d728 100644
--- a/lib/checks/lists/only-listitems-evaluate.js
+++ b/lib/checks/lists/only-listitems-evaluate.js
@@ -1,5 +1,6 @@
import { isVisibleToScreenReaders } from '../../commons/dom';
-import { getRole } from '../../commons/aria';
+
+import { getRole, getSuperClassRole } from '../../commons/aria';
/**
* @deprecated
@@ -27,7 +28,11 @@ function onlyListitemsEvaluate(node, options, virtualNode) {
isEmpty = false;
const isLi = actualNode.nodeName.toUpperCase() === 'LI';
const role = getRole(vNode);
- const isListItemRole = role === 'listitem';
+
+ const sup = getSuperClassRole(role);
+ const isListItemRole =
+ role === 'listitem' || (sup && sup.includes('listitem'));
+ // const isListItemRole = role === 'listitem';
if (!isLi && !isListItemRole) {
badNodes.push(actualNode);
diff --git a/lib/commons/aria/get-element-unallowed-roles.js b/lib/commons/aria/get-element-unallowed-roles.js
index 92544d47dd..037a8b0092 100644
--- a/lib/commons/aria/get-element-unallowed-roles.js
+++ b/lib/commons/aria/get-element-unallowed-roles.js
@@ -8,10 +8,10 @@ import { tokenList, isHtmlElement, nodeLookup } from '../../core/utils';
// HTML elements (img, link, etc.)
const dpubRoles = [
'doc-backlink',
- 'doc-biblioentry',
+ // 'doc-biblioentry',
'doc-biblioref',
'doc-cover',
- 'doc-endnote',
+ // 'doc-endnote',
'doc-glossref',
'doc-noteref'
];
diff --git a/lib/commons/aria/get-super-class-role.js b/lib/commons/aria/get-super-class-role.js
new file mode 100644
index 0000000000..52544dd100
--- /dev/null
+++ b/lib/commons/aria/get-super-class-role.js
@@ -0,0 +1,21 @@
+import standards from '../../standards';
+
+/**
+ * Get the `superclassRole` of an ARIA role
+ * @method getSuperClassRole
+ * @memberof axe.commons.aria
+ * @instance
+ * @param {String} role The role to look up
+ * @return {String|null} The role's `superclassRole` if the role is a known ARIA role, otherwise `null`
+ */
+function getSuperClassRole(role) {
+ const roleDef = standards.ariaRoles[role];
+
+ if (!roleDef) {
+ return null;
+ }
+
+ return roleDef.superclassRole;
+}
+
+export default getSuperClassRole;
diff --git a/lib/commons/aria/index.js b/lib/commons/aria/index.js
index 9eb5bb3a88..c41443ebfd 100644
--- a/lib/commons/aria/index.js
+++ b/lib/commons/aria/index.js
@@ -12,6 +12,7 @@ export { default as getExplicitRole } from './get-explicit-role';
export { default as getImplicitRole } from './implicit-role';
export { default as getOwnedVirtual } from './get-owned-virtual';
export { default as getRoleType } from './get-role-type';
+export { default as getSuperClassRole } from './get-super-class-role';
export { default as getRole } from './get-role';
export { default as getRolesByType } from './get-roles-by-type';
export { default as getRolesWithNameFromContents } from './get-roles-with-name-from-contents';
diff --git a/lib/commons/aria/lookup-table.js b/lib/commons/aria/lookup-table.js
index 84700f8f1c..11d3aa10d6 100644
--- a/lib/commons/aria/lookup-table.js
+++ b/lib/commons/aria/lookup-table.js
@@ -489,7 +489,7 @@ lookupTable.role = {
owned: null,
nameFrom: ['author'],
context: null,
- implicit: ['dd', 'dfn'],
+ implicit: ['dd'], // DAISY-AXE: remove 'dfn' which has implicit 'term' role, see https://www.w3.org/TR/html-aria/#docconformance
unsupported: false
},
dialog: {
@@ -590,7 +590,7 @@ lookupTable.role = {
]
},
'doc-biblioentry': {
- type: 'listitem',
+ type: 'structure',
attributes: {
allowed: [
'aria-expanded',
@@ -611,9 +611,11 @@ lookupTable.role = {
attributes: {
allowed: ['aria-expanded', 'aria-errormessage']
},
- owned: {
- one: ['doc-biblioentry']
- },
+ owned: null,
+ // owned: {
+ // // one: ['doc-biblioentry']
+ // one: ['list']
+ // },
nameFrom: ['author'],
context: null,
unsupported: false,
@@ -714,7 +716,7 @@ lookupTable.role = {
allowedElements: ['section']
},
'doc-endnote': {
- type: 'listitem',
+ type: 'structure',
attributes: {
allowed: [
'aria-expanded',
@@ -735,9 +737,11 @@ lookupTable.role = {
attributes: {
allowed: ['aria-expanded', 'aria-errormessage']
},
- owned: {
- one: ['doc-endnote']
- },
+ owned: null,
+ // owned: {
+ // // one: ['doc-endnote']
+ // one: ['list']
+ // },
namefrom: ['author'],
context: null,
unsupported: false,
@@ -775,8 +779,11 @@ lookupTable.role = {
unsupported: false,
allowedElements: ['section']
},
+ // https://www.w3.org/TR/dpub-aria-1.0/#doc-example
+ // ==> (was 'section' now 'figure')
+ // https://www.w3.org/TR/dpub-aria-1.1/#doc-example
'doc-example': {
- type: 'section',
+ type: 'structure',
attributes: {
allowed: ['aria-expanded', 'aria-errormessage']
},
@@ -813,7 +820,8 @@ lookupTable.role = {
attributes: {
allowed: ['aria-expanded', 'aria-errormessage']
},
- owned: ['term', 'definition'],
+ owned: null,
+ // owned: ['term', 'definition'],
namefrom: ['author'],
context: null,
unsupported: false,
@@ -895,6 +903,7 @@ lookupTable.role = {
},
owned: null,
namefrom: ['author'],
+ nameFromContent: true,
context: null,
unsupported: false,
allowedElements: ['hr']
@@ -943,8 +952,11 @@ lookupTable.role = {
unsupported: false,
allowedElements: ['section']
},
+ // https://www.w3.org/TR/dpub-aria-1.0/#doc-pullquote
+ // ==> (was 'none' now 'section')
+ // https://www.w3.org/TR/dpub-aria-1.1/#doc-pullquote
'doc-pullquote': {
- type: 'none',
+ type: 'section',
attributes: {
allowed: ['aria-expanded']
},
@@ -972,6 +984,7 @@ lookupTable.role = {
},
owned: null,
namefrom: ['author'],
+ nameFromContent: true,
context: null,
unsupported: false,
allowedElements: {
@@ -1163,7 +1176,7 @@ lookupTable.role = {
},
nameFrom: ['author'],
context: null,
- implicit: ['ol', 'ul', 'dl'],
+ implicit: ['ol', 'ul'], // DAISY-AXE: remove 'dl' which has no implicit role, see https://www.w3.org/TR/html-aria/#docconformance
unsupported: false
},
listbox: {
@@ -1202,7 +1215,7 @@ lookupTable.role = {
owned: null,
nameFrom: ['author', 'contents'],
context: ['list'],
- implicit: ['li', 'dt'],
+ implicit: ['li'], // DAISY-AXE: remove 'dt' which has implicit 'term' role, see https://www.w3.org/TR/html-aria/#docconformance
unsupported: false
},
log: {
@@ -1944,7 +1957,7 @@ lookupTable.role = {
owned: null,
nameFrom: ['author', 'contents'],
context: null,
- implicit: ['dt'],
+ implicit: ['dt', 'dfn'], // DAISY-AXE: add 'dfn' which has implicit 'term' role, see https://www.w3.org/TR/html-aria/#docconformance
unsupported: false
},
textbox: {
diff --git a/lib/core/base/audit.js b/lib/core/base/audit.js
index 3353545af1..d284e23a1a 100644
--- a/lib/core/base/audit.js
+++ b/lib/core/base/audit.js
@@ -778,11 +778,12 @@ function createIncompleteErrorResult(rule, error) {
* For all the rules, create the helpUrl and add it to the data for that rule
*/
function getHelpUrl({ brand, application, lang }, ruleId, version) {
+ var _v = version ? version : axe.version.replace(/-\w+\.\w+$/, '');
return (
constants.helpUrlBase +
brand +
'/' +
- (version || axe.version.substring(0, axe.version.lastIndexOf('.'))) +
+ (version || _v.substring(0, _v.lastIndexOf('.'))) +
'/' +
ruleId +
'?application=' +
diff --git a/lib/core/public/configure.js b/lib/core/public/configure.js
index afe73582b1..76320e8dd5 100644
--- a/lib/core/public/configure.js
+++ b/lib/core/public/configure.js
@@ -3,6 +3,7 @@ import { configureStandards } from '../../standards';
import constants from '../constants';
function configure(spec) {
+ // throw new Error("DAISY-AXE BREAKPOINT AXE CONFIGURE");
const audit = axe._audit;
if (!audit) {
@@ -52,7 +53,6 @@ function configure(spec) {
spec.checks.forEach(check => {
if (!check.id) {
throw new TypeError(
- // eslint-disable-next-line max-len
`Configured check ${JSON.stringify(
check
)} is invalid. Checks must be an object with at least an id property`
@@ -72,7 +72,6 @@ function configure(spec) {
spec.rules.forEach(rule => {
if (!rule.id) {
throw new TypeError(
- // eslint-disable-next-line max-len
`Configured rule ${JSON.stringify(
rule
)} is invalid. Rules must be an object with at least an id property`
diff --git a/lib/rules/epub-type-has-matching-role-matches.js b/lib/rules/epub-type-has-matching-role-matches.js
new file mode 100644
index 0000000000..21d542169f
--- /dev/null
+++ b/lib/rules/epub-type-has-matching-role-matches.js
@@ -0,0 +1,19 @@
+function epubTypeHasMatchingRoleMatches(node) {
+ // selector: '[*|type]',
+ return (
+ node.hasAttributeNS('http://www.idpf.org/2007/ops', 'type') ||
+ node.hasAttribute('epub:type') // for unit tests that are not XML-aware due to fixture.innerHTML
+ );
+
+ // console.log('node.nodeName: ', node.nodeName);
+ // const attrs = Array.from(getNodeAttributes(node));
+ // console.log(attrs.length);
+ // attrs.forEach((attr) => {
+ // console.log('\n=====');
+ // console.log(JSON.stringify(attr));
+ // console.log('attr.nodeName: ', attr.nodeName);
+ // console.log('attr.namespaceURI: ', attr.namespaceURI);
+ // });
+}
+
+export default epubTypeHasMatchingRoleMatches;
diff --git a/lib/rules/epub-type-has-matching-role.json b/lib/rules/epub-type-has-matching-role.json
new file mode 100644
index 0000000000..f0f260a906
--- /dev/null
+++ b/lib/rules/epub-type-has-matching-role.json
@@ -0,0 +1,13 @@
+{
+ "id": "epub-type-has-matching-role",
+ "impact": "moderate",
+ "matches": "epub-type-has-matching-role-matches",
+ "tags": ["cat.aria", "best-practice"],
+ "metadata": {
+ "description": "Ensure the element has an ARIA role matching its epub:type",
+ "help": "ARIA role should be used in addition to epub:type"
+ },
+ "all": [],
+ "any": ["matching-aria-role"],
+ "none": []
+}
diff --git a/lib/rules/landmark-one-main.json b/lib/rules/landmark-one-main.json
index 34584d5b3d..cfb38a6eef 100644
--- a/lib/rules/landmark-one-main.json
+++ b/lib/rules/landmark-one-main.json
@@ -7,7 +7,7 @@
"description": "Ensure the document has a main landmark",
"help": "Document should have one main landmark"
},
- "all": ["page-has-main"],
+ "all": ["page-no-duplicate-main"],
"any": [],
"none": []
}
diff --git a/lib/rules/landmark-unique-matches.js b/lib/rules/landmark-unique-matches.js
index d651949764..ec213912a9 100644
--- a/lib/rules/landmark-unique-matches.js
+++ b/lib/rules/landmark-unique-matches.js
@@ -1,7 +1,8 @@
import { isVisibleToScreenReaders } from '../commons/dom';
-import { getRole } from '../commons/aria';
+import { getRole, getRoleType } from '../commons/aria';
import { getAriaRolesByType } from '../commons/standards';
import { accessibleTextVirtual } from '../commons/text';
+// import { closest } from '../core/utils';
export default function landmarkUniqueMatches(node, virtualNode) {
return (
@@ -11,7 +12,7 @@ export default function landmarkUniqueMatches(node, virtualNode) {
function isLandmarkVirtual(vNode) {
const landmarkRoles = getAriaRolesByType('landmark');
- const role = getRole(vNode);
+ const role = getRole(vNode, { dpub: true });
if (!role) {
return false;
}
@@ -23,5 +24,11 @@ function isLandmarkVirtual(vNode) {
return !!accessibleText;
}
- return landmarkRoles.indexOf(role) >= 0 || role === 'region';
+ const roleType = getRoleType(role);
+ return (
+ role === 'region' ||
+ roleType === 'landmark' ||
+ landmarkRoles.includes(roleType) ||
+ landmarkRoles.indexOf(role) >= 0
+ );
}
diff --git a/lib/rules/pagebreak-label-matches.js b/lib/rules/pagebreak-label-matches.js
new file mode 100644
index 0000000000..0feaab4710
--- /dev/null
+++ b/lib/rules/pagebreak-label-matches.js
@@ -0,0 +1,16 @@
+function pagebreakLabelMatches(node) {
+ // selector: '[*|type~="pagebreak"], [role~="doc-pagebreak"]',
+ return (
+ (node.hasAttribute('role') &&
+ node.getAttribute('role').match(/\S+/g)?.includes('doc-pagebreak')) ||
+ (node.hasAttributeNS('http://www.idpf.org/2007/ops', 'type') &&
+ node
+ .getAttributeNS('http://www.idpf.org/2007/ops', 'type')
+ .match(/\S+/g)
+ ?.includes('pagebreak')) ||
+ (node.hasAttribute('epub:type') &&
+ node.getAttribute('epub:type').match(/\S+/g).includes('pagebreak')) // for unit tests that are not XML-aware due to fixture.innerHTML
+ );
+}
+
+export default pagebreakLabelMatches;
diff --git a/lib/rules/pagebreak-label.json b/lib/rules/pagebreak-label.json
new file mode 100644
index 0000000000..521bb81fce
--- /dev/null
+++ b/lib/rules/pagebreak-label.json
@@ -0,0 +1,18 @@
+{
+ "id": "pagebreak-label",
+ "impact": "moderate",
+ "matches": "pagebreak-label-matches",
+ "tags": ["cat.epub"],
+ "metadata": {
+ "description": "Ensure page markers have an accessible label",
+ "help": "Page markers must have an accessible label"
+ },
+ "all": [],
+ "any": [
+ "aria-label",
+ "aria-labelledby",
+ "non-empty-title",
+ "has-visible-text"
+ ],
+ "none": []
+}
diff --git a/lib/standards/dpub-roles.js b/lib/standards/dpub-roles.js
index c2f8d33b95..bd1b88ecbe 100644
--- a/lib/standards/dpub-roles.js
+++ b/lib/standards/dpub-roles.js
@@ -27,7 +27,7 @@ const dpubRoles = {
superclassRole: ['link']
},
'doc-biblioentry': {
- type: 'listitem',
+ type: 'structure',
allowedAttrs: [
'aria-expanded',
'aria-level',
@@ -36,11 +36,15 @@ const dpubRoles = {
],
superclassRole: ['listitem'],
deprecated: true
+ // requiredContext: ['doc-bibliography']
+ // requiredContext: ['list', 'listitem']
},
'doc-bibliography': {
type: 'landmark',
allowedAttrs: ['aria-expanded'],
superclassRole: ['landmark']
+ // requiredOwned: ['doc-biblioentry']
+ // requiredOwned: ['list']
},
'doc-biblioref': {
type: 'link',
@@ -84,20 +88,24 @@ const dpubRoles = {
superclassRole: ['section']
},
'doc-endnote': {
- type: 'listitem',
+ type: 'structure',
allowedAttrs: [
'aria-expanded',
'aria-level',
'aria-posinset',
'aria-setsize'
],
- superclassRole: ['listitem'],
+ superclassRole: ['none'],
deprecated: true
+ // requiredContext: ['doc-endnotes']
+ // requiredContext: ['list', 'listitem']
},
'doc-endnotes': {
type: 'landmark',
allowedAttrs: ['aria-expanded'],
superclassRole: ['landmark']
+ // requiredOwned: ['doc-endnote']
+ // requiredOwned: ['list']
},
'doc-epigraph': {
type: 'section',
@@ -114,10 +122,13 @@ const dpubRoles = {
allowedAttrs: ['aria-expanded'],
superclassRole: ['landmark']
},
+ // https://www.w3.org/TR/dpub-aria-1.0/#doc-example
+ // ==> (was 'section' now 'figure')
+ // https://www.w3.org/TR/dpub-aria-1.1/#doc-example
'doc-example': {
- type: 'section',
+ type: 'structure',
allowedAttrs: ['aria-expanded'],
- superclassRole: ['section']
+ superclassRole: ['figure']
},
'doc-footnote': {
type: 'section',
@@ -187,8 +198,11 @@ const dpubRoles = {
allowedAttrs: ['aria-expanded'],
superclassRole: ['landmark']
},
+ // https://www.w3.org/TR/dpub-aria-1.0/#doc-pullquote
+ // ==> (was 'none' now 'section')
+ // https://www.w3.org/TR/dpub-aria-1.1/#doc-pullquote
'doc-pullquote': {
- type: 'none',
+ type: 'section',
superclassRole: ['none']
},
'doc-qna': {
diff --git a/locales/_template.json b/locales/_template.json
index d1993f0d82..481bfda8dd 100644
--- a/locales/_template.json
+++ b/locales/_template.json
@@ -173,6 +173,10 @@
"description": "Ensure table headers have discernible text",
"help": "Table header text should not be empty"
},
+ "epub-type-has-matching-role": {
+ "description": "Ensure the element has an ARIA role matching its epub:type",
+ "help": "ARIA role should be used in addition to epub:type"
+ },
"focus-order-semantics": {
"description": "Ensure elements in the focus order have a role appropriate for interactive content",
"help": "Elements in the focus order should have an appropriate role"
@@ -341,6 +345,10 @@
"description": "Ensure that the page, or at least one of its frames contains a level-one heading",
"help": "Page should contain a level-one heading"
},
+ "pagebreak-label": {
+ "description": "Ensure page markers have an accessible label",
+ "help": "Page markers must have an accessible label"
+ },
"presentation-role-conflict": {
"description": "Ensure elements marked as presentational do not have global ARIA or tabindex so that all screen readers ignore them",
"help": "Elements marked as presentational should be consistently ignored"
@@ -596,6 +604,10 @@
"pass": "Element is focusable.",
"fail": "Element is not focusable."
},
+ "matching-aria-role": {
+ "pass": "Element has an ARIA role matching its epub:type",
+ "fail": "Element has no ARIA role matching its epub:type"
+ },
"no-implicit-explicit-label": {
"pass": "There is no mismatch between a