diff --git a/.dockerignore b/.dockerignore index d19710b66..d099ab1d2 100644 --- a/.dockerignore +++ b/.dockerignore @@ -2,4 +2,12 @@ public/ .github/ .git/ fireside-scraper -scraped-data \ No newline at end of file +scraped-data +# ignoring the Dockerfiles themselves +Dockerfile* +# ignoring all of the test directory +# container only needs dependencies, and tests are run during its execution +test/ +# allowing Pipfiles, since they're used in Dockerfile.tests +!/test/Pipfile +!/test/Pipfile.lock diff --git a/.github/workflows/build_container-prod.yml b/.github/workflows/build_container-prod.yml new file mode 100644 index 000000000..b11b64820 --- /dev/null +++ b/.github/workflows/build_container-prod.yml @@ -0,0 +1,92 @@ +name: Building Prod Container + +# built with the following guides/examples: +# - https://github.com/marketplace/actions/push-to-registry#examples +# - https://github.com/marketplace/actions/buildah-build#building-using-containerfiles + +on: + push: + branches: + - main + - develop + # don't re-build prod when there are only test changes + paths-ignore: + - ./Dockerfile.tests + - "./tests/**" + + workflow_dispatch: + + workflow_run: + workflows: ["Scrape and commit"] + types: + - completed + +jobs: + # this sets up some variables so the next job can use them for tags + env_setup: + runs-on: ubuntu-latest + outputs: + date_output: ${{ steps.date.outputs.date_fmt }} + registry_user: ${{ steps.registry_user.outputs.username }} + current_branch: ${{ steps.current_branch.outputs.current_branch }} + steps: + # used for tagging the container with the current date down to the seconds + # since we have so many changes, precision down to the seconds was required. Example: 2022-09-08T05_42_23-04_00 + - id: date + run: echo "::set-output name=date_fmt::$(date -Isec | tr ':' '_' | tr '+' '-' )" + # the username for the quay registry, since it can be derived from the quay secret's robot username + - id: registry_user + run: echo "::set-output name=username::$(cut -d '+' -f 1 <<< ${{ secrets.REGISTRY_USER }})" + - id: current_branch + run: echo "::set-output name=current_branch::$( [ "${GITHUB_REF_NAME}" != "main" ] && echo "-${GITHUB_REF_NAME:-}" )" + + prod: + strategy: + # this causes containers to be built at the same time + matrix: + # these 2 containers are being built separately because they need to have different --baseURL parameters + container_env: + # the tests_web one needs to have --baseURL='/', which allows the tests to run from any domain and allows + # it to be relative off of that domain. i.e. http://jb.io is the base domain, and they'll all be off of its root path + - 'TESTS_WEB' + # this is the normal production site being built and versioned + - 'PROD' + runs-on: ubuntu-latest + # requires the previous job to complete before attempting to run this job + needs: env_setup + # location where the credentials are stored for quay + environment: production_containers + steps: + - uses: actions/checkout@v2 + + - name: Building container image + id: build-image + uses: redhat-actions/buildah-build@v2 + with: + image: jb_web_container + # tagging with 2 different tags; this will let us have a consistent target of prod-latest or tests_web-latest, + # but still allow us to properly version each build of the containers (with the current time). 
This double + # tagging strategy will allow us to roll back changes on the prod environment if something goes awry + # example location of where to change that in prod's ansible IaC + # https://github.com/JupiterBroadcasting/infra/blob/3b9490a46119ffdc70018b2ac8315346d947a06d/ansible/group_vars/core.yaml#L517 + # tag example: prod-latest, prod-develop-latest, prod-develop-2022-09-12t23_15_22-04_00, or prod-2022-09-12t23_15_22-04_00 + tags: ${{ matrix.container_env }}${{ needs.env_setup.outputs.current_branch }}-latest ${{ matrix.container_env }}${{ needs.env_setup.outputs.current_branch }}-${{ needs.env_setup.outputs.date_output }} + # the prod container's Dockerfile + containerfiles: | + ./Dockerfile + # changes the build base URL based on the name passed into this build argument + build-args: | + BASE_URL=${{ matrix.container_env }} + + - name: Push To quay.io + id: push-to-quay + uses: redhat-actions/push-to-registry@v2 + with: + image: ${{ steps.build-image.outputs.image }} + tags: ${{ steps.build-image.outputs.tags }} + registry: quay.io/${{ needs.env_setup.outputs.registry_user }} + username: ${{ secrets.REGISTRY_USER }} + password: ${{ secrets.REGISTRY_PASSWORD }} + + - name: Print image url + run: echo "Image pushed to ${{ steps.push-to-quay.outputs.registry-paths }}" diff --git a/.github/workflows/build_container-tests.yml b/.github/workflows/build_container-tests.yml new file mode 100644 index 000000000..9240ddd52 --- /dev/null +++ b/.github/workflows/build_container-tests.yml @@ -0,0 +1,67 @@ +name: Building Tests Container + +# built with the following guides/examples: +# - https://github.com/marketplace/actions/push-to-registry#examples +# - https://github.com/marketplace/actions/buildah-build#building-using-containerfiles + +on: + push: + branches: + - main + - develop + paths: + - ./Dockerfile.tests + - ./tests/Pipfile.lock + + workflow_dispatch: + +jobs: + # this sets up some variables so the next job can use them for tags + env_setup: + runs-on: ubuntu-latest + outputs: + date_output: ${{ steps.date.outputs.date_fmt }} + registry_user: ${{ steps.registry_user.outputs.username }} + current_branch: ${{ steps.current_branch.outputs.current_branch }} + steps: + # used for tagging the container with the current date down to the seconds + # since we have so many changes, precision down to the seconds was required. Example: 2022-09-08T05_42_23-04_00 + - id: date + run: echo "::set-output name=date_fmt::$(date -Isec | tr ':' '_' | tr '+' '-' )" + # the username for the quay registry, since it can be derived from the quay secret's robot username + - id: registry_user + run: echo "::set-output name=username::$(cut -d '+' -f 1 <<< ${{ secrets.REGISTRY_USER }})" + - id: current_branch + run: echo "::set-output name=current_branch::$( [ "${GITHUB_REF_NAME}" != "main" ] && echo "-${GITHUB_REF_NAME:-}" )" + + tests: + runs-on: ubuntu-latest + needs: env_setup + # location where the credentials are stored for quay + environment: production_containers + steps: + - uses: actions/checkout@v2 + - name: Build Image + id: build-image + uses: redhat-actions/buildah-build@v2 + with: + image: jb_web_container + # tagging with 2 different tags; this will let us have a consistent target of tests-latest, + # but still allow us to properly version each build of the containers (with the current time). 
This double + # tagging strategy will allow us to roll back changes on the prod environment if something goes awry + tags: tests${{ needs.env_setup.outputs.current_branch }}-latest tests${{ needs.env_setup.outputs.current_branch }}-${{ needs.env_setup.outputs.date_output }} + containerfiles: | + ./Dockerfile.tests + + - name: Push To quay.io + id: push-to-quay + uses: redhat-actions/push-to-registry@v2 + with: + image: ${{ steps.build-image.outputs.image }} + tags: ${{ steps.build-image.outputs.tags }} + registry: quay.io/${{ needs.env_setup.outputs.registry_user }} + username: ${{ secrets.REGISTRY_USER }} + password: ${{ secrets.REGISTRY_PASSWORD }} + + - name: Print image url + run: echo "Image pushed to ${{ steps.push-to-quay.outputs.registry-paths }}" diff --git a/.github/workflows/build_containers.md b/.github/workflows/build_containers.md new file mode 100644 index 000000000..6c41e9fe6 --- /dev/null +++ b/.github/workflows/build_containers.md @@ -0,0 +1,30 @@ +# Containers Docs + +This is the documentation for how the build_container-*.yml files work, and the permissions/setup needed around them. + +## Registry + +We're using quay.io for the container registry; the reasons why are outlined in [this comment](https://github.com/JupiterBroadcasting/jupiterbroadcasting.com/issues/244#issuecomment-1213146790) + +## Authentication to registry + +1. Navigate to: `https://quay.io/` +2. Create a Red Hat account if you don't have one, or sign in +3. Navigate to the following (be sure to replace `<quay_username>` with your quay.io username): `https://quay.io/user/<quay_username>?tab=repos` +4. Click "Create New Repository" (near the top right) +5. Give the repo a name (this will be the name of your container), i.e. jb_web_container +6. Make sure you've set the Repository Visibility to Public, and choose "(Empty repository)" for the Initialize repository section +7. Navigate to the following (be sure to replace `<quay_username>` with your quay.io username): `https://quay.io/user/<quay_username>?tab=robots` +8. Click "Create Robot Account" +9. Give the robot a name (i.e. `jb_web_container`), and a description +10. Find the repo you just created, and select the dropdown that says "None" +11. Choose the "Write" permission +12. Click "Add permissions" +13. Click on the gear on the far right side of the screen for your new robot account +14. Click "View Credentials" +15. Create an environment based on [this guide](https://blog.elreydetoda.site/github-action-security/), and name the environment production_containers. +16. Click "Add Secret" under Environment secrets, and add the following secret: + * `REGISTRY_PASSWORD` = your robot account token (long string of random characters) +17. 
Then go to this link (replace `GH_user_name` with your GitHub username): `https://github.com/jupiterbroadcasting/jupiterbroadcasting.com/settings/secrets/actions/new`, to create a repo secret + * `REGISTRY_USER` = your robot account username + * This isn't necessarily that sensative, so creating a repo secret instead of an environment secret is ok diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml index fd9f329da..1f72b7adb 100644 --- a/.github/workflows/deploy-prod.yml +++ b/.github/workflows/deploy-prod.yml @@ -12,7 +12,8 @@ on: # scraper will run and not commit anything, but after #215 is solved # then it should only run after castablasta pushes a new episode workflow_run: - workflows: ["Scrape and commit"] + # putting the comment here to enable prod changes in the future to leverage the created containers + workflows: ["Scrape and commit"] #, "Building Prod Container"] types: - completed diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 5e4f668a2..a4908761f 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -1,14 +1,12 @@ name: E2E Tests on: - # keeping this till we figure out #371, so we can at least validate the - # scrapper PRs aren't breaking things - push: - branches: [ main ] - # https://frontside.com/blog/2020-05-26-github-actions-pull_request/ pull_request: - + push: + branches: + - main + - develop # Allows you to run this workflow manually from the Actions tab workflow_dispatch: @@ -18,36 +16,33 @@ jobs: steps: - name: Checkout uses: actions/checkout@v2 - - name: run hugo build - uses: jakejarvis/hugo-build-action@master - with: - args: --gc --config ./config.toml -b http://localhost:1313 - - name: run server - run: | - cd public && python -m http.server 1313 & - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.10' - - name: Install dependencies - working-directory: ./test - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - - name: Ensure browsers are installed - run: python -m playwright install --with-deps + + # using docker-compose, so we can re-use local Dockerfiles + - name: starting up containers + working-directory: ./test/ + env: + # https://stackoverflow.com/a/52192327 + DOCKER_BUILDKIT: 1 + # https://stackoverflow.com/a/69500575 + BUILDKIT_PROGRESS: plain + run: docker-compose up -d --build + - name: Run your tests - working-directory: ./test - run: pytest --base-url http://localhost:1313 -v --junitxml report.xml e2e/* + working-directory: ./test/ + # https://github.com/actions/runner/issues/241#issuecomment-577360161 + run: script -e -c "docker-compose exec testing pytest --base-url http://jb-com --junitxml report.xml" + - name: Save screenshots uses: actions/upload-artifact@v2 with: name: screenshots - path: test/screenshots/ + path: screenshots/ + - name: Publish Test Report uses: dorny/test-reporter@v1 - if: always() + # TODO: potential solution in the future: https://stackoverflow.com/a/70448851 + if: always() && github.event_name != 'pull_request' with: name: E2E Tests - path: './test/report.xml' + path: './report.xml' reporter: java-junit diff --git a/.gitignore b/.gitignore index 451bea053..f9177b315 100644 --- a/.gitignore +++ b/.gitignore @@ -18,6 +18,7 @@ $RECYCLE.BIN/ .vscode .kateproject .kateproject.local +.fleet # Ignore all the scraped data diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c40a2cce6..8cf9eba4b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -57,6 +57,8 @@ Our base of operations for 
group discussions on this project is our Matrix chat * ✅ Create a markdown checklist to indicate what's done and what's still missing ([see this example](https://github.com/JupiterBroadcasting/jupiterbroadcasting.com/pull/112)). * ⚙ If you are closing a specific issue, we encourage you to use [Github's closing keywords](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword) to automatically close the issue once the PR is merged. * 🚮 Feel free to delete your feature branch after it has been merged via the PR. There's a handy button on the GitHub interface, and in the worst case they can be reverted easily. +* 🎯 We currently use a `develop` branch as our default, so expect your PRs to target `develop` and not the otherwise standard `main` branch. +* 🚀 We've also integrated PR deploy previews for the current `develop` branch - see the Environments section of the repo's main page sidebar. All PRs also get their own unique deploy previews, found as a comment under the respective PR. ### What is the stack used for the site? diff --git a/Dockerfile b/Dockerfile index b60bb3c40..4814844ec 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,16 @@ FROM registry.gitlab.com/pages/hugo/hugo_extended:0.101.0 as builder +# this defaults to an empty variable +ARG BASE_URL WORKDIR /site COPY . /site -RUN hugo +# when the argument is empty or PROD, do a normal production build +# else if it's TESTS_WEB, use the specified baseURL "/", or +# exit 1, because that use case hasn't been accounted for +# https://explainshell.com/ is an awesome website which can explain this in more detail +# set -x : changes the shell's default verbosity and puts it in "debug" mode +RUN set -x && ( [ -z "${BASE_URL}" ] || [ "${BASE_URL}" == "PROD" ] ) \ + && hugo --gc \ + || ( [ "${BASE_URL}" == 'TESTS_WEB' ] && hugo --gc --baseURL "/" || exit 1 ) FROM nginx:alpine RUN rm -rf /usr/share/nginx/html/* diff --git a/Dockerfile.tests b/Dockerfile.tests index 57906d720..153ba3bea 100644 --- a/Dockerfile.tests +++ b/Dockerfile.tests @@ -1,20 +1,13 @@ -ARG PY_VERSION=3.10 - -FROM docker.io/python:${PY_VERSION} +# using v1.25.2-jammy because it's the only version which has +# python3.10, and is the latest version (the python version is also pinned in test/Pipfile, so make sure to change it there as well) +FROM mcr.microsoft.com/playwright/python:v1.25.2-jammy RUN pip install pipenv CMD [ "--base-url", "http://localhost:1313" ] ENTRYPOINT [ "pytest" ] -RUN adduser --disabled-login --disabled-password \ - --gecos '' --uid 1000 pytest COPY test/Pipfile test/Pipfile.lock ./ -RUN pipenv --bare sync --system \ - && playwright install-deps \ - && rm -rf /var/lib/apt/lists/* - # since the previous command (playwright install-deps) runs an apt-get update - -USER pytest - -RUN playwright install \ No newline at end of file +# installing dependencies to the container's system, +# so we don't have to worry about venvs +RUN pipenv --bare sync --system diff --git a/Makefile b/Makefile index 93e3f366b..d18fc577d 100644 --- a/Makefile +++ b/Makefile @@ -14,4 +14,4 @@ deploy-prod: tests: docker build --rm -f Dockerfile.tests -t jb_tests:latest . 
- docker run --rm --net=host --volume "$${PWD}":/app -w /app jb_tests:latest + docker run --rm --net=host --user 1000:1000 --volume "$${PWD}":/app -w /app jb_tests:latest diff --git a/README.md b/README.md index 097738065..6b7b5b8df 100644 --- a/README.md +++ b/README.md @@ -40,6 +40,13 @@ Deployment is done with Github Actions, see workflow file in `.github/workflows/ At the moment it is only triggered when something in the `main` branch is changing, but it can also be set up to run at certain times. This would also enable scheduled publishing, since Hugo per default only builds pages which have set `date` in frontmatter to <= `now` +When upgrading the hugo version, these are the locations where it needs to be updated: + +* `Dockerfile` +* `Dockerfile.dev` + +Currently we're at hugo version `0.101.0` + ### Credits - Parts of the functionality are from the Castanet Theme: https://github.com/mattstratton/castanet diff --git a/config.toml b/config.toml index d04b65936..cbc64cee7 100644 --- a/config.toml +++ b/config.toml @@ -1,6 +1,7 @@ baseURL = 'https://www.jupiterbroadcasting.com/' languageCode = 'en-us' title = 'Jupiter Broadcasting' +# enableGitInfo = true theme = 'jb' @@ -19,7 +20,6 @@ paginate = 12 [params.footer.right] headline = "Get Involved" - # Allow HTML in Markdown [markup] [markup.goldmark] diff --git a/config/_default/params.toml b/config/_default/params.toml index 8cca25df8..0ffb0932a 100644 --- a/config/_default/params.toml +++ b/config/_default/params.toml @@ -27,3 +27,6 @@ calendar.embedd= "jalb5frk4cunnaedbfemuqbhv4@group.calendar.google.com" analytics = "https://plausible.ktz.cloud/js/plausible.js" github_repo = "https://github.com/JupiterBroadcasting/jupiterbroadcasting.com" + +# Sponsor Page +sponsors.lookbackdays = -90 \ No newline at end of file diff --git a/content/show/self-hosted/_index.md b/content/show/self-hosted/_index.md index f34e403f9..f59fe0d70 100644 --- a/content/show/self-hosted/_index.md +++ b/content/show/self-hosted/_index.md @@ -6,7 +6,7 @@ draft = false categories = ["Self-Hosted"] show = "self-hosted" hosts = ["chris","alex"] - +sponsors = ["cloudfree.shop-ssh"] type = "show" active = true diff --git a/content/sponsors/_index.md b/content/sponsors/_index.md index 6e6d88eb5..9ded901bc 100644 --- a/content/sponsors/_index.md +++ b/content/sponsors/_index.md @@ -3,67 +3,5 @@ title = "Sponsors " description = "Our Sponsors" date = "2022-07-03T03:35:01-05:00" draft = false -+++ -#### We have many sponsors we love who support each show, and we hope you'll enjoy them too: - -## Jupiter Broadcasting -**Linode** - receive a $100 60-day credit towards your new account. - -[Linode.com/jupiter](https://linode.com/jupiter) - -## LINUX Unplugged - -**Tailscale** - a Zero config VPN. It installs on any device in minutes, manages firewall rules for you, and works from anywhere. Get 20 devices for free for a personal account. - - -[Tailscale.com/coder](https://tailscale.com/coder) - -**Linode** - receive a $100 60-day credit towards your new account. - -[Linode.com/unplugged](https://linode.com/unplugged) - -**Bitwarden** - the easiest way for businesses and individuals to store, share, and sync sensitive data. - -[Bitwarden.com/linux](https://bitwarden.com/linux) - -## Self-Hosted - -**Tailscale** - a Zero config VPN. It installs on any device in minutes, manages firewall rules for you, and works from anywhere. Get 20 devices for free for a personal account. 
- -[Tailscale.com/selfhosted](https://tailscale.com/selfhosted) - -**Linode** - receive a $100 60-day credit towards your new account. - -[Linode.com/ssh](https://linode.com/ssh) - -**Humio** - Take logs from any source and make them usable. Get started with Humio Community Edition for free. - -[Humio.com/hce](https://humio.com/hce) - -## Linux Action News - -**Kolide** - user-centered, cross-platform endpoint security for teams that Slack. - -[Kolide.com/lan](https://kolide.com/lan) - - -**Linode** - receive a $100 60-day credit towards your new account. - -[Linode.com/lan](https://linode.com/lan) - - -## Coder Radio - - -**Linode** - Receive a $100 60-day credit towards your new account. - -[Linode.com/coder](https://linode.com/coder) - -**Tailscale** - the easiest way to create a peer-to-peer network with the power of Wireguard. - -[Tailscale.com/coder](https://tailscale.com/coder) - -**System76** - The new Oryx Pro has a beautiful Samsung OLED 4k display and an insanely fast RAM upgrade from DDR4 to DDR5. - -[System76.com/laptops/oryx](https://system76.com/laptops/oryx) - +sponsors = ["linode.com-jb"] ++++ \ No newline at end of file diff --git a/content/sponsors/linode.com-jb.md b/content/sponsors/linode.com-jb.md new file mode 100644 index 000000000..a2c723e6b --- /dev/null +++ b/content/sponsors/linode.com-jb.md @@ -0,0 +1,6 @@ +--- +description: receive a $100 60-day credit towards your new account. +link: https://linode.com/jupiter +shortname: linode.com-jb +title: Linode +--- \ No newline at end of file diff --git a/pytest.ini b/pytest.ini index f09fd2afa..544556c87 100644 --- a/pytest.ini +++ b/pytest.ini @@ -3,4 +3,6 @@ python_files = test_* python_classes = *Tests python_functions = test_* addopts = -ra -v -testpaths = test \ No newline at end of file +testpaths = test +markers = + dev: the current test(s) you're working on for developing \ No newline at end of file diff --git a/render.yaml b/render.yaml new file mode 100644 index 000000000..5b9caf2c5 --- /dev/null +++ b/render.yaml @@ -0,0 +1,17 @@ +services: +# A static site + - type: web + plan: starter + name: jb-deploy-prv + env: static + buildCommand: hugo --minify --gc --baseURL '/' + staticPublishPath: public/ + pullRequestPreviewsEnabled: true + # should have auto deploy enabled by default + # autoDeploy: true + # might be something we look into for the future (i.e. 
GH actions don't need to trigger rebuild for deploy-preview) + # buildFilter: + # paths: + # - src/**/*.js + # ignoredPaths: + # - src/**/*.test.js \ No newline at end of file diff --git a/test/Pipfile b/test/Pipfile index c304c0e5e..d9c84cb7d 100644 --- a/test/Pipfile +++ b/test/Pipfile @@ -4,10 +4,17 @@ verify_ssl = true name = "pypi" [packages] -pytest-playwright = "*" +# pinning playwright version +pytest-playwright = "1.25.2" pytest-metadata = "*" +# allows parallelism to happen: https://playwright.dev/python/docs/test-runners#parallelism-running-multiple-tests-at-once +pytest-xdist = "*" [dev-packages] +# code formatter +black = "*" +# code linter +pylint = "*" [requires] -python_version = "3.10" \ No newline at end of file +python_version = "3.10" diff --git a/test/Pipfile.lock b/test/Pipfile.lock index 69329b010..9b88f19a8 100644 --- a/test/Pipfile.lock +++ b/test/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "8b3572a9ee9d19f961392155841ff66c531fd0f35f765eb81e56fcd352495fdc" + "sha256": "11c90f2e07793a11956689dc3eef4ce68ebd86a3fa35f85ebc2f64f63b61e800" }, "pipfile-spec": 6, "requires": { @@ -40,6 +40,14 @@ "markers": "python_version >= '3.6'", "version": "==2.1.1" }, + "execnet": { + "hashes": [ + "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5", + "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==1.9.0" + }, "greenlet": { "hashes": [ "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3", @@ -126,16 +134,16 @@ }, "playwright": { "hashes": [ - "sha256:1dbe89f4e3dae53add2c6b642cd07c44474eaba88593e29be7ae82106ede8e63", - "sha256:25b7ca2ee2bdf668dc487563355f42fc354bf5a386eaf639ace44133af7c7ab3", - "sha256:426f2e839671b6fe803a87ce3c7b38a8b3c552565863700791238a97f5f1ad24", - "sha256:58641991bcf43ade2a0740ece6e9d22deff228a6358f9aa61a290b7c4ab6f6ab", - "sha256:ca66ec55858fddfb0255a35c4c320795178b69424a51f95fe09530fed71e9abf", - "sha256:d5c64d4b6f4ab56ea0acf5446f3aa3834beea8d871c58a49eff189aa3cf85d13", - "sha256:de9cd487b28e7d03eb04ab8f8e23bfa75c18dffc897396dffa8e9f1be0982d22" + "sha256:24976035e50fc4c6af0c5c57c4de460c2db4f6590b63150c40fbe707c160ce6e", + "sha256:4ccf36445a435e71e2436d08b112bcb28db6afab1c0605e94d4037f1309a24f9", + "sha256:68ae739f82b78717123eb9d1b28b4619f0b368b88ef73c633681e267680697cd", + "sha256:69b6260ac98df49a868bbf8c44c324f821d359b9f2fbf18b656e836374988e5b", + "sha256:8e08ddd366db671017d6b741f324ac797c8681c3099c0b9a17185f2aac3a9224", + "sha256:b49680ad62a6e070ef857475256bc4f2cfb3242de96c12a2cae35b36564c78cb", + "sha256:fef1e08789732896c4de512c33e20199e27fd0effec08342db7cf1499b1cccb3" ], "markers": "python_version >= '3.7'", - "version": "==1.25.1" + "version": "==1.25.2" }, "pluggy": { "hashes": [ @@ -170,11 +178,11 @@ }, "pytest": { "hashes": [ - "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c", - "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45" + "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7", + "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39" ], "markers": "python_version >= '3.7'", - "version": "==7.1.2" + "version": "==7.1.3" }, "pytest-base-url": { "hashes": [ @@ -184,6 +192,14 @@ "markers": "python_version >= '3.7' and python_version < '4'", "version": "==2.0.0" }, + "pytest-forked": { + "hashes": [ + 
"sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e", + "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8" + ], + "markers": "python_version >= '3.6'", + "version": "==1.4.0" + }, "pytest-metadata": { "hashes": [ "sha256:39261ee0086f17649b180baf2a8633e1922a4c4b6fcc28a2de7d8127a82541bf", @@ -200,6 +216,14 @@ "index": "pypi", "version": "==0.3.0" }, + "pytest-xdist": { + "hashes": [ + "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf", + "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65" + ], + "index": "pypi", + "version": "==2.5.0" + }, "python-slugify": { "hashes": [ "sha256:272d106cb31ab99b3496ba085e3fea0e9e76dcde967b5e9992500d1f785ce4e1", @@ -294,5 +318,235 @@ "version": "==10.1" } }, - "develop": {} + "develop": { + "astroid": { + "hashes": [ + "sha256:bf0fe7ff5d28f697f5eddc9f33140ad6086941c472f53e9dbbb9aaa6bc8952cb", + "sha256:cfef6fa5aeba225d01f217e435472da458711bceb949c6eccb07dc93701edd29" + ], + "markers": "python_full_version >= '3.7.2'", + "version": "==2.12.6" + }, + "black": { + "hashes": [ + "sha256:0a12e4e1353819af41df998b02c6742643cfef58282915f781d0e4dd7a200411", + "sha256:0ad827325a3a634bae88ae7747db1a395d5ee02cf05d9aa7a9bd77dfb10e940c", + "sha256:32a4b17f644fc288c6ee2bafdf5e3b045f4eff84693ac069d87b1a347d861497", + "sha256:3b2c25f8dea5e8444bdc6788a2f543e1fb01494e144480bc17f806178378005e", + "sha256:4a098a69a02596e1f2a58a2a1c8d5a05d5a74461af552b371e82f9fa4ada8342", + "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27", + "sha256:53198e28a1fb865e9fe97f88220da2e44df6da82b18833b588b1883b16bb5d41", + "sha256:5594efbdc35426e35a7defa1ea1a1cb97c7dbd34c0e49af7fb593a36bd45edab", + "sha256:5b879eb439094751185d1cfdca43023bc6786bd3c60372462b6f051efa6281a5", + "sha256:78dd85caaab7c3153054756b9fe8c611efa63d9e7aecfa33e533060cb14b6d16", + "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e", + "sha256:8ce13ffed7e66dda0da3e0b2eb1bdfc83f5812f66e09aca2b0978593ed636b6c", + "sha256:a05da0430bd5ced89176db098567973be52ce175a55677436a271102d7eaa3fe", + "sha256:a983526af1bea1e4cf6768e649990f28ee4f4137266921c2c3cee8116ae42ec3", + "sha256:bc4d4123830a2d190e9cc42a2e43570f82ace35c3aeb26a512a2102bce5af7ec", + "sha256:c3a73f66b6d5ba7288cd5d6dad9b4c9b43f4e8a4b789a94bf5abfb878c663eb3", + "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd", + "sha256:cea1b2542d4e2c02c332e83150e41e3ca80dc0fb8de20df3c5e98e242156222c", + "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4", + "sha256:d839150f61d09e7217f52917259831fe2b689f5c8e5e32611736351b89bb2a90", + "sha256:dd82842bb272297503cbec1a2600b6bfb338dae017186f8f215c8958f8acf869", + "sha256:e8166b7bfe5dcb56d325385bd1d1e0f635f24aae14b3ae437102dedc0c186747", + "sha256:e981e20ec152dfb3e77418fb616077937378b322d7b26aa1ff87717fb18b4875" + ], + "index": "pypi", + "version": "==22.8.0" + }, + "click": { + "hashes": [ + "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", + "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" + ], + "markers": "python_version >= '3.7'", + "version": "==8.1.3" + }, + "dill": { + "hashes": [ + "sha256:33501d03270bbe410c72639b350e941882a8b0fd55357580fbc873fba0c59302", + "sha256:d75e41f3eff1eee599d738e76ba8f4ad98ea229db8b085318aa2b3333a208c86" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", + "version": "==0.3.5.1" + }, + "isort": { + "hashes": [ + 
"sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7", + "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951" + ], + "markers": "python_version < '4.0' and python_full_version >= '3.6.1'", + "version": "==5.10.1" + }, + "lazy-object-proxy": { + "hashes": [ + "sha256:043651b6cb706eee4f91854da4a089816a6606c1428fd391573ef8cb642ae4f7", + "sha256:07fa44286cda977bd4803b656ffc1c9b7e3bc7dff7d34263446aec8f8c96f88a", + "sha256:12f3bb77efe1367b2515f8cb4790a11cffae889148ad33adad07b9b55e0ab22c", + "sha256:2052837718516a94940867e16b1bb10edb069ab475c3ad84fd1e1a6dd2c0fcfc", + "sha256:2130db8ed69a48a3440103d4a520b89d8a9405f1b06e2cc81640509e8bf6548f", + "sha256:39b0e26725c5023757fc1ab2a89ef9d7ab23b84f9251e28f9cc114d5b59c1b09", + "sha256:46ff647e76f106bb444b4533bb4153c7370cdf52efc62ccfc1a28bdb3cc95442", + "sha256:4dca6244e4121c74cc20542c2ca39e5c4a5027c81d112bfb893cf0790f96f57e", + "sha256:553b0f0d8dbf21890dd66edd771f9b1b5f51bd912fa5f26de4449bfc5af5e029", + "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61", + "sha256:6a24357267aa976abab660b1d47a34aaf07259a0c3859a34e536f1ee6e76b5bb", + "sha256:6a6e94c7b02641d1311228a102607ecd576f70734dc3d5e22610111aeacba8a0", + "sha256:6aff3fe5de0831867092e017cf67e2750c6a1c7d88d84d2481bd84a2e019ec35", + "sha256:6ecbb350991d6434e1388bee761ece3260e5228952b1f0c46ffc800eb313ff42", + "sha256:7096a5e0c1115ec82641afbdd70451a144558ea5cf564a896294e346eb611be1", + "sha256:70ed0c2b380eb6248abdef3cd425fc52f0abd92d2b07ce26359fcbc399f636ad", + "sha256:8561da8b3dd22d696244d6d0d5330618c993a215070f473b699e00cf1f3f6443", + "sha256:85b232e791f2229a4f55840ed54706110c80c0a210d076eee093f2b2e33e1bfd", + "sha256:898322f8d078f2654d275124a8dd19b079080ae977033b713f677afcfc88e2b9", + "sha256:8f3953eb575b45480db6568306893f0bd9d8dfeeebd46812aa09ca9579595148", + "sha256:91ba172fc5b03978764d1df5144b4ba4ab13290d7bab7a50f12d8117f8630c38", + "sha256:9d166602b525bf54ac994cf833c385bfcc341b364e3ee71e3bf5a1336e677b55", + "sha256:a57d51ed2997e97f3b8e3500c984db50a554bb5db56c50b5dab1b41339b37e36", + "sha256:b9e89b87c707dd769c4ea91f7a31538888aad05c116a59820f28d59b3ebfe25a", + "sha256:bb8c5fd1684d60a9902c60ebe276da1f2281a318ca16c1d0a96db28f62e9166b", + "sha256:c19814163728941bb871240d45c4c30d33b8a2e85972c44d4e63dd7107faba44", + "sha256:c4ce15276a1a14549d7e81c243b887293904ad2d94ad767f42df91e75fd7b5b6", + "sha256:c7a683c37a8a24f6428c28c561c80d5f4fd316ddcf0c7cab999b15ab3f5c5c69", + "sha256:d609c75b986def706743cdebe5e47553f4a5a1da9c5ff66d76013ef396b5a8a4", + "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84", + "sha256:dd7ed7429dbb6c494aa9bc4e09d94b778a3579be699f9d67da7e6804c422d3de", + "sha256:df2631f9d67259dc9620d831384ed7732a198eb434eadf69aea95ad18c587a28", + "sha256:e368b7f7eac182a59ff1f81d5f3802161932a41dc1b1cc45c1f757dc876b5d2c", + "sha256:e40f2013d96d30217a51eeb1db28c9ac41e9d0ee915ef9d00da639c5b63f01a1", + "sha256:f769457a639403073968d118bc70110e7dce294688009f5c24ab78800ae56dc8", + "sha256:fccdf7c2c5821a8cbd0a9440a456f5050492f2270bd54e94360cac663398739b", + "sha256:fd45683c3caddf83abbb1249b653a266e7069a09f486daa8863fb0e7496a9fdb" + ], + "markers": "python_version >= '3.6'", + "version": "==1.7.1" + }, + "mccabe": { + "hashes": [ + "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", + "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" + ], + "markers": "python_version >= '3.6'", + "version": "==0.7.0" + }, + "mypy-extensions": { + "hashes": [ + 
"sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d", + "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8" + ], + "version": "==0.4.3" + }, + "pathspec": { + "hashes": [ + "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93", + "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d" + ], + "markers": "python_version >= '3.7'", + "version": "==0.10.1" + }, + "platformdirs": { + "hashes": [ + "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788", + "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19" + ], + "markers": "python_version >= '3.7'", + "version": "==2.5.2" + }, + "pylint": { + "hashes": [ + "sha256:4b124affc198b7f7c9b5f9ab690d85db48282a025ef9333f51d2d7281b92a6c3", + "sha256:4f3f7e869646b0bd63b3dfb79f3c0f28fc3d2d923ea220d52620fd625aed92b0" + ], + "index": "pypi", + "version": "==2.15.0" + }, + "tomli": { + "hashes": [ + "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" + ], + "markers": "python_version >= '3.7'", + "version": "==2.0.1" + }, + "tomlkit": { + "hashes": [ + "sha256:25d4e2e446c453be6360c67ddfb88838cfc42026322770ba13d1fbd403a93a5c", + "sha256:3235a9010fae54323e727c3ac06fb720752fe6635b3426e379daec60fbd44a83" + ], + "markers": "python_version >= '3.6' and python_version < '4.0'", + "version": "==0.11.4" + }, + "wrapt": { + "hashes": [ + "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3", + "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b", + "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4", + "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2", + "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656", + "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3", + "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff", + "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310", + "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a", + "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57", + "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069", + "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383", + "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe", + "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87", + "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d", + "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b", + "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907", + "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f", + "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0", + "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28", + "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1", + "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853", + "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc", + "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3", + "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3", + "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164", + 
"sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1", + "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c", + "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1", + "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7", + "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1", + "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320", + "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed", + "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1", + "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248", + "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c", + "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456", + "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77", + "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef", + "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1", + "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7", + "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86", + "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4", + "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d", + "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d", + "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8", + "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5", + "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471", + "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00", + "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68", + "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3", + "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d", + "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735", + "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d", + "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569", + "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7", + "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59", + "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5", + "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb", + "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b", + "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f", + "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462", + "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015", + "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af" + ], + "markers": "python_version < '3.11'", + "version": "==1.14.1" + } + } } diff --git a/test/conftest.py b/test/conftest.py index 783c2e5c8..1725cb78d 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -1,7 +1,9 @@ from pathlib import Path -from typing import Dict, List +from typing import Dict, Generator, List from pytest import fixture +from pytest_base_url.plugin import base_url +from playwright.sync_api import Playwright, APIRequestContext @fixture def screenshot_dir() -> Path: @@ -21,28 +23,34 @@ def expected_rss_feeds() -> List[Dict[str,str,]]: ] @fixture -def expected_dropdown_items(base_url) -> 
List[Dict[str,str]]: - return [ - {'href': '/hosts', 'title': 'Hosts'}, - {'href': '/guests', 'title': 'Guests'}, - {'href': 'https://github.com/JupiterBroadcasting/', 'title': 'GitHub'}, - {'href': 'https://jupiter.tube', 'title': 'Peertube'}, - {'href': 'https://www.meetup.com/jupiterbroadcasting', 'title': 'Meetup'}, - {'href': 'https://www.youtube.com/user/jupiterbroadcasting', 'title': 'YouTube'}, - {'href': 'https://twitter.com/jupitersignal', 'title': 'Twitter'}, - {'href': '{}/community/irc'.format(base_url), 'title': 'IRC'}, - {'href': 'http://www.facebook.com/pages/Jupiter-Broadcasting/156241429615', 'title': 'Facebook'}, - {'href': 'https://discord.com/invite/n49fgkp', 'title': 'Self-Hosted Discord'}, - {'href': '{}/community/matrix'.format(base_url), 'title': 'Matrix'}, - {'href': '{}/community/mumble'.format(base_url), 'title': 'Mumble'}, - {'href': 'https://t.me/jupitertelegram', 'title': 'Telegram'}, - {'href': '/show/coder-radio', 'title': 'Coder Radio'}, - {'href': '/show/jupiter-extras', 'title': 'Jupiter EXTRAS'}, - {'href': '/show/linux-action-news', 'title': 'Linux Action News'}, - {'href': '/show/linux-unplugged', 'title': 'LINUX Unplugged'}, - {'href': '/show/office-hours', 'title': 'Office Hours'}, - {'href': '/show/self-hosted', 'title': 'Self-Hosted'}, - ] +def expected_dropdown_items() -> Dict[str,List[Dict[str,str]]]: + return { + "Shows": [ + {'href': '/show/coder-radio', 'title': 'Coder Radio'}, + {'href': '/show/jupiter-extras', 'title': 'Jupiter EXTRAS'}, + {'href': '/show/linux-action-news', 'title': 'Linux Action News'}, + {'href': '/show/linux-unplugged', 'title': 'LINUX Unplugged'}, + {'href': '/show/office-hours', 'title': 'Office Hours'}, + {'href': '/show/self-hosted', 'title': 'Self-Hosted'}, + ], + "People": [ + {'href': '/hosts', 'title': 'Hosts'}, + {'href': '/guests', 'title': 'Guests'}, + ], + "Community": [ + {'href': 'https://github.com/JupiterBroadcasting/', 'title': 'GitHub'}, + {'href': 'https://jupiter.tube', 'title': 'Peertube'}, + {'href': 'https://www.meetup.com/jupiterbroadcasting/', 'title': 'Meetup'}, + {'href': 'https://www.youtube.com/user/jupiterbroadcasting', 'title': 'YouTube'}, + {'href': 'https://twitter.com/jupitersignal', 'title': 'Twitter'}, + {'href': '/community/irc/', 'title': 'IRC'}, + {'href': 'http://www.facebook.com/pages/Jupiter-Broadcasting/156241429615', 'title': 'Facebook'}, + {'href': 'https://discord.com/invite/n49fgkp', 'title': 'Self-Hosted Discord'}, + {'href': '/community/matrix/', 'title': 'Matrix'}, + {'href': '/community/mumble/', 'title': 'Mumble'}, + {'href': 'https://t.me/jupitertelegram', 'title': 'Telegram'}, + ] + } @fixture def expected_dropdowns() -> List[Dict[str,str]]: @@ -67,25 +75,24 @@ def expect_nav_items() -> List[Dict[str,str]]: {'title': 'Contact', 'href': '/contact'}, ] - - page.goto("/") - nav = page.locator('#mainnavigation') - expect(nav).to_be_visible() - dropdown_nav_items = page.locator('.navbar-start > * > a') - count = dropdown_nav_items.count() - for i in range(count): - expect(dropdown_nav_items.nth(i)).to_contain_text(expected_dropdowns[i]['title']) - expect(dropdown_nav_items.nth(i)).to_have_attribute('href', expected_dropdowns[i]['href']) - - - nav_items = page.locator('.navbar-start > a') - count = nav_items.count() - for i in range(count): - expect(nav_items.nth(i)).to_contain_text(expect_nav_items[i]['title']) - expect(nav_items.nth(i)).to_have_attribute('href', expect_nav_items[i]['href']) - - nav_image = page.locator('.navbar-brand > a > img') - - - 
expect(nav_image.nth(0)).to_be_visible() - +# https://playwright.dev/python/docs/api-testing#configure +# used for doing similar requests to API calls +@fixture(scope="session") +def api_request_context( + # base playwright context/object + playwright: Playwright, + # from the pytest-base-url plugin Playwright installs (automatically) + # so we're not having to hard-code the URL + base_url: base_url, + # Generator is returned based on Playwright docs +) -> Generator[APIRequestContext, None, None]: + # creates APIRequestContext to allow requests to be made + # using the base_url variable (from the plugin) to define + # the base_url which'll allow requests relative to that base_url + request_context = playwright.request.new_context(base_url=base_url) + # essentially a "return", but used with generators + yield request_context + # supposed to get rid of coookies/other stored info after generator is done + # https://playwright.dev/python/docs/api/class-apirequestcontext#api-request-context-dispose + # https://github.com/microsoft/playwright.dev/blob/d9b4a2f3bd0510ea89c87ed230b8241eb33b6688/python/docs/api-testing.mdx#writing-api-test + request_context.dispose() diff --git a/test/docker-compose.yml b/test/docker-compose.yml new file mode 100644 index 000000000..6974dd97e --- /dev/null +++ b/test/docker-compose.yml @@ -0,0 +1,30 @@ +services: + jb-com: + build: + context: ../ + args: + - BASE_URL=TESTS_WEB + restart: unless-stopped + cap_drop: + - ALL + cap_add: + # https://github.com/nginxinc/docker-nginx/blob/f3d86e99ba2db5d9918ede7b094fcad7b9128cd8/stable/alpine/Dockerfile#L49 + - CAP_CHOWN + # drop privs for nginx + - CAP_SETGID + - CAP_SETUID + testing: + build: + context: ../ + dockerfile: ./Dockerfile.tests + volumes: + - ../:/app + working_dir: /app + depends_on: + - jb-com + # running tests via docker-compose exec, so sleeping here to keep alive + entrypoint: [ 'sleep', 'infinity' ] + cap_drop: + - ALL + cap_add: + - CAP_DAC_OVERRIDE diff --git a/test/e2e/test_home.py b/test/e2e/test_home.py index 094b325f4..487ea313b 100644 --- a/test/e2e/test_home.py +++ b/test/e2e/test_home.py @@ -1,15 +1,20 @@ import re from pathlib import Path from typing import Dict, List -from playwright.sync_api import Page, expect +from urllib.parse import urlparse +from pytest import fixture, mark +from playwright.sync_api import Page, expect, Locator -def test_homepage_screenshot(page: Page, screenshot_dir: Path): +# allows this to be run for every test in this file, +# without it needing to specify it +@fixture(autouse=True) +def setup(page: Page): page.goto("/") - page.pause() + +def test_homepage_screenshot(page: Page, screenshot_dir: Path): page.screenshot(path=f"{screenshot_dir}/home.png", full_page=True) def test_homepage_has_logo(page: Page): - page.goto("/") logo = page.locator(".logo") expect(logo).to_be_visible() @@ -17,7 +22,6 @@ def test_homepage_has_logo(page: Page): expect(logo_subtitle).to_contain_text('Home to the best shows on Linux, Open Source, Security, Privacy, Community, Development, and News') def test_pagination(page: Page): - page.goto("/") first_card = page.locator('.card').nth(0).text_content page_2_button = page.locator('[aria-label="pagination"] >> text=2') page_2_button.click() @@ -27,26 +31,44 @@ def test_pagination(page: Page): assert first_card != first_card_second_page def test_rss_feeds(page: Page, expected_rss_feeds: List[Dict[str,str]]): - page.goto("/") - for rss_feed in expected_rss_feeds: element = page.locator('#rss-feeds-menu > div > 
a[href^="{}"]'.format(rss_feed['href'])) expect(element).to_contain_text(rss_feed['title']) -def test_dropdowns(page: Page, expected_dropdown_items): - page.goto("/") +@mark.dev +def test_dropdowns(page: Page, expected_dropdown_items: Dict[str,List[Dict[str,str]]]): + for dropdown_text, child_elements in expected_dropdown_items.items(): + + # dropdown item to hover over in menu + parent_element: Locator = page.locator(f'.navbar-start >> a:has-text("{dropdown_text}"):visible') + + # hover to show elements + parent_element.hover() + + # this is because Shows's url is show + singular = dropdown_text.lower().rstrip('s') + # test to make sure menu items hyperlink + expect(parent_element).to_have_attribute('href', f'/{singular}') + + # finds sibling element, which contains all the dropdown elements + dropdown_elements: Locator = page.locator(f'a:has-text("{dropdown_text}") + .navbar-dropdown') + + for dropdown_item in child_elements: + item: Locator = dropdown_elements.locator(f'a.navbar-item:has-text("{dropdown_item["title"]}")') + try: + # check if the path matches exactly what's in our expected output + expect(item).to_have_attribute('href', dropdown_item['href']) + except AssertionError: + # if not, then it's a relative links (i.e. /community/irc/) + # so just comparing the path of the item + assert urlparse(item.get_attribute('href')).path == dropdown_item['href'] + expect(item).to_be_visible() - for dropdown_item in expected_dropdown_items: - selector = '.navbar-item > .navbar-dropdown > a[href^="{}"]'.format(dropdown_item['href']) - element = page.locator(selector) - expect(element).to_contain_text(dropdown_item['title']) - def test_nav(page: Page, expected_dropdowns, expect_nav_items): - page.goto("/") nav = page.locator('#mainnavigation') expect(nav).to_be_visible() dropdown_nav_items = page.locator('.navbar-start > * > a') @@ -61,7 +83,7 @@ def test_nav(page: Page, expected_dropdowns, expect_nav_items): for i in range(count): expect(nav_items.nth(i)).to_contain_text(expect_nav_items[i]['title']) expect(nav_items.nth(i)).to_have_attribute('href', expect_nav_items[i]['href']) - + nav_image = page.locator('.navbar-brand > a > img') - expect(nav_image.nth(0)).to_be_visible() \ No newline at end of file + expect(nav_image.nth(0)).to_be_visible() diff --git a/test/e2e/test_infra_configs.py b/test/e2e/test_infra_configs.py index 7e79d4ade..fdddac41d 100644 --- a/test/e2e/test_infra_configs.py +++ b/test/e2e/test_infra_configs.py @@ -1,16 +1,20 @@ -from json import loads as j_loads -from pytest import mark -from playwright.sync_api import Page +from playwright.sync_api import APIRequestContext -@mark.skip(reason="currently failing, and troubleshooting in #383") -def test_matrix_well_known(page: Page): - contents = page.goto("/.well-known/matrix/server").text() - - well_known_structure = { - "m.server": "colony.jupiterbroadcasting.com:443" + +def test_matrix_well_known( + api_request_context: APIRequestContext, +): + well_known_folder = "/.well-known/matrix" + well_known_types = { + "client": { + "m.homeserver": {"base_url": "https://colony.jupiterbroadcasting.com"} + }, + "server": {"m.server": "colony.jupiterbroadcasting.com:443"}, } - if j_loads(contents) == well_known_structure: - assert True - else: - assert False + for well_known_key, well_known_data in well_known_types.items(): + response = api_request_context.get(f"{well_known_folder}/{well_known_key}") + + assert response.ok, "200-299 status code" + assert response.body() != "", "response is not empty" + assert response.json() == 
well_known_data, "expected reponse for JB matrix" diff --git a/test/requirements.txt b/test/requirements.txt deleted file mode 100644 index 63c1597e1..000000000 --- a/test/requirements.txt +++ /dev/null @@ -1,23 +0,0 @@ --i https://pypi.org/simple -attrs==22.1.0; python_version >= '3.5' -certifi==2022.6.15; python_version >= '3.6' -charset-normalizer==2.1.1; python_version >= '3.6' -greenlet==1.1.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' -idna==3.3; python_version >= '3.5' -iniconfig==1.1.1 -packaging==21.3; python_version >= '3.6' -playwright==1.25.1; python_version >= '3.7' -pluggy==1.0.0; python_version >= '3.6' -py==1.11.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' -pyee==8.1.0 -pyparsing==3.0.9; python_full_version >= '3.6.8' -pytest==7.1.2; python_version >= '3.7' -pytest-base-url==2.0.0; python_version >= '3.7' and python_version < '4' -pytest-metadata==2.0.2 -pytest-playwright==0.3.0 -python-slugify==6.1.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' -requests==2.28.1; python_version >= '3.7' and python_version < '4' -text-unidecode==1.3 -tomli==2.0.1; python_version >= '3.7' -urllib3==1.26.12; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4' -websockets==10.1; python_version >= '3.7' diff --git a/themes/jb/layouts/partials/episode/tags.html b/themes/jb/layouts/partials/episode/tags.html index 8b7643bc9..c1a6b690c 100644 --- a/themes/jb/layouts/partials/episode/tags.html +++ b/themes/jb/layouts/partials/episode/tags.html @@ -1,5 +1,6 @@ {{ range $tag := . }} - {{$tag}} + + {{$tag}} {{ end }} diff --git a/themes/jb/layouts/partials/footer.html b/themes/jb/layouts/partials/footer.html index 647ada59e..30442278b 100644 --- a/themes/jb/layouts/partials/footer.html +++ b/themes/jb/layouts/partials/footer.html @@ -8,7 +8,7 @@
-
{{ .Site.Params.footer.left.headline }}
+

{{ .Site.Params.footer.left.headline }}

    {{ range where site.RegularPages "Section" "community" }} {{ $link := "" }} @@ -26,7 +26,7 @@
    {{ .Site.Params.footer.left.headline }}
-
{{ .Site.Params.footer.center.headline }}
+

{{ .Site.Params.footer.center.headline }}

    {{ range .Site.Menus.footercenter }}
  • @@ -42,7 +42,7 @@
    {{ .Site.Params.footer.center.headline }}
-
{{ .Site.Params.footer.right.headline }}
+

{{ .Site.Params.footer.right.headline }}

    {{ range .Site.Menus.footerright }}
  • diff --git a/themes/jb/layouts/partials/sponsor/medium.html b/themes/jb/layouts/partials/sponsor/medium.html new file mode 100644 index 000000000..d4bc3c571 --- /dev/null +++ b/themes/jb/layouts/partials/sponsor/medium.html @@ -0,0 +1,8 @@ +

    + + {{- .Title }} - {{ replaceRE (print "^" .Title "(?: is)? (.*)") "$1" .Params.description -}} +

    +

    + {{- $url := urls.Parse .Params.link -}} + {{ if findRE "^www\\.(.*)" $url.Host }}{{ $url.Host }}{{else}}{{ $url.Host | title }}{{ end }}{{ if ne $url.Path "/" }}{{- $url.Path -}}{{- end -}} +

    diff --git a/themes/jb/layouts/sponsors/list.html b/themes/jb/layouts/sponsors/list.html new file mode 100644 index 000000000..a6753e5af --- /dev/null +++ b/themes/jb/layouts/sponsors/list.html @@ -0,0 +1,39 @@ +{{ define "main" }} +
    +
    +
    +

    Sponsors

    +

    We have many sponsors we love who support each show, and we hope you'll enjoy them too:

    + {{- $sponsor_pages := where site.Pages "Section" "sponsors" -}} + {{ if .Params.Sponsors -}} +

    Jupiter Broadcasting

    + {{- range .Params.Sponsors -}} + {{- $sponsor := index (where $sponsor_pages "Params.shortname" .) 0 -}} + {{ partial "sponsor/medium.html" $sponsor }} + {{- end -}} + {{ end }} + {{range where .Site.Sections "Title" "Shows" -}} + {{- range .Pages -}} + {{- $show_sponsors := slice -}} + {{- $show_sponsors_list := slice -}} + {{- with . -}} + {{- if .Params.Sponsors -}} + {{- $show_sponsors_list = $show_sponsors_list | append .Params.Sponsors -}} + {{- end -}} + {{- end -}} + {{- range (where (where .Pages "Type" "episode") "Date" "ge" (now.AddDate 0 0 (int $.Site.Params.Sponsors.lookbackdays) )) -}} + {{- $show_sponsors = $show_sponsors | append .Params.Sponsors -}} + {{- end -}} + {{ if $show_sponsors -}} +

    {{ .Title }}

    + {{- range uniq ($show_sponsors | append $show_sponsors_list) -}} + {{- $sponsor := index (where $sponsor_pages "Params.shortname" .) 0 -}} + {{ partial "sponsor/medium.html" $sponsor }} + {{- end -}} + {{ end }} + {{- end -}} + {{- end -}} +
    +
    +
    +{{ end }}