🧱 Blocklist › Generate #341
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # # | |
| # @url https://github.com/ConfigServer-Software/service-blocklists | |
| # @type github workflow | |
| # @updated 09.28.25 | |
| # @usage generates a list of ipsets which can then be used within host files, config server firewall, and various other apps | |
| # | |
| # | |
| # @secrets secrets.SELF_TOKEN self github personal access token (fine-grained) | |
| # secrets.SELF_TOKEN_CL self github personal access token (classic) | |
| # secrets.NPM_TOKEN self npmjs access token | |
| # secrets.PYPI_API_TOKEN self Pypi API token (production site) - https://pypi.org/ | |
| # secrets.PYPI_API_TEST_TOKEN self Pypi API token (test site) - https://test.pypi.org/ | |
| # secrets.SELF_DOCKERHUB_TOKEN self Dockerhub token | |
| # secrets.CODECOV_TOKEN codecov upload token for nodejs projects | |
| # secrets.API_GEOLITE2_KEY maxmind API token | |
| # secrets.CF_ACCOUNT_ID cloudflare account id | |
| # secrets.CF_ACCOUNT_TOKEN cloudflare account token | |
| # secrets.ARTIFACTS_DOMAIN github artifacts domain name | |
| # secrets.ARTIFACTS_PORT github artifacts port | |
| # secrets.ARTIFACTS_GITHUB_SSH_PRIVATE_KEY github artifacts server ssh private key | |
| # secrets.ORG_TOKEN org github personal access token (fine-grained) | |
| # secrets.ORG_TOKEN_CL org github personal access token (classic) | |
| # secrets.ORG_DOCKERHUB_TOKEN org dockerhub secret | |
| # secrets.ORG_GITEA_TOKEN org gitea personal access token (classic) with package:write permission | |
| # secrets.BOT_GPG_KEY_ASC bot gpg private key (armored) | BEGIN PGP PRIVATE KEY BLOCK | |
| # secrets.BOT_GPG_KEY_B64 bot gpg private key (binary) converted to base64 | |
| # secrets.BOT_GPG_PASSPHRASE bot gpg private key passphrase | |
| # secrets.DISCORD_WEBHOOK_CHAN_GITHUB_RELEASES discord webhook to report release notifications from github to discord | |
| # secrets.DISCORD_WEBHOOK_CHAN_GITHUB_WORKFLOWS discord webhook to report workflow notifications from github to discord | |
| # secrets.DISCORD_WEBHOOK_CHAN_GITHUB_UPDATES discord webhook to report activity notifications from github to discord | |
| # | |
| # @local these workflows can be tested locally through the use of `act` | |
| # https://github.com/nektos/act | |
| # Extract act to folder | |
| # Add system env var with path to act.exe | |
| # Run the commands: | |
| # git clone https://github.com/username/repo | |
| # act -W .github/workflows/blocklist-generate.yml -P ubuntu-latest=catthehacker/ubuntu:full-22.04 | |
| # act -W .github/workflows/blocklist-generate.yml -s TOKEN_CL=XXXXXXXXXX --pull=false | |
| # | |
| # 📄 bl-master.sh generate master ipset | URLs: VARARG | |
| # 📄 bl-plain.sh generate ipset from online plain-text url / page | URLs: VARARG | |
| # 📄 bl-json.sh generate ipset from json formatted web url. requires url and jq query | URLs: SINGLE | |
| # 📄 bl-htmlip.sh generate ipset by fetching HTML in web url, pulls only ips with grep rule (can't be changed) | URLs: SINGLE | |
| # 📄 bl-html.sh generate ipset by fetching HTML in web url, does not run its own grep, must be specified in command | URLs: VARARG | |
| # 📄 bl-block.sh generate ipset by fetching locally specified file in /blocks/ repo folder | |
| # 📄 bl-format.sh generate ipset by from an existing list of IPs. does not generate ips itself. only validates a list provided | |
| # 📄 bl-spf.sh generate ipset by fetching _spf ips from domain | |
| # | |
| # local test requires the same structure as the github workflow | |
| # 📁 .github | |
| # 📁 blocks | |
| # 📁 bruteforce | |
| # 📄 01.ipset | |
| # 📁 privacy | |
| # 📄 01.ipset | |
| # 📁 scripts | |
| # 📄 bl-master.sh | |
| # 📄 bl-plain.sh | |
| # 📄 bl-json.sh | |
| # 📄 bl-htmlip.sh | |
| # 📄 bl-html.sh | |
| # 📄 bl-block.sh | |
| # 📄 bl-format.sh | |
| # 📄 bl-spf.sh | |
| # 📁 workflows | |
| # 📄 blocklist-generate.yml | |
| # # | |
name: '🧱 Blocklist › Generate'
run-name: '🧱 Blocklist › Generate'
# #
#   triggers
# #
on:
  # #
  #   Trigger › Workflow Dispatch
  # #
  workflow_dispatch:
    inputs:
      # selects which half of the pipeline runs on a manual trigger; the
      # same split is driven by the two cron schedules below (see the
      # "Resolve run mode" step in the blocklist-generate job)
      RUN_MODE:
        description: '🧭 Run Mode'
        required: true
        default: core_only
        type: choice
        options:
          - core_only
          - daily_only
  # #
  #   Trigger › Cron Schedule
  # #
  schedule:
    # - cron: '0 2,8,14,20 * * *'
    - cron: '0 */4 * * *' # every 4 hours › core lists (run_core=true)
    - cron: '15 1 * * *' # daily at 01:15 UTC › daily lists (run_daily=true)
# #
#   concurrency
# #
# one generation run per ref at a time; queued runs wait instead of
# cancelling an in-flight generation
concurrency:
  group: blocklist-generate-${{ github.ref }}
  cancel-in-progress: false
# #
#   environment variables
# #
env:
  ASSIGN_USER: Aetherinox
  BOT_NAME_1: BinaryServ
  BOT_NAME_DEPENDABOT: dependabot[bot]
  BOT_NAME_RENOVATE: renovate[bot]
  # NOTE(review): the header block documents BOT_GPG_KEY_B64 /
  # BOT_GPG_PASSPHRASE, but these read ADMINSERV_* secrets — confirm which
  # secret names actually exist in this repository
  GPG_KEY_BASE64: ${{ secrets.ADMINSERV_GPG_KEY_B64 }}
  GPG_KEY_PASSPHRASE: ${{ secrets.ADMINSERV_GPG_PASSPHRASE }}
  # NOTE(review): env.IMAGE_VERSION is referenced by the "Start" steps below
  # but is never defined here — PKG_VER_* will be empty; confirm intent
# #
#   jobs
# #
jobs:
  # #
  #   Job › Setup
  # #
  blocklist-setup:
    name: >-
      📦 Setup
    runs-on: ubuntu-latest
    # runs-on: apollo-x64
    timeout-minutes: 120
    steps:
      # #
      #   Job › Checkout
      # #
      - name: >-
          ☑️ Checkout
        uses: actions/checkout@v6
        with:
          # full clone (not shallow) — presumably required by downstream
          # git/history usage; confirm whether a shallow clone would suffice
          fetch-depth: 0
      # #
      #   Job › Job Information
      # #
      # exposes job/run metadata (job_name, job_id, run_url, …) as outputs
      # consumed by the "Start" step below
      - name: >-
          🔄 Load Job
        uses: qoomon/actions--context@v5
        id: 'context'
      # #
      #   Job › Start
      # #
      # prints run metadata for debugging, exports timestamp / SHA env vars
      # for later steps, and installs `tree` for the workspace listing below
      - name: >-
          ✅ Start
        run: |
          echo ""
          echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――"
          echo " Starting Job ${{ steps.context.outputs.job_name }}"
          echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――"
          YEAR="$(date +'%Y')"
          echo "YEAR=${YEAR}" >> $GITHUB_ENV
          NOW="$(date +'%m-%d-%Y %H:%M:%S')" # 02-25-2025 12:49:48
          echo "NOW=${NOW}" >> $GITHUB_ENV
          NOW_SHORT="$(date +'%m-%d-%Y')" # 02-25-2025
          echo "NOW_SHORT=${NOW_SHORT}" >> $GITHUB_ENV
          NOW_LONG="$(date +'%m-%d-%Y %H:%M')" # 02-25-2025 12:49
          echo "NOW_LONG=${NOW_LONG}" >> $GITHUB_ENV
          NOW_DOCKER="$(date +'%Y%m%d')" # 20250225
          echo "NOW_DOCKER=${NOW_DOCKER}" >> $GITHUB_ENV
          NOW_DOCKER_TS="$(date -u +'%FT%T.%3NZ')" # 2025-02-25T12:50:11.569Z
          echo "NOW_DOCKER_TS=${NOW_DOCKER_TS}" >> $GITHUB_ENV
          SHA1="$(git rev-parse HEAD)" # 71fad013cfce9116ec62779e4a7e627fe4c33627
          echo "SHA1=${SHA1}" >> $GITHUB_ENV
          SHA1_GH="$(echo ${GITHUB_SHA})" # 71fad013cfce9116ec62779e4a7e627fe4c33627
          echo "SHA1_GH=${SHA1_GH}" >> $GITHUB_ENV
          # NOTE(review): env.IMAGE_VERSION is never defined in this workflow's
          # env block, so both PKG_VER_* values evaluate to empty — confirm
          # whether IMAGE_VERSION should be set for this workflow
          PKG_VER_1DIGIT="$(echo ${{ env.IMAGE_VERSION }} | cut -d '.' -f1-1)" # 15.52.35 > 15
          echo "PKG_VER_1DIGIT=${PKG_VER_1DIGIT}" >> $GITHUB_ENV
          PKG_VER_2DIGIT="$(echo ${{ env.IMAGE_VERSION }} | cut -d '.' -f1-2)" # 15.52.35 > 15.52
          echo "PKG_VER_2DIGIT=${PKG_VER_2DIGIT}" >> $GITHUB_ENV
          echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――"
          echo ""
          echo ""
          sudo apt -qq update
          sudo apt -qq install tree
          echo ""
          echo ""
          echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――"
          echo ""
          echo ""
          echo " Runner .............. ${{ runner.name }}"
          echo " Workflow ............ ${{ github.workflow }} (#${{ github.workflow_ref }})"
          echo " Run Number .......... ${{ github.run_number }}"
          echo " Ref ................. ${{ github.ref }}"
          echo " Ref Name ............ ${{ github.ref_name }}"
          echo " Event Name .......... ${{ github.event_name }}"
          echo " Repo ................ ${{ github.repository }}"
          echo " Repo Owner .......... ${{ github.repository_owner }}"
          echo " Run ID .............. https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
          echo " Triggered By ........ ${{ github.actor }}"
          echo " SHA 1 (GITHUB_SHA) .. ${GITHUB_SHA}"
          echo " SHA 2 (github.sha) .. ${{ github.sha }}"
          echo " SHA 3 (env.SHA1) .... ${SHA1}"
          echo " SHA 4 (env.SHA1_GH) . ${SHA1_GH}"
          echo " Workspace ........... ${{ github.workspace }}"
          echo " PWD ................. ${PWD}"
          echo " Job Name ............ ${{ steps.context.outputs.job_name }}"
          echo " Job ID .............. ${{ steps.context.outputs.job_id }}"
          echo " Job URL ............. ${{ steps.context.outputs.job_url }}"
          echo " Run ID .............. ${{ steps.context.outputs.run_id }}"
          echo " Run Attempt ......... ${{ steps.context.outputs.run_attempt }}"
          echo " Run Number .......... ${{ steps.context.outputs.run_number }}"
          echo " Run URL ............. ${{ steps.context.outputs.run_url }}"
          echo " Run Env ............. ${{ steps.context.outputs.environment }}"
          echo " Run Env URL ......... ${{ steps.context.outputs.environment_url }}"
          echo " Run Deployment ...... ${{ steps.context.outputs.deployment_id }}"
          echo " Run Deployment URL .. ${{ steps.context.outputs.deployment_url }}"
          # NOTE(review): duplicate of the "Run Deployment" line above
          echo " Run Deployment ...... ${{ steps.context.outputs.deployment_id }}"
          echo " Run Runner Name ..... ${{ steps.context.outputs.runner_name }}"
          echo " Run Runner ID ....... ${{ steps.context.outputs.runner_id }}"
          echo " Year ................ ${YEAR}"
          echo " Now ................. ${NOW}"
          echo " Now (Short) ......... ${NOW_SHORT}"
          echo " Now (Long) .......... ${NOW_LONG}"
          echo " Now (Docker) ........ ${NOW_DOCKER}"
          echo " Now (Docker TS) ..... ${NOW_DOCKER_TS}"
          echo ""
          echo ""
          echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――"
          echo ""
          echo ""
          # NOTE(review): tree's -I option takes a wildcard pattern; `-I ./`
          # likely filters nothing — confirm the intended exclusions
          tree -I node_modules -I .git -I ./
          echo ""
          echo ""
          echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――"
          echo ""
          echo ""
| # # | |
| # Generate › Install Packages | |
| # # | |
| - name: "🧱 Install Packages" | |
| run: | | |
| sudo apt-get install -y ipcalc ed html2text whois uuid-runtime autoconf | |
| # # | |
| # Generate › Cache Packages | |
| # # | |
| - name: "🧱 Cache Packages" | |
| uses: awalsh128/cache-apt-pkgs-action@latest | |
| with: | |
| packages: ipcalc ed html2text whois uuid-runtime | |
| version: 1.0 | |
| # # | |
| # Job › Blocklist › Master | |
| # # | |
| blocklist-generate: | |
| name: >- | |
| 📋 Generate › Blocklist | |
| runs-on: ubuntu-latest | |
| # runs-on: apollo-x64 | |
| timeout-minutes: 120 | |
| needs: [ blocklist-setup ] | |
| outputs: | |
| artifact-name: ${{ steps.generate_artifact_zip.outputs.artifact-name }} | |
| artifact-path: ${{ steps.generate_artifact_zip.outputs.artifact-path }} | |
| steps: | |
| # # | |
| # Generate › Checkout | |
| # # | |
| - name: >- | |
| ☑️ Checkout | |
| uses: actions/checkout@v6 | |
| with: | |
| fetch-depth: 0 | |
| # # | |
| # Generate › Job Information | |
| # # | |
| - name: >- | |
| 🔄 Load Job | |
| uses: qoomon/actions--context@v4 | |
| id: 'context' | |
      # #
      #   Generate › Start
      # #
      # prints run metadata for debugging, exports timestamp / SHA env vars
      # for later steps, and installs `tree` for the workspace listing below
      - name: >-
          ✅ Start
        run: |
          echo ""
          echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――"
          echo " Starting Job ${{ steps.context.outputs.job_name }}"
          echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――"
          YEAR="$(date +'%Y')"
          echo "YEAR=${YEAR}" >> $GITHUB_ENV
          NOW="$(date +'%m-%d-%Y %H:%M:%S')" # 02-25-2025 12:49:48
          echo "NOW=${NOW}" >> $GITHUB_ENV
          NOW_SHORT="$(date +'%m-%d-%Y')" # 02-25-2025
          echo "NOW_SHORT=${NOW_SHORT}" >> $GITHUB_ENV
          NOW_LONG="$(date +'%m-%d-%Y %H:%M')" # 02-25-2025 12:49
          echo "NOW_LONG=${NOW_LONG}" >> $GITHUB_ENV
          NOW_DOCKER="$(date +'%Y%m%d')" # 20250225
          echo "NOW_DOCKER=${NOW_DOCKER}" >> $GITHUB_ENV
          NOW_DOCKER_TS="$(date -u +'%FT%T.%3NZ')" # 2025-02-25T12:50:11.569Z
          echo "NOW_DOCKER_TS=${NOW_DOCKER_TS}" >> $GITHUB_ENV
          SHA1="$(git rev-parse HEAD)" # 71fad013cfce9116ec62779e4a7e627fe4c33627
          echo "SHA1=${SHA1}" >> $GITHUB_ENV
          SHA1_GH="$(echo ${GITHUB_SHA})" # 71fad013cfce9116ec62779e4a7e627fe4c33627
          echo "SHA1_GH=${SHA1_GH}" >> $GITHUB_ENV
          # NOTE(review): env.IMAGE_VERSION is never defined in this workflow's
          # env block, so both PKG_VER_* values evaluate to empty — confirm
          # whether IMAGE_VERSION should be set for this workflow
          PKG_VER_1DIGIT="$(echo ${{ env.IMAGE_VERSION }} | cut -d '.' -f1-1)" # 15.52.35 > 15
          echo "PKG_VER_1DIGIT=${PKG_VER_1DIGIT}" >> $GITHUB_ENV
          PKG_VER_2DIGIT="$(echo ${{ env.IMAGE_VERSION }} | cut -d '.' -f1-2)" # 15.52.35 > 15.52
          echo "PKG_VER_2DIGIT=${PKG_VER_2DIGIT}" >> $GITHUB_ENV
          echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――"
          echo ""
          echo ""
          sudo apt -qq update
          sudo apt -qq install tree
          echo ""
          echo ""
          echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――"
          echo ""
          echo ""
          echo " Runner .............. ${{ runner.name }}"
          echo " Workflow ............ ${{ github.workflow }} (#${{ github.workflow_ref }})"
          echo " Run Number .......... ${{ github.run_number }}"
          echo " Ref ................. ${{ github.ref }}"
          echo " Ref Name ............ ${{ github.ref_name }}"
          echo " Event Name .......... ${{ github.event_name }}"
          echo " Repo ................ ${{ github.repository }}"
          echo " Repo Owner .......... ${{ github.repository_owner }}"
          echo " Run ID .............. https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
          echo " Triggered By ........ ${{ github.actor }}"
          echo " SHA 1 (GITHUB_SHA) .. ${GITHUB_SHA}"
          echo " SHA 2 (github.sha) .. ${{ github.sha }}"
          echo " SHA 3 (env.SHA1) .... ${SHA1}"
          echo " SHA 4 (env.SHA1_GH) . ${SHA1_GH}"
          echo " Workspace ........... ${{ github.workspace }}"
          echo " PWD ................. ${PWD}"
          echo " Job Name ............ ${{ steps.context.outputs.job_name }}"
          echo " Job ID .............. ${{ steps.context.outputs.job_id }}"
          echo " Job URL ............. ${{ steps.context.outputs.job_url }}"
          echo " Run ID .............. ${{ steps.context.outputs.run_id }}"
          echo " Run Attempt ......... ${{ steps.context.outputs.run_attempt }}"
          echo " Run Number .......... ${{ steps.context.outputs.run_number }}"
          echo " Run URL ............. ${{ steps.context.outputs.run_url }}"
          echo " Run Env ............. ${{ steps.context.outputs.environment }}"
          echo " Run Env URL ......... ${{ steps.context.outputs.environment_url }}"
          echo " Run Deployment ...... ${{ steps.context.outputs.deployment_id }}"
          echo " Run Deployment URL .. ${{ steps.context.outputs.deployment_url }}"
          # NOTE(review): duplicate of the "Run Deployment" line above
          echo " Run Deployment ...... ${{ steps.context.outputs.deployment_id }}"
          echo " Run Runner Name ..... ${{ steps.context.outputs.runner_name }}"
          echo " Run Runner ID ....... ${{ steps.context.outputs.runner_id }}"
          echo " Year ................ ${YEAR}"
          echo " Now ................. ${NOW}"
          echo " Now (Short) ......... ${NOW_SHORT}"
          echo " Now (Long) .......... ${NOW_LONG}"
          echo " Now (Docker) ........ ${NOW_DOCKER}"
          echo " Now (Docker TS) ..... ${NOW_DOCKER_TS}"
          echo ""
          echo ""
          echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――"
          echo ""
          echo ""
          # NOTE(review): tree's -I option takes a wildcard pattern; `-I ./`
          # likely filters nothing — confirm the intended exclusions
          tree -I node_modules -I .git -I ./
          echo ""
          echo ""
          echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――"
          echo ""
          echo ""
| # # | |
| # Generate › Install Packages | |
| # # | |
| - name: "🧱 Install Packages" | |
| run: | | |
| sudo apt-get install -y ipcalc ed html2text whois uuid-runtime autoconf | |
| # # | |
| # Generate › Cache Packages | |
| # # | |
| - name: "🧱 Cache Packages" | |
| uses: awalsh128/cache-apt-pkgs-action@latest | |
| with: | |
| packages: ipcalc ed html2text whois uuid-runtime | |
| version: 1.0 | |
      # #
      #   Generate › Check Day by building date key
      # #
      # emits today's UTC date (YYYY-MM-DD) as step output `day`; its consumer
      # is not visible in this portion of the file — presumably a daily-run
      # dedup key; confirm downstream use
      - name: 'Build day key'
        id: task_blocklist_generate_date_key
        shell: bash
        run: |
          echo "day=$(date -u +'%Y-%m-%d')" >> "$GITHUB_OUTPUT"
      # #
      #   Generate › Resolve Run Mode
      # #
      # maps the trigger (cron schedule or manual RUN_MODE input) onto three
      # outputs consumed by the `if:` guards on the steps below:
      #   run_core    — every-4-hours core list generation
      #   run_daily   — once-daily list generation
      #   force_daily — set only when daily_only is chosen manually
      - name: 'Resolve run mode'
        id: task_blocklist_generate_run_mode
        run: |
          run_core="false"
          run_daily="false"
          force_daily="false"
          if [ "${{ github.event_name }}" = "schedule" ]; then
            # which cron fired is distinguished by the literal schedule string;
            # these must stay in sync with the `on.schedule` entries above
            if [ "${{ github.event.schedule }}" = "0 */4 * * *" ]; then
              run_core="true"
            elif [ "${{ github.event.schedule }}" = "15 1 * * *" ]; then
              run_daily="true"
            fi
          elif [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
            if [ "${{ inputs.RUN_MODE }}" = "core_only" ]; then
              run_core="true"
            elif [ "${{ inputs.RUN_MODE }}" = "daily_only" ]; then
              run_daily="true"
              force_daily="true"
            fi
          fi
          echo "run_core=${run_core}" >> "$GITHUB_OUTPUT"
          echo "run_daily=${run_daily}" >> "$GITHUB_OUTPUT"
          echo "force_daily=${force_daily}" >> "$GITHUB_OUTPUT"
| # # | |
| # Generate › Configure | |
| # | |
| # this step installs packages we need to manage ipsets. | |
| # - iprange | |
| # this package allows us to convert ip ranges into a CIDR formatted ip | |
| # 10.10.0.1-10.10.0.9 => 10.10.0.1 | |
| # 10.10.0.2/31 | |
| # 10.10.0.4/30 | |
| # 10.10.0.8/31 | |
| # https://github.com/firehol/iprange | |
| # # | |
| - name: '⚙️ Configure' | |
| id: task_blocklist_generate_configure | |
| if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true' | |
| run: | | |
| git clone https://github.com/firehol/iprange.git ./.temp/iprange | |
| cd .temp/iprange | |
| ./autogen.sh | |
| ./configure --disable-man | |
| sudo make && make install | |
| # # | |
| # Release › Dockerhub › Install Dependencies | |
| # # | |
| - name: '📦 Install Dependencies' | |
| if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true' | |
| run: | |
| sudo apt-get install -qq dos2unix | |
| # # | |
| # Release › Dockerhub › Execute dos2unix | |
| # # | |
| - name: '🔐 Apply dos2unix' | |
| if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true' | |
| run: | | |
| echo "⚠️⚠️⚠️ Running DOS2UNIX ⚠️⚠️⚠️" | |
| find .github/scripts -type f -name "*.sh" -print0 | xargs -0 dos2unix -- | |
| echo "✅✅✅ Completed DOS2UNIX ✅✅✅" | |
| # # | |
| # Generate › Set Template Permissions | |
| # # | |
| - name: "☑️ Set Permissions" | |
| id: task_blocklist_generate_perms | |
| run: | | |
| echo "🔧 Setting executable permissions for scripts" | |
| num_success=0 | |
| num_failure=0 | |
| for f in .github/scripts/*.sh; do | |
| echo "Setting executable permission on ${f}" | |
| chmod 755 "${f}" | |
| if [ -x "${f}" ]; then | |
| echo " ✅ ${f} is executable" | |
| num_success=$((num_success + 1)) | |
| else | |
| echo "::error file=${f},line=1,title=Permission Failed::Failed to set executable bit" | |
| num_failure=$((num_failure + 1)) | |
| fi | |
| echo "" | |
| done | |
| echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――" | |
| echo "✅ ${num_success} successfully verified" | |
| echo "❌ ${num_failure} failed" | |
| echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――" | |
| if [ "${num_failure}" -gt 0 ]; then | |
| exit 1 | |
| fi | |
| echo "🎉 All script permissions verified" | |
| # # | |
| # Generate › Set Env Variables | |
| # # | |
| - name: "📦 Set Env Variables" | |
| id: task_commit_pre | |
| if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true' | |
| run: | | |
| useragent="${{ vars.BL_GENERAL_USERAGENT }}" | |
| echo "USERAGENT=$(echo $useragent)" >> $GITHUB_ENV | |
      # #
      #   Generate › Master
      # #
      # builds the master, highrisk, and bogon ipsets; the master list sources
      # are stored as secrets (API_01_FILE_01 … _09) and passed as varargs
      - name: "🧱 Generate › Main"
        id: task_blocklist_generate_master
        if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true'
        run: |
          # #
          #   Main › Master
          #
          #   NOTE(review): the secret URLs are expanded into a shell string
          #   and run through eval — shell metacharacters in a secret value
          #   would be executed; confirm this is acceptable for these secrets
          # #
          run_master=".github/scripts/bl-master.sh blocklists/master.ipset ${{ secrets.API_01_FILE_01 }} ${{ secrets.API_01_FILE_02 }} ${{ secrets.API_01_FILE_03 }} ${{ secrets.API_01_FILE_04 }} ${{ secrets.API_01_FILE_05 }} ${{ secrets.API_01_FILE_06 }} ${{ secrets.API_01_FILE_07 }} ${{ secrets.API_01_FILE_08 }} ${{ secrets.API_01_FILE_09 }}"
          eval "./$run_master"
          # #
          #   Main › Highrisk
          # #
          run_highrisk=".github/scripts/bl-block.sh blocklists/highrisk.ipset highrisk"
          eval "./$run_highrisk"
          # #
          #   Main › BOGON
          # #
          ./.github/scripts/bogon.sh blocklists/bogon.ipset
      # #
      #   Generate › Privacy › A - G
      # #
      # builds the privacy_* ipsets for vendors A through G; network calls use
      # repo-level `vars.*` URLs/ASNs and the USERAGENT env var exported by
      # the "Set Env Variables" step above
      - name: "🧱 Generate › Privacy (A-G)"
        id: task_blocklist_generate_privacy_a_to_g
        if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true'
        run: |
          # #
          #   Privacy › General
          # #
          ./.github/scripts/bl-block.sh blocklists/privacy/privacy_general.ipset privacy
          # #
          #   Privacy › Activision
          # #
          ./.github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_activision.ipset "${{ vars.BL_ACTIVISION_URL }}"
          # #
          #   Privacy › Ahrefs
          # #
          curl -sSL -A "${{ env.USERAGENT }}" ${{ vars.BL_AI_AHREFS_URL }} | jq -r '.ips[].ip_address | select( . != null )' | .github/scripts/bl-format.sh blocklists/privacy/privacy_ahrefs.ipset
          # #
          #   Privacy › AI › ChatGPT
          # #
          curl -sSL "${{ vars.BL_AI_OPENAI_GPTBOT_URL }}" \
            | jq -r '.prefixes[] | .ipv4Prefix // .ipv6Prefix' \
            | sort -u \
            | .github/scripts/bl-format.sh blocklists/privacy/privacy_ai_openai_gpt_bot.ipset
          curl -sSL "${{ vars.BL_AI_OPENAI_GPTCHAT_URL }}" \
            | jq -r '.prefixes[] | .ipv4Prefix // .ipv6Prefix' \
            | sort -u \
            | .github/scripts/bl-format.sh blocklists/privacy/privacy_ai_openai_gpt_chat.ipset
          curl -sSL "${{ vars.BL_AI_OPENAI_SEARCHBOT_URL }}" \
            | jq -r '.prefixes[] | .ipv4Prefix // .ipv6Prefix' \
            | sort -u \
            | .github/scripts/bl-format.sh blocklists/privacy/privacy_ai_openai_gpt_searchbot.ipset
          # #
          #   Privacy › AI › Claude
          #
          #   drops everything after the "Phased out IP addresses" marker, then
          #   keeps only CIDR-looking tokens
          # #
          curl -sSL "${{ vars.BL_AI_CLAUDE_URL }}" \
            | sed '/Phased out IP addresses/,$d' \
            | grep -oE '[0-9a-fA-F:.]+/[0-9]+' \
            | sort -u \
            | .github/scripts/bl-format.sh blocklists/privacy/privacy_ai_claude_official.ipset
          # #
          #   Privacy › Akamai (Cloud / CDN)
          # #
          AKAMAI_IPv4=$( curl -sSL -A "${{ env.USERAGENT }}" ${{ vars.BL_AKAMAI_CORE_URL }}/akamai_ipv4_CIDRs.txt \
            | grep -vE '^(#|;|[[:space:]]*$)' )
          AKAMAI_IPv6=$( curl -sSL -A "${{ env.USERAGENT }}" ${{ vars.BL_AKAMAI_CORE_URL }}/akamai_ipv6_CIDRs.txt \
            | grep -vE '^(#|;|[[:space:]]*$)' )
          AKAMAI_LIST=$( echo -e "${AKAMAI_IPv4}\n${AKAMAI_IPv6}" | sort -u )
          echo "$AKAMAI_LIST" | .github/scripts/bl-format.sh blocklists/privacy/privacy_akamai.ipset
          # #
          #   Privacy › Amazon AWS
          #
          #   the jq query is embedded with escaped quotes because the command
          #   string is executed through eval
          # #
          run_amz_aws=".github/scripts/bl-json.sh blocklists/privacy/privacy_amazon_aws.ipset ${{ vars.BL_AMAZON_CORE_URL }} '.prefixes[] | select(.service==\"AMAZON\") | .ip_prefix'"
          eval "./$run_amz_aws"
          # #
          #   Privacy › Amazon EC2
          # #
          run_amz_ec2=".github/scripts/bl-json.sh blocklists/privacy/privacy_amazon_ec2.ipset ${{ vars.BL_AMAZON_CORE_URL }} '.prefixes[] | select(.service==\"EC2\") | .ip_prefix'"
          eval "./$run_amz_ec2"
          # #
          #   Privacy › Anthropic
          # #
          run_mip_anthropic=".github/scripts/bl-mip.sh blocklists/privacy/privacy_anthropic.ipset '${{ vars.BL_AI_ANTHROPIC_URL }}' privacy/anthropic"
          eval "./$run_mip_anthropic"
          # #
          #   Privacy › Apple Bot
          # #
          curl -sSL -A "${{ env.USERAGENT }}" ${{ vars.BL_APPLE_INC_BOT_URL }} | jq -r '.prefixes | .[] |.ipv4Prefix//empty,.ipv6Prefix//empty' | .github/scripts/bl-format.sh blocklists/privacy/privacy_apple_bot.ipset
          # #
          #   Privacy › Apple Proxy
          #   https://developer.apple.com/icloud/prepare-your-network-for-icloud-private-relay/
          # #
          curl -sSL -A "${{ env.USERAGENT }}" ${{ vars.BL_APPLE_INC_PROXY_URL }} \
            | awk -F',' 'NR>1{print $1}' \
            | BL_FORMAT_TRUSTED_INPUT=true BL_FORMAT_SKIP_BOGON_FILTER=true .github/scripts/bl-format.sh blocklists/privacy/privacy_apple_icloud.ipset
          # #
          #   Privacy › Alibaba
          # #
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_alibaba.ipset "${{ vars.BL_CHINA_ALIBABA_ASN }}"
          # #
          #   Privacy › Automattic / Tumblr / Wordpress.com
          # #
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_automattic.ipset "${{ vars.BL_AUTOMATTIC_WP_ASN }}"
          # #
          #   Privacy › Baidu
          # #
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_baidu.ipset "${{ vars.BL_CHINA_BAIDU_ASN }}"
          # #
          #   Privacy › Bing
          # #
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_bing.ipset "${{ vars.BL_BING_URL }}" '.prefixes | .[] |.ipv4Prefix//empty,.ipv6Prefix//empty'
          # #
          #   Privacy › Blizzard
          # #
          ./.github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_blizzard.ipset "${{ vars.BL_BLIZZARD_URL }}"
          # #
          #   Privacy › Bluehost
          # #
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_bluehost_eig.ipset "${{ vars.BL_BLUEHOST_EIG_ASN }}"
          # #
          #   Privacy › Bunny CDN
          # #
          BUNNYCDN_IPv4=$( curl -sSL ${{ vars.BL_BUNNYCDN_URL }}/plain \
            | grep -vE '^(#|;|[[:space:]]*$)' )
          BUNNYCDN_IPv6=$( curl -sSL ${{ vars.BL_BUNNYCDN_URL }}/ipv6 \
            | jq -r '.[] | select( . != null )' | grep -vE '^(#|;|[[:space:]]*$)' )
          BUNNYCDN_LIST=$( echo -e "${BUNNYCDN_IPv4}\n${BUNNYCDN_IPv6}" | sort -u )
          echo "$BUNNYCDN_LIST" | .github/scripts/bl-format.sh blocklists/privacy/privacy_bunnycdn.ipset
          # #
          #   Privacy › ByteDance / Tiktok
          # #
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_bytedance.ipset "${{ vars.BL_TIKTOK_BYTEDANCE_ASN }}"
          # Privacy › Censys, Inc.
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_censys_io.ipset "${{ vars.BL_CENSYS_IO_ASN }}"
          # Privacy › China › China Broadcasting Network
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_china_broadcasting_network.ipset "${{ vars.BL_CHINA_BROADCASTING_NETWORK_ASN }}"
          # Privacy › China › China Mobile
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_china_mobile.ipset "${{ vars.BL_CHINA_MOBILE_ASN }}"
          # Privacy › China › ChinaNET
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_china_chinanet.ipset "${{ vars.BL_CHINA_CHINANET_ASN }}"
          # Privacy › China › China / Shanghai Mobile Communications Co.,Ltd.
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_china_net_mobile.ipset "${{ vars.BL_CHINA_NETMOBILE_ASN }}"
          # Privacy › China › China Networks Inter-Exchange
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_china_networks.ipset "${{ vars.BL_CHINA_NETWORKS_ASN }}"
          # Privacy › China › Telecom
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_china_telecom.ipset "${{ vars.BL_CHINA_TELECOM_ASN }}"
          # Privacy › China › Tencent Networks
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_china_tencent.ipset "${{ vars.BL_CHINA_TENCENT_NET_ASN }}"
          # Privacy › China › Tietong
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_china_tietong.ipset "${{ vars.BL_CHINA_TIETONG_ASN }}"
          # Privacy › China › Unicorn
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_china_unicorn.ipset "${{ vars.BL_CHINA_UNICORN_ASN }}"
          # #
          #   Privacy › Cloudfront
          # #
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_cloudfront.ipset "${{ vars.BL_AMAZON_CLOUDFRONT_URL }}" 'map(.[]) | sort | .[]'
          # #
          #   Privacy › Cloudflare CDN
          # #
          CLOUDFLARE_IPv4=$( curl -sSL -A "${{ env.USERAGENT }}" ${{ vars.BL_CLOUDFLARE_URL }}/ips-v4 \
            | grep -vE '^(#|;|[[:space:]]*$)' )
          CLOUDFLARE_IPv6=$( curl -sSL -A "${{ env.USERAGENT }}" ${{ vars.BL_CLOUDFLARE_URL }}/ips-v6 \
            | grep -vE '^(#|;|[[:space:]]*$)' )
          CLOUDFLARE_LIST=$( echo -e "${CLOUDFLARE_IPv4}\n${CLOUDFLARE_IPv6}" | sort -u )
          echo "$CLOUDFLARE_LIST" | .github/scripts/bl-format.sh blocklists/privacy/privacy_cloudflarecdn.ipset
          # #
          #   Privacy › COLOCROSSING / HostPapa
          # #
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_colocrossing_hostpapa.ipset "${{ vars.BL_COLOCROSSING_HOSTPAPA_ASN }}"
          # #
          #   Privacy › Contabo Hosting
          # #
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_contabo_gmbh.ipset "${{ vars.BL_CONTABO_GMBH_ASN }}"
          # #
          #   Privacy › Digital Ocean
          # #
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_digitalocean_asn.ipset "${{ vars.BL_DIGITAL_OCEAN_ASN }}"
          # #
          #   Privacy › Digital Ocean
          # #
          curl -sSL -A "${{ env.USERAGENT }}" ${{ vars.BL_DIGITAL_OCEAN_URL }} \
            | cut -d, -f1 \
            | grep -vE '^(#|;|[[:space:]]*$)' \
            | sort -u \
            | .github/scripts/bl-format.sh blocklists/privacy/privacy_digitalocean_source.ipset
          # #
          #   Privacy › Dream Hosting
          # #
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_dreamhost.ipset "${{ vars.BL_DREAMHOST_ASN }}"
          # #
          #   Privacy › DuckDuckGo
          #
          #   Originally, we crawled DDG's Github docs page. However, it was shut down in 2025.
          #       curl -sSL -A "${{ env.USERAGENT }}" https://raw.githubusercontent.com/duckduckgo/duckduckgo-help-pages/master/_docs/results/duckduckbot.md | grep "^\- " | awk '{gsub("-",""); print}' | awk '{gsub(/ /,""); print}' | .github/scripts/bl-format.sh blocklists/privacy/privacy_duckduckgo.ipset
          #
          #   Now we use their official json list
          #       https://duckduckgo.com/duckduckbot.json
          # #
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_duckduckgo.ipset "${{ vars.BL_DUCKDUCKGO_URL }}" '.prefixes[] | select(.ipv4Prefix or .ipv6Prefix) | (.ipv4Prefix // .ipv6Prefix)'
          # #
          #   Privacy › Electronic Arts & IGN
          # #
          ./.github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_electronicarts_ign.ipset "${{ vars.BL_ELECTRONICARTS_URL }}"
          # #
          #   Privacy › Facebook
          #       https://github.com/SecOps-Institute/FacebookIPLists/blob/master/facebook_asn_list.lst
          #       https://bgp.he.net/search?search%5Bsearch%5D=facebook&commit=Search
          # #
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_facebook.ipset "${{ vars.BL_FACEBOOK_ASN }}"
          # #
          #   Privacy › Fastly
          # #
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_fastly.ipset ${{ vars.BL_FASTLY_URL }} 'map(.[]) | .[]'
          # #
          #   Privacy › Github
          #
          #   https://api.github.com/meta
          # #
          GITHUB_URL="${{ vars.BL_GITHUB_URL }}"
          # NOTE(review): the escaped \" below only makes sense if bl-json.sh
          # evals its arguments — the other direct bl-json.sh calls in this
          # step pass plain quotes; confirm the escaping reaches jq correctly
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_github_all.ipset $GITHUB_URL '.. | strings | select(test(\"^[0-9a-fA-F:.]+/[0-9]+$\"))'
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_github_hooks.ipset $GITHUB_URL '.hooks[]'
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_github_web.ipset $GITHUB_URL '.web[]'
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_github_api.ipset $GITHUB_URL '.api[]'
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_github_git.ipset $GITHUB_URL '.git[]'
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_github_packages.ipset $GITHUB_URL '.packages[]'
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_github_pages.ipset $GITHUB_URL '.pages[]'
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_github_importer.ipset $GITHUB_URL '.importer[]'
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_github_actions.ipset $GITHUB_URL '.actions[]'
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_github_actions_macos.ipset $GITHUB_URL '.actions_macos[]'
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_github_codespaces.ipset $GITHUB_URL '.codespaces[]'
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_github_copilot.ipset $GITHUB_URL '.copilot[]'
          # #
          #   Privacy › Google › Core
          # #
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_google_core.ipset "${{ vars.BL_GOOGLE_BOTS_URL }}" '.prefixes[] | select(.ipv4Prefix or .ipv6Prefix) | (.ipv4Prefix // .ipv6Prefix)'
          # #
          #   Privacy › Google › Bot
          # #
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_google_bots.ipset "${{ vars.BL_GOOGLE_BOTS_URL }}" '.prefixes | .[] |.ipv4Prefix//empty,.ipv6Prefix//empty'
          # #
          #   Privacy › Google › Cloud
          # #
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_google_cloud.ipset "${{ vars.BL_GOOGLE_CLOUD_URL }}" '.prefixes[] | select(.ipv4Prefix or .ipv6Prefix) | (.ipv4Prefix // .ipv6Prefix)'
| # Generate › Privacy › H - N | |
| # # | |
| - name: '🧱 Generate › Privacy (H-N)' | |
| id: task_blocklist_generate_privacy_h_to_n | |
| if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true' | |
| run: | | |
| # # | |
| # Privacy › Huawei | |
| # # | |
| ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_huawei.ipset "${{ vars.BL_HUAWEI_ASN }}" | |
| # # | |
| # Privacy › Linode | |
| # # | |
| curl -fsSL "${{ vars.BL_LINODE_URL }}" | awk -F',' '{print $1}' | sort -u | .github/scripts/bl-format.sh blocklists/privacy/privacy_linode.ipset | |
| # # | |
| # Privacy › Microsoft › 365 | |
| # # | |
| MS_365_URL="${{ vars.BL_MICROSOFT_365_URL }}?clientrequestid=$(uuidgen)" | |
| ./.github/scripts/bl-json.sh blocklists/privacy/privacy_microsoft_365.ipset $MS_365_URL '.[] | .ips[]?' | |
| # # | |
| # Privacy › Microsoft › Azure | |
| # | |
| # - Public Cloud | |
| # - US Government | |
| # - Germany Cloud | |
| # - China Cloud | |
| # | |
| # @ref https://azservicetags.azurewebsites.net/ | |
| # # | |
| MS_AZURE_URL_PUBLIC=$( curl -fsSL "${{ vars.BL_MICROSOFT_AZURE_PUBLIC_URL }}" | grep -o 'https://download.microsoft.com/download[^"]*ServiceTags_Public_[0-9]\+\.json' | head -n1 ) | |
| ./.github/scripts/bl-json.sh blocklists/privacy/privacy_microsoft_azure_public.ipset $MS_AZURE_URL_PUBLIC '.values[].properties.addressPrefixes[]' | |
| MS_AZURE_URL_USGOV=$( curl -fsSL "${{ vars.BL_MICROSOFT_AZURE_USGOV_URL }}" | grep -o 'https://download.microsoft.com/download[^"]*ServiceTags_AzureGovernment_[0-9]\+\.json' | head -n1 ) | |
| ./.github/scripts/bl-json.sh blocklists/privacy/privacy_microsoft_azure_usgov.ipset $MS_AZURE_URL_USGOV '.values[].properties.addressPrefixes[]' | |
| MS_AZURE_URL_GERMANY=$( curl -fsSL "${{ vars.BL_MICROSOFT_AZURE_GERMANY_URL }}" | grep -o 'https://download.microsoft.com/download[^"]*ServiceTags_AzureGermany_[0-9]\+\.json' | head -n1 ) | |
| ./.github/scripts/bl-json.sh blocklists/privacy/privacy_microsoft_azure_usgov.ipset $MS_AZURE_URL_GERMANY '.values[].properties.addressPrefixes[]' | |
| MS_AZURE_URL_CHINA=$( curl -fsSL "${{ vars.BL_MICROSOFT_AZURE_CHINA_URL }}" | grep -o 'https://download.microsoft.com/download[^"]*ServiceTags_China_[0-9]\+\.json' | head -n1 ) | |
| ./.github/scripts/bl-json.sh blocklists/privacy/privacy_microsoft_azure_china.ipset $MS_AZURE_URL_CHINA '.values[].properties.addressPrefixes[]' | |
| # # | |
| # Privacy › Nintendo | |
| # # | |
| ./.github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_nintendo.ipset "${{ vars.BL_NINTENDO_URL }}" | |
| # # | |
| # Generate › Privacy › O - T | |
| # # | |
      - name: '🧱 Generate › Privacy (O-T)'
        id: task_blocklist_generate_privacy_o_to_t
        if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true'
        run: |
          # #
          #   Privacy › Oracle › OCI (EC2 / GCP)
          # #
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_oracle_oci.ipset "${{ vars.BL_ORACLE_OCI_URL }}" '.regions[].cidrs[] | select(.tags[] == "OCI") | .cidr'
          # #
          #   Privacy › Oracle › Oracle services network (OSN)
          # #
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_oracle_osn.ipset "${{ vars.BL_ORACLE_OSN_URL }}" '.regions[].cidrs[] | select(.tags[] == "OSN") | .cidr'
          # #
          #   Privacy › Oracle › Object Storage
          # #
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_oracle_object_storage.ipset "${{ vars.BL_ORACLE_OBJ_URL }}" '.regions[].cidrs[] | select(.tags[] == "OBJECT_STORAGE") | .cidr'
          # #
          #   Privacy › Oracle › All
          # #
          ./.github/scripts/bl-json.sh blocklists/privacy/privacy_oracle_all.ipset "${{ vars.BL_ORACLE_ALL_URL }}" '.regions[].cidrs[] | .cidr'
          # #
          #   Privacy › Pandora
          # #
          ./.github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_pandora.ipset "${{ vars.BL_PANDORA_URL }}"
          # #
          #   Privacy › Pingdom
          #
          #   Fetches the v4 and v6 probe lists separately, strips comment / blank
          #   lines, merges and de-duplicates them before formatting.
          #   NOTE(review): curl runs without -f here, so an HTTP error page would be
          #   fed into the list — confirm whether -f (as used elsewhere) is wanted.
          # #
          PINGDOM_IPv4=$( curl -sSL "${{ vars.BL_PINGDOM_URL }}/ipv4" \
              | grep -vE '^(#|;|[[:space:]]*$)')
          PINGDOM_IPv6=$( curl -sSL "${{ vars.BL_PINGDOM_URL }}/ipv6" \
              | grep -vE '^(#|;|[[:space:]]*$)')
          PINGDOM_LIST=$( echo -e "${PINGDOM_IPv4}\n${PINGDOM_IPv6}" | sort -u )
          echo "$PINGDOM_LIST" | .github/scripts/bl-format.sh blocklists/privacy/privacy_pingdom.ipset
          # #
          #   Privacy › Pirate Bay
          # #
          ./.github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_piratebay.ipset "${{ vars.BL_PIRATEBAY_URL }}"
          # #
          #   Privacy › ProtonVPN
          #
          #   Gathering these is tricky. Requires the authentication COOKIE / session_id to access API.
          #       User Id     =>  secrets.BL_PROTON_VPN_USERID
          #       Cookie      =>  secrets.BL_PROTON_VPN_AUTHCOOKIE
          #       Session id  =>  secrets.BL_PROTON_VPN_SESSIONID
          #
          #   jq pulls entry/exit IPs (v4 and v6) from every logical server and
          #   drops nulls before de-duplicating.
          # #
          curl -s \
              -H 'x-pm-appversion: AndroidVPN_5.5.52' \
              -H 'x-pm-apiversion: 3' \
              -H 'x-pm-locale: en_US' \
              -H 'x-pm-uid: ${{ secrets.BL_PROTON_VPN_USERID }}' \
              -H 'cookie: ${{ secrets.BL_PROTON_VPN_AUTHCOOKIE }}; Session-Id=${{ secrets.BL_PROTON_VPN_SESSIONID }}; Tag=default; Theme=1; Domain=protonvpn.com; st=eyJ0IjoiZiIsImgiOiIwIn0' \
              '${{ vars.BL_PROTON_VPN_URL }}' \
              | jq -r '.LogicalServers[] | .Servers[]? | .EntryIP, .ExitIP, .ExitIPv6 | select(. != null)' \
              | sort -u \
              | .github/scripts/bl-format.sh blocklists/privacy/privacy_proton_vpn.ipset
          # Privacy › Punkbuster
          ./.github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_punkbuster.ipset "${{ vars.BL_PUNKBUSTER_URL }}"
          # Privacy › Riot Games
          ./.github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_riot_games.ipset "${{ vars.BL_RIOTGAMES_URL }}"
          # Privacy › RSS API
          curl -sSL -A "${{ env.USERAGENT }}" "${{ vars.BL_RSSAPI_URL }}" | .github/scripts/bl-format.sh blocklists/privacy/privacy_rssapi.ipset
          # Privacy › Shadow Server
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_shadowserver.ipset "${{ vars.BL_SHADOWSERVER_FOUNDATION_ASN }}"
          # Privacy › Sony › All
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_sony_all.ipset "${{ vars.BL_SONY_ALL_ASN }}"
          # Privacy › Sony › Sony Network Communications Inc. (Japan/ISP/So-net)
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_sony_network_communications.ipset "${{ vars.BL_SONY_NETWORK_COMMUNICATIONS_ASN }}"
          # Privacy › Sony › Sony Interactive Entertainment LLC (Playstation)
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_sony_playstation_network.ipset "${{ vars.BL_SONY_PLAYSTATION_NETWORK_ASN }}"
          # Privacy › Sony › Sony Pictures Technologies Inc.
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_sony_pictures_technologies.ipset "${{ vars.BL_SONY_PICTURES_TECHNOLOGIES_ASN }}"
          # Privacy › Sony › User Reported
          ./.github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_sony_userflagged.ipset "${{ vars.BL_SONY_USERFLAGGED_URL }}"
          # Privacy › Steam
          ./.github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_steam.ipset "${{ vars.BL_STEAM_URL }}"
          # Privacy › Stripe › API
          curl -sSL -A "${{ env.USERAGENT }}" "${{ vars.BL_STRIPE_API_URL }}" | .github/scripts/bl-format.sh blocklists/privacy/privacy_stripe_api.ipset
          # Privacy › Stripe › Armada Gator
          curl -sSL -A "${{ env.USERAGENT }}" "${{ vars.BL_STRIPE_ARMADA_GATOR_URL }}" | .github/scripts/bl-format.sh blocklists/privacy/privacy_stripe_armada_gator.ipset
          # Privacy › Stripe › Webhooks
          curl -sSL -A "${{ env.USERAGENT }}" "${{ vars.BL_STRIPE_WEBHOOKS_URL }}" | .github/scripts/bl-format.sh blocklists/privacy/privacy_stripe_webhooks.ipset
          # Privacy › Telegram
          curl -sSL -A "${{ env.USERAGENT }}" "${{ vars.BL_TELEGRAM_URL }}" | .github/scripts/bl-format.sh blocklists/privacy/privacy_telegram.ipset
          # Privacy › Tumblr
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_tumblr.ipset "${{ vars.BL_TUMBLR_ASN }}"
          # Privacy › Twitter / X
          ./.github/scripts/bl-whois.sh blocklists/privacy/privacy_twitter_x.ipset "${{ vars.BL_TWITTER_INC_X_ASN }}"
| # # | |
| # Generate › Privacy › U - Z | |
| # # | |
      - name: '🧱 Generate › Privacy (U-Z)'
        id: task_blocklist_generate_privacy_u_to_z
        if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true'
        run: |
          # Privacy › Ubisoft
          ./.github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_ubisoft.ipset "${{ vars.BL_UBISOFT_URL }}"
          # Privacy › Uptime Robot
          curl -sSL -A "${{ env.USERAGENT }}" "${{ vars.BL_UPTIMEROBOT_URL }}" | .github/scripts/bl-format.sh blocklists/privacy/privacy_uptimerobot.ipset
          # Privacy › WebPageTest
          # Fetched through a proxy; -k disables TLS verification for that proxy.
          # NOTE(review): -k means the proxy's certificate is never validated — confirm
          # this is intentional (self-signed proxy cert?) rather than a leftover.
          curl -sSLk -A "${{ env.USERAGENT }}" "${{ secrets.PROXY_WEB_URL_HTTPS }}/json?url=https%3A%2F%2Fwebpagetest.org%2Faddresses.php%3Ff%3Djson" | jq -r '.data[].addresses[] | select( . != null )' | .github/scripts/bl-format.sh blocklists/privacy/privacy_webpagetest.ipset
          # Privacy › Xfire
          ./.github/scripts/tool-range-iprange.sh blocklists/privacy/privacy_xfire.ipset "${{ vars.BL_XFIRE_URL }}"
| # # | |
| # Generate › Spam | |
| # # | |
| - name: '🧱 Generate › Spam' | |
| id: task_blocklist_generate_spam | |
| if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true' | |
| run: | | |
| run_spamhaus=".github/scripts/bl-plain.sh blocklists/spam/spam_spamhaus.ipset ${{ vars.BL_SPAM_SPAMHAUS_SRC }}" | |
| eval "./$run_spamhaus" | |
| # # | |
| # Generate › Spam › Forums | |
| # | |
| # only updated once per day (daily path) | |
| # # | |
| - name: '🧱 Generate › Spam › Forums (1/day)' | |
| id: task_blocklist_spam_generate_forums | |
| if: steps.task_blocklist_generate_run_mode.outputs.run_daily == 'true' | |
| run: | | |
| chmod +x ".github/scripts/bl-plain.sh" | |
| run_forums=".github/scripts/bl-plain.sh blocklists/spam/spam_forums.ipset ${{ vars.BL_SPAM_FORUMS_SRC }}" | |
| eval "./$run_forums" | |
| # # | |
| # Generate › Internet Service Provider | |
| # | |
| # @resources https://ftp.arin.net/info/asn.txt | |
| # https://networksdb.io | |
| # https://rapidapi.com | |
| # https://ip.guide/ | |
| # https://2ip.io | |
| # https://ip2location.com | |
| # https://ipqualityscore.com | |
| # https://ipinfo.io | |
| # https://radb.net | |
| # https://bgpview.io | |
| # @info script on another server is responsible for ensuring this workflow list is kept up to date with the correct ASN. | |
| # we use numerous resources to compare ASNs to see which ones are active and which ones have been migrated. | |
| # # | |
      - name: '🧱 Generate › ISP'
        id: task_blocklist_generate_isp
        if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true'
        run: |
          # Each line resolves one ISP's ASN list (repository variable) to CIDR
          # ranges via whois and writes the resulting ipset.
          # ISP › AOL
          ./.github/scripts/bl-block.sh blocklists/isp/isp_aol.ipset isp/aol.ipset
          # ISP › ATT
          ./.github/scripts/bl-whois.sh blocklists/isp/isp_att.ipset "${{ vars.BL_ATT_ASN }}"
          # ISP › Cablevision | Later merged with Suddenlink
          ./.github/scripts/bl-whois.sh blocklists/isp/isp_cablevision.ipset "${{ vars.BL_CABLEVISION_ASN }}"
          # ISP › Charter & Spectrum (Previously Time Warner Cable)
          ./.github/scripts/bl-whois.sh blocklists/isp/isp_charter_spectrum_timewarnercable.ipset "${{ vars.BL_CHARTER_SPECTRUM_TIMEWARNERCABLE_ASN }}"
          # ISP › Comcast
          ./.github/scripts/bl-whois.sh blocklists/isp/isp_comcast.ipset "${{ vars.BL_COMCAST_ASN }}"
          # ISP › Cox Communications
          # https://networksdb.io/ip-addresses-of/cox-communications-inc
          ./.github/scripts/bl-whois.sh blocklists/isp/isp_cox_communications.ipset "${{ vars.BL_COX_COMMUNICATIONS_ASN }}"
          # ISP › Embarq
          ./.github/scripts/bl-whois.sh blocklists/isp/isp_embarq.ipset "${{ vars.BL_EMBARQ_ASN }}"
          # ISP › Frontier Communications
          ./.github/scripts/bl-whois.sh blocklists/isp/isp_frontier_communications.ipset "${{ vars.BL_FRONTIER_COMMUNICATIONS_ASN }}"
          # ISP › Qwest / CenturyLink
          ./.github/scripts/bl-whois.sh blocklists/isp/isp_qwest_centurylink.ipset "${{ vars.BL_QWEST_CENTURYLINK_ASN }}"
          # ISP › Sprint
          ./.github/scripts/bl-whois.sh blocklists/isp/isp_sprint.ipset "${{ vars.BL_SPRINT_ASN }}"
          # ISP › Verizon
          # https://networksdb.io/search/org/verizon
          ./.github/scripts/bl-whois.sh blocklists/isp/isp_verizon.ipset "${{ vars.BL_VERIZON_ASN }}"
          # ISP › SpaceX Starlink
          ./.github/scripts/bl-whois.sh blocklists/isp/isp_spacex_starlink.ipset "${{ vars.BL_SPACEX_STARLINK_ASN }}"
          # ISP › Suddenlink / Altice / Optimum
          # NOTE(review): the only entry with a hard-coded URL instead of a
          # repository variable — consider moving AS19108 to a var like the rest.
          ./.github/scripts/bl-json.sh blocklists/isp/isp_suddenlink_altice_optimum.ipset https://ip.guide/AS19108 '.routes | .v4//empty,.v6//empty | .[]'
| # # | |
| # Generate › Geographical › Geolite2 › Setup | |
| # | |
#   this step should only be run once per day (at 1:15am UTC).
| # The vars defined below are used for caching. The current day of the year + year are calculated, this allows | |
| # the same cached files to be used in a 24 hour period. When the day of the year changes, a new set of geo files will | |
| # be updated. | |
| # | |
| # CACHE VARS: year_week outputs the current week of the year, and year | |
| # 51_2024 | |
| # | |
| # year_day outputs the current day of the year, and year | |
| # 308_2024 | |
| # # | |
| - name: '🧱 Geographical › GeoLite2 (Setup)' | |
| id: task_blocklist_geographical_generate_setup | |
| if: steps.task_blocklist_generate_run_mode.outputs.run_daily == 'true' | |
| run: | | |
| should_run="true" | |
| force_run="${{ steps.task_blocklist_generate_run_mode.outputs.force_daily }}" | |
| echo "should_run=${should_run}" >> "$GITHUB_OUTPUT" | |
| echo "force_run=${force_run}" >> "$GITHUB_OUTPUT" | |
| echo "year_week=$(date +'%U_%Y')" >> $GITHUB_ENV | |
| echo "year_day=$(date +'%j_%Y')" >> $GITHUB_ENV | |
| # # | |
| # Generate › Geographical › Geolite2 › Cache | |
| # | |
| # uses the same cache in a 24 hour period. | |
| # | |
| # @output cache-hit | |
| # only run step if cache hit found | |
| # if: steps.task_blocklist_geographical_generate_cache.outputs.cache-hit == 'true' | |
| # # | |
      - name: '🧱 Geographical › GeoLite2 (Cache)'
        id: task_blocklist_geographical_generate_cache
        uses: actions/cache@v5
        if: steps.task_blocklist_geographical_generate_setup.outputs.should_run == 'true'
        with:
          # Caches the downloaded GeoLite2 data so it is reused within a 24h window.
          path: .github/.daily/geolite2
          # NOTE(review): the key references step id `task_blocklist_generate_date_key`,
          # which is not visible in this part of the workflow — verify that step
          # exists earlier in the file, otherwise `outputs.day` expands empty and
          # the cache key degenerates to a constant.
          key: daily-${{ runner.os }}-geolite2-${{ steps.task_blocklist_generate_date_key.outputs.day }}
| # # | |
| # Generate › Geographical › Geolite2 › Build | |
| # # | |
| - name: '🧱 Geographical › GeoLite2 (1/day)' | |
| id: task_blocklist_generate_geolite2_country | |
| if: steps.task_blocklist_geographical_generate_setup.outputs.should_run == 'true' && ( steps.task_blocklist_geographical_generate_setup.outputs.force_run == 'true' || steps.task_blocklist_geographical_generate_cache.outputs.cache-hit != 'true' ) | |
| run: | | |
| chmod +x ".github/scripts/bl-geolite2.sh" | |
| run_geolite2=".github/scripts/bl-geolite2.sh -l ${{ secrets.API_GEOLITE2_KEY }}" | |
| eval "./$run_geolite2" | |
| # # | |
| # Generate › ASN | |
| # | |
| # ASN numbers are automatically managed & updated via a backend system. Store ASNs as github variable. | |
| # # | |
      - name: '🧱 Geographical › GeoLite2 › ASN (1/day)'
        id: task_blocklist_generate_geolite2_asn
        if: steps.task_blocklist_geographical_generate_setup.outputs.should_run == 'true' && ( steps.task_blocklist_geographical_generate_setup.outputs.force_run == 'true' || steps.task_blocklist_geographical_generate_cache.outputs.cache-hit != 'true' )
        run: |
          # One bl-geolite2_asn.sh call per organization: --folder is the output
          # subfolder letter, --file the ipset name, --asn the ASN list held in a
          # repository variable. BL_GEOLITE2_REUSE_TEMP lets the calls share the
          # downloaded MaxMind DB instead of re-fetching it each time.
          # NOTE(review): the --asn vars are deliberately unquoted — presumably so a
          # space-separated list of ASNs expands to multiple arguments; confirm
          # before quoting them.
          export MAXMIND_LICENSE_KEY="${{ secrets.API_GEOLITE2_KEY }}"
          export BL_GEOLITE2_REUSE_TEMP=true
          rm -rf .github/temp .github/.temp
          ./.github/scripts/bl-geolite2_asn.sh --folder A --file akamai_core --asn ${{ vars.BL_AKAMAI_CORE_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder A --file akamai_linode --asn ${{ vars.BL_AKAMAI_LINODE_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder A --file alibaba --asn ${{ vars.BL_CHINA_ALIBABA_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder A --file amazon_core --asn ${{ vars.BL_AMAZON_CORE_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file amazon_cloudfront --asn ${{ vars.BL_AMAZON_CLOUDFRONT_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder A --file apple_inc --asn ${{ vars.BL_APPLE_INC_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder A --file att --asn ${{ vars.BL_ATT_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder A --file automattic --asn ${{ vars.BL_AUTOMATTIC_WP_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder B --file baidu --asn ${{ vars.BL_CHINA_BAIDU_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder B --file bluehost_eig --asn ${{ vars.BL_BLUEHOST_EIG_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder B --file bunnycdn --asn ${{ vars.BL_BUNNYCDN_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file cablevision --asn ${{ vars.BL_CABLEVISION_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file censys_io --asn ${{ vars.BL_CENSYS_IO_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file charter_spectrum_timewarnercable --asn ${{ vars.BL_CHARTER_SPECTRUM_TIMEWARNERCABLE_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file china_broadcasting_network --asn ${{ vars.BL_CHINA_BROADCASTING_NETWORK_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file china_mobile --asn ${{ vars.BL_CHINA_MOBILE_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file china_chinanet --asn ${{ vars.BL_CHINA_CHINANET_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file china_net_mobile --asn ${{ vars.BL_CHINA_NETMOBILE_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file china_networks --asn ${{ vars.BL_CHINA_NETWORKS_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file china_telecom --asn ${{ vars.BL_CHINA_TELECOM_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file china_tencent_net --asn ${{ vars.BL_CHINA_TENCENT_NET_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file china_tietong --asn ${{ vars.BL_CHINA_TIETONG_ASN }}
          # NOTE(review): "unicorn" is likely a long-standing misspelling of China
          # Unicom — the file/var names are a published interface, so not renamed here.
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file china_unicorn --asn ${{ vars.BL_CHINA_UNICORN_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file cloudflare --asn ${{ vars.BL_CLOUDFLARE_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file colocrossing_hostpapa --asn ${{ vars.BL_COLOCROSSING_HOSTPAPA_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file comcast --asn ${{ vars.BL_COMCAST_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file contabo_gmbh --asn ${{ vars.BL_CONTABO_GMBH_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder C --file cox_communications --asn ${{ vars.BL_COX_COMMUNICATIONS_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder D --file digital_ocean --asn ${{ vars.BL_DIGITAL_OCEAN_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder D --file dreamhost --asn ${{ vars.BL_DREAMHOST_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder E --file embarq --asn ${{ vars.BL_EMBARQ_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder F --file facebook --asn ${{ vars.BL_FACEBOOK_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder F --file fastly --asn ${{ vars.BL_FASTLY_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder F --file fdc_servers --asn ${{ vars.BL_FDC_SERVERS_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder F --file frontier_communications --asn ${{ vars.BL_FRONTIER_COMMUNICATIONS_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder G --file github_inc --asn ${{ vars.BL_GITHUB_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder G --file godaddy --asn ${{ vars.BL_GODADDY_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder G --file google --asn ${{ vars.BL_GOOGLE_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder H --file haproxy_technologies_inc --asn ${{ vars.BL_HAPROXY_TECHNOLOGIES_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder H --file hetzner --asn ${{ vars.BL_HETZNER_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder H --file hostinger --asn ${{ vars.BL_HOSTINGER_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder I --file inmotion_hosting --asn ${{ vars.BL_INMOTION_HOSTING_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder I --file ionos_1and1 --asn ${{ vars.BL_IONOS_1AND1_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder I --file internet_measurement_com --asn ${{ vars.BL_INTERNET_MEASUREMENT_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder K --file korea_telecom --asn ${{ vars.BL_KOREA_TELECOM_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder L --file leaseweb_hosting --asn ${{ vars.BL_LEASEWEB_HOSTING_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder M --file microsoft_corp --asn ${{ vars.BL_MICROSOFT_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder N --file namecheap --asn ${{ vars.BL_NAMECHEAP_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder N --file nintendo --asn ${{ vars.BL_NINTENDO_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder O --file onyphe_io --asn ${{ vars.BL_ONYPHE_IO_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder O --file oracle --asn ${{ vars.BL_ORACLE_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder O --file ovh_cloud --asn ${{ vars.BL_OVH_CLOUD_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder P --file psychz_networks --asn ${{ vars.BL_PSYCHZ_NETWORKS_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder Q --file quadra_net --asn ${{ vars.BL_QUADRA_NET_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder Q --file qwest_centurylink --asn ${{ vars.BL_QWEST_CENTURYLINK_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder R --file rackspace_hosting --asn ${{ vars.BL_RACKSPACE_HOSTING_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder R --file recyber_net --asn ${{ vars.BL_RECYBER_NET_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder R --file russia_crimeacom_south_llc --asn ${{ vars.BL_RUSSIA_CRIMEACOM_SOUTH_LLC_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder R --file russia_zummer_llc --asn ${{ vars.BL_RUSSIA_ZUMMER_LLC_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder S --file scaleway_sas --asn ${{ vars.BL_SCALEWAY_SAS_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder S --file servers_dot_com_inc --asn ${{ vars.BL_SERVERS_DOT_COM_INC_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder S --file shadowserver_foundation --asn ${{ vars.BL_SHADOWSERVER_FOUNDATION_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder S --file shodan_io --asn ${{ vars.BL_SHODAN_IO_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder S --file sharktech --asn ${{ vars.BL_SHARKTECH_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder S --file stackpath_blackhost --asn ${{ vars.BL_STACKPATH_BLACKHOST_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder S --file spacex_starlink --asn ${{ vars.BL_SPACEX_STARLINK_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder S --file sprint --asn ${{ vars.BL_SPRINT_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder T --file tiktok_bytedance --asn ${{ vars.BL_TIKTOK_BYTEDANCE_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder T --file tumblr --asn ${{ vars.BL_TUMBLR_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder T --file twitter_x --asn ${{ vars.BL_TWITTER_INC_X_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder U --file upcloud_usa_inc --asn ${{ vars.BL_UPCLOUD_USA_INC_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder U --file usa_department_of_defense_dnic --asn ${{ vars.BL_USA_DOD_DNIC_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder U --file usa_internet_corp --asn ${{ vars.BL_USA_INTERNET_CORP_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder V --file verizon --asn ${{ vars.BL_VERIZON_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder V --file vietel --asn ${{ vars.BL_VIETNAM_VIETEL_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder V --file vultr --asn ${{ vars.BL_VULTR_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder Y --file yahoo --asn ${{ vars.BL_YAHOO_ASN }}
          ./.github/scripts/bl-geolite2_asn.sh --folder Y --file youtube --asn ${{ vars.BL_YOUTUBE_ASN }}
          # Drop intermediate per-family folders and the shared temp download dirs.
          rm -rf blocklists/geolite/asn/ipv4 blocklists/geolite/asn/ipv6
          rm -rf .github/temp .github/.temp
| # # | |
| # Generate › Transmission | |
| # # | |
| - name: '🧱 Generate › Transmission' | |
| id: task_blocklist_generate_transmission | |
| if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true' | |
| run: | | |
| run_bt=".github/scripts/bt-transmission.sh" | |
| eval "./$run_bt" | |
| # # | |
| # Delete Leftover Temps | |
| # # | |
| - name: '🧱 Generate › Cleanup Temps' | |
| id: task_blocklist_generate_cleanup | |
| if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true' | |
| run: | | |
| echo "🧹 Cleaning up leftover .tmp files..." | |
| find ./blocklists -type f -name '*.tmp' -print -delete | |
| echo "✅ Cleanup complete." | |
| # # | |
| # Generate › Verbose › List Tree | |
| # # | |
| - name: '🌲 Debug › Verbose Tree Listing' | |
| if: steps.task_blocklist_generate_run_mode.outputs.run_core == 'true' | |
| run: | | |
| echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――" | |
| echo " PWD ..................... ${PWD}" | |
| echo " GITHUB.WORKSPACE ........ ${{ github.workspace }}" | |
| echo -e | |
| echo -e | |
| tree -I node_modules -I .git -I ./ | |
| echo "―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――" | |
| # # | |
| # Generate › SFTP / SSH › Upload Artifact | |
| # # | |
| #- name: 'SSH › Zip Artifacts' | |
| # id: generate_artifact_zip | |
| # run: | | |
| # REPO_NAME="${{ github.event.repository.name }}" # csf-firewall | |
| # BRANCH_NAME="${{ github.ref_name }}" # main | |
| # RUN_ID="${{ github.run_id }}" # 17373119899 | |
| # ARTIFACT_NAME="artifact_${BRANCH_NAME}_${RUN_ID}.zip" # artifacts_main_xxxxxx.zip | |
| # ARTIFACT_PATH="${REPO_NAME}/${ARTIFACT_NAME}" # csf-firewall/artifacts_main_xxxxxx.zip | |
| # echo "REPO_NAME=$REPO_NAME" >> $GITHUB_ENV # csf-firewall | |
| # echo "ARTIFACT_NAME=$ARTIFACT_NAME" >> $GITHUB_ENV # artifacts_main_xxxxxx.zip | |
| # echo "ARTIFACT_PATH=$ARTIFACT_PATH" >> $GITHUB_ENV # csf-firewall/artifacts_main_xxxxxx.zip | |
| # echo "artifact-name=$ARTIFACT_NAME" >> $GITHUB_OUTPUT # artifacts_main_xxxxxx.zip | |
| # echo "artifact-path=$ARTIFACT_PATH" >> $GITHUB_OUTPUT # csf-firewall/artifacts_main_xxxxxx.zip | |
| # zip -r "$ARTIFACT_NAME" ./* # delete original zip before push | |
| # # | |
| # Generate › SFTP / SSH › Setup SSH Key | |
| # # | |
| #- name: 'SSH › Upload Artifacts' | |
| # run: | | |
| # # Make sure the .ssh folder exists | |
| # mkdir -p ~/.ssh | |
| # echo "${{ secrets.ARTIFACTS_GITHUB_SSH_PRIVATE_KEY }}" > "$HOME/.ssh/github_sftp_key" | |
| # chmod 600 "$HOME/.ssh/github_sftp_key" | |
| # # Upload to repo-specific directory | |
| # sftp -P ${{ secrets.ARTIFACTS_PORT }} \ | |
| # -i "$HOME/.ssh/github_sftp_key" \ | |
| # -o StrictHostKeyChecking=no \ | |
| # github@${{ secrets.ARTIFACTS_DOMAIN }} <<EOF | |
| # mkdir $REPO_NAME | |
| # cd $REPO_NAME | |
| # put $ARTIFACT_NAME | |
| # bye | |
| # EOF | |
| # # | |
| # Generate › Artifact › Upload | |
| # # | |
| - name: "🎁 Generate › Upload Artifact" | |
| id: task_blocklist_generate_artifact_upload | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: blocklist-latest | |
| path: ./ | |
| retention-days: 1 | |
| # # | |
| # Job › Commit | |
| # # | |
| blocklist-commit: | |
| name: >- | |
| 📋 Commit | |
| runs-on: ubuntu-latest | |
| # runs-on: apollo-x64 | |
| timeout-minutes: 120 | |
| needs: [ blocklist-setup, blocklist-generate ] | |
| steps: | |
| # # | |
| # Commit › Checkout | |
| # # | |
      # full history (fetch-depth: 0) so the commit step can push on top of the
      # branch without a shallow-clone rejection
      - name: >-
          ☑️ Checkout
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
| # # | |
| # Commit › Artifact › Setup SSH | |
| # # | |
| #- name: '🎁 Commit › Setup SSH Artifact' | |
| # run: | | |
| # mkdir -p ~/.ssh | |
| # echo "${{ secrets.ARTIFACTS_GITHUB_SSH_PRIVATE_KEY }}" > "$HOME/.ssh/github_sftp_key" | |
| # chmod 600 "$HOME/.ssh/github_sftp_key" | |
| # # | |
| # Commit › Artifact › Download | |
| # # | |
| #- name: '🎁 Commit › Download Artifact' | |
| # run: | | |
| # ARTIFACT_PATH="${{ needs.blocklist-generate.outputs.artifact-path }}" # csf-firewall/artifacts_main_xxxxxx.zip | |
| # ARTIFACT_NAME="$(basename "$ARTIFACT_PATH")" # artifacts_main_xxxxxx.zip | |
| # REPO_NAME="$(dirname "$ARTIFACT_PATH")" # csf-firewall | |
| # echo "Downloading $ARTIFACT_PATH" | |
| # sftp -P ${{ secrets.ARTIFACTS_PORT }} \ | |
| # -i "$HOME/.ssh/github_sftp_key" \ | |
| # -o StrictHostKeyChecking=no \ | |
| # github@${{ secrets.ARTIFACTS_DOMAIN }} <<EOF | |
| # cd $REPO_NAME | |
| # get $ARTIFACT_NAME | |
| # bye | |
| # EOF | |
| # unzip -o "$ARTIFACT_NAME" # unzip -o artifacts_main_xxxxxx.zip | |
| # rm -f "$ARTIFACT_NAME" # rm -f artifacts_main_xxxxxx.zip | |
| # # | |
| # Commit › GPG › Import Key (No Passphrase) | |
| # | |
| # requires your GPG private key, converted to base64 binary .gpg (not armored .asc) | |
| # | |
| # this is utilized to generate signed hash digest | |
| # | |
| # find . -maxdepth 1 \( -name '*.zip' -o -name '*.gz' \) -printf '%P\n' | xargs -r sha1sum | gpg --digest-algo sha256 --clearsign > sha1sum.txt.asc | |
| # find . -maxdepth 1 \( -name '*.zip' -o -name '*.gz' \) -printf '%P\n' | xargs -r sha256sum | gpg --digest-algo sha256 --clearsign > sha256 | |
| # # | |
      - name: '🪪 Commit › GPG › Import Signing Key › W/o Passphrase'
        # NOTE(review): GPG_KEY_BASE64 / GPG_KEY_PASSPHRASE are assumed to be
        # mapped from secrets in a workflow/job-level `env:` block outside this
        # section — confirm they exist there.
        if: env.GPG_KEY_BASE64 != '' && env.GPG_KEY_PASSPHRASE == ''
        run: |
          # Decode the base64-encoded binary key and import it into the keyring.
          echo "$GPG_KEY_BASE64" | base64 -di | gpg --import
| # # | |
| # Commit › GPG › Import Key (With Passphrase) | |
| # | |
| # requires your GPG private key, converted to base64 binary .gpg (not armored .asc) | |
| # | |
| # this is utilized to generate signed hash digest | |
| # | |
| # find . -maxdepth 1 \( -name '*.zip' -o -name '*.gz' \) -printf '%P\n' | xargs -r sha1sum | gpg --digest-algo sha256 --clearsign > sha1sum.txt.asc | |
#           find . -maxdepth 1 \( -name '*.zip' -o -name '*.gz' \) -printf '%P\n' | xargs -r sha256sum | gpg --digest-algo sha256 --clearsign > sha256sum.txt.asc
| # # | |
# #
#   Imports the bot's binary GPG private key when it is protected by a
#   passphrase, then strips the passphrase from the imported secret key so
#   later non-interactive signing needs no pinentry:
#     1. decode the base64 key to a temp file and import it with the
#        passphrase fed on fd 0 (loopback pinentry)
#     2. remove the decoded key file immediately — private key material must
#        not sit in /tmp for the remainder of the job
#     3. `--change-passphrase` on the last listed secret key, answering with
#        the old passphrase then two empty lines (new passphrase = empty)
# #
- name: '🪪 Commit › GPG › Import Signing Key › w/ Passphrase'
  if: env.GPG_KEY_BASE64 != '' && env.GPG_KEY_PASSPHRASE != ''
  run: |
    echo "$GPG_KEY_BASE64" | base64 -di > /tmp/signing-key.gpg
    echo "$GPG_KEY_PASSPHRASE" | gpg --pinentry-mode loopback --passphrase-fd 0 --import /tmp/signing-key.gpg
    rm -f /tmp/signing-key.gpg
    (echo "$GPG_KEY_PASSPHRASE"; echo; echo) | gpg --command-fd 0 --pinentry-mode loopback --change-passphrase $(gpg --list-secret-keys --with-colons 2> /dev/null | grep '^sec:' | cut --delimiter ':' --fields 5 | tail -n 1)
| # # | |
| # Commit › Artifact › Download | |
| # # | |
# #
#   Pulls the `blocklist-latest` artifact produced by an earlier job in this
#   workflow run into the repo root, so the commit steps below can stage the
#   freshly generated blocklist files.
# #
- name: "🎁 Commit › Download Artifact"
  id: task_commit_artifact_download
  uses: actions/download-artifact@v4
  with:
    name: blocklist-latest
    path: ./
| # # | |
| # Commit › Precommit | |
| # # | |
# #
#   Builds the commit message used by the auto-commit step and exports it
#   (plus the UTC timestamp) to later steps via $GITHUB_ENV.
#
#   Exports:
#     COMMIT_MESSAGE  e.g. `️️🔒 Sync 🔒` `09/28/2025 14:05 UTC`
#     NOW             UTC timestamp in mm/dd/YYYY HH:MM
#
#   Fix: the original appended `>> $GITHUB_ENV` to the two plain shell
#   assignments — that redirects an empty stdout and exports nothing; only
#   `echo "KEY=VALUE" >> $GITHUB_ENV` persists a variable across steps.
# #
- name: "📦 Commit › Pre-commit"
  id: task_commit_pre
  run: |
    now=$(date -u '+%m/%d/%Y %H:%M')
    commit_label="Sync"
    commit_message="\`️️🔒 $commit_label 🔒\` \`$now UTC\`"
    echo "COMMIT_MESSAGE=$commit_message" >> "$GITHUB_ENV"
    echo "NOW=$now" >> "$GITHUB_ENV"
| # # | |
| # Commit › Update README | |
| # # | |
# #
#   Regenerates README.md via the repo's helper script before committing.
#
#   Fix: the original stored the fixed command in a variable and ran it
#   through `eval` — `eval` re-parses the string through the shell for no
#   benefit and is an injection footgun; invoking the script directly is
#   byte-for-byte the same command.
# #
- name: "📄 Commit › Update README"
  id: task_commit_readme_update
  run: |
    chmod +x ".github/scripts/update-readme.sh"
    ./.github/scripts/update-readme.sh README.md
| # # | |
| # Commit › GPG Key | |
| # | |
| # required in order to do signed commits | |
| # # | |
# #
#   Imports the armored GPG key through crazy-max/ghaction-import-gpg and
#   configures git to sign commits with it; the action's `name`/`email`
#   outputs are consumed by the auto-commit step below.
#   NOTE(review): ADMINSERV_GPG_KEY_ASC / ADMINSERV_GPG_PASSPHRASE are not in
#   the secrets documented in this file's header (which lists
#   BOT_GPG_KEY_ASC / BOT_GPG_PASSPHRASE) — confirm the secret names exist in
#   the repo, or update the header.
# #
- name: "📦 Commit › GPG Key"
  id: task_commit_gpg
  uses: crazy-max/ghaction-import-gpg@v6
  with:
    gpg_private_key: ${{ secrets.ADMINSERV_GPG_KEY_ASC }}
    passphrase: ${{ secrets.ADMINSERV_GPG_PASSPHRASE }}
    git_user_signingkey: true
    git_commit_gpgsign: true
| # # | |
| # Commit › Commit | |
| # # | |
# #
#   Commits any working-tree changes (updated blocklists + README) as the
#   GPG-signed bot identity. COMMIT_MESSAGE comes from the Pre-commit step;
#   author/committer identity comes from the ghaction-import-gpg step's
#   outputs, so the commit is attributed to the imported key's uid.
# #
- name: "📦 Commit › Execute"
  id: task_commit_execute
  uses: stefanzweifel/git-auto-commit-action@v5
  with:
    commit_message: ${{ env.COMMIT_MESSAGE }}
    commit_author: "${{ steps.task_commit_gpg.outputs.name }} <${{ steps.task_commit_gpg.outputs.email }}>"
    commit_user_name: ${{ steps.task_commit_gpg.outputs.name }}
    commit_user_email: ${{ steps.task_commit_gpg.outputs.email }}
| # # | |
| # Commit › Clean up Artifacts | |
| # | |
| # Left-behind artifacts cleaned up after 5 days | |
| # # | |
| # - name: '🎁 Commit › Cleanup Old Artifacts' | |
| # run: | | |
| # ssh -p ${{ secrets.ARTIFACTS_PORT }} \ | |
| # -i ~/.ssh/github_sftp_key \ | |
| # -o StrictHostKeyChecking=no \ | |
| # github@${{ secrets.ARTIFACTS_DOMAIN }} \ | |
| # "find ~/public_html -name 'artifacts_*.zip' -type f -mtime +5 -delete" |