Skip to content

Get and Save GitHub IPs #107

Get and Save GitHub IPs

Get and Save GitHub IPs #107

Workflow file for this run

name: Get and Save GitHub IPs

# Run on manual dispatch or daily at midnight UTC.
on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'

# Required so the auto-commit action can push the updated list back.
permissions:
  contents: write

jobs:
  get_and_commit_ips:
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v4

      - name: Fetch and Filter GitHub Meta IPs
        shell: bash
        run: |
          # Fail fast: without pipefail a curl/jq failure is masked by the
          # exit status of the final python3 stage, and an empty or partial
          # IP list could be committed silently.
          set -euo pipefail

          # 1. Create a Python script to handle subnet collapsing.
          #    This removes IPs that are already contained in larger networks
          #    (e.g. removing a /32 if it lies inside a /20).
          cat << 'EOF' > collapse_ips.py
          import sys
          import ipaddress

          # Read whitespace-separated CIDR entries from stdin.
          input_data = sys.stdin.read().split()

          # Parse valid IPv4 networks, skipping anything malformed.
          networks = []
          for line in input_data:
              try:
                  networks.append(ipaddress.IPv4Network(line.strip()))
              except ValueError:
                  continue  # Skip invalid entries

          # collapse_addresses deduplicates, removes contained subnets,
          # and merges adjacent ones.
          for net in ipaddress.collapse_addresses(networks):
              print(str(net))
          EOF

          # 2. Fetch the meta endpoint, keep only IPv4 CIDR strings, and
          #    collapse them. `map(strings)` drops nulls in case any of the
          #    selected keys is absent from the API response (test() would
          #    otherwise error on null).
          curl -fsSL "https://api.github.com/meta" | jq -r '
            {
              hooks: .hooks,
              web: .web,
              api: .api,
              git: .git,
              github_enterprise_importer: .github_enterprise_importer,
              packages: .packages,
              pages: .pages,
              importer: .importer
            } |
            flatten |
            map(strings) |
            map(select(. | test("^\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(\\/\\d{1,2})?$"))) |
            unique |
            .[]
          ' | python3 collapse_ips.py > github_ip_list.txt

          # 3. Clean up the helper script.
          rm collapse_ips.py

          # Debug output to verify content length in logs.
          echo "Generated IP list with $(wc -l < github_ip_list.txt) entries."

      - name: Commit and Push Changes
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: Automated update of GitHub IP list
          file_pattern: github_ip_list.txt
          # The action automatically handles "No changes to commit" scenarios