-
-
Notifications
You must be signed in to change notification settings - Fork 41
Expand file tree
/
Copy pathcheck_url.py
More file actions
138 lines (110 loc) · 4.85 KB
/
check_url.py
File metadata and controls
138 lines (110 loc) · 4.85 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
import json
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse
# Suppress warnings from insecure requests, as we will disable SSL verification.
# NOTE(review): every request below uses verify=False, which disables TLS
# certificate validation — acceptable for a best-effort status probe, but
# confirm this is intentional before reusing the pattern elsewhere.
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
# --- CONFIGURATION ---
# The source URL to fetch the master list of shops from.
SOURCE_URL = "https://opennx.github.io/tinfoil.json"
# Ghostland shops mapping to their "/up" check URLs.
# NOTE(review): all three hosts deliberately(?) point at the same
# https://nx.ghostland.at/up endpoint — presumably one status page covers
# every Ghostland mirror; confirm the retro/saves mirrors have no own /up.
GHOSTLAND_SHOPS = {
    "nx.ghostland.at": "https://nx.ghostland.at/up",
    "nx-retro.ghostland.at": "https://nx.ghostland.at/up",
    "nx-saves.ghostland.at": "https://nx.ghostland.at/up"
}
# --- DATA FETCHING ---
def fetch_shop_list():
    """Download and decode the master shop list from SOURCE_URL.

    Returns:
        The parsed JSON payload on success, or ``None`` when the HTTP
        request fails or the body is not valid JSON (a FATAL message is
        printed in either case).
    """
    try:
        print(f"Fetching master shop list from {SOURCE_URL}...")
        # verify=False: certificate validation is intentionally disabled
        # for this best-effort probe (warnings are suppressed at import time).
        resp = requests.get(SOURCE_URL, timeout=15, verify=False)
        resp.raise_for_status()
        # Note: success is reported before parsing, so a JSON error can
        # still follow this line — matches the script's original output.
        print("Successfully fetched master list.")
        return resp.json()
    except requests.exceptions.RequestException as exc:
        print(f"FATAL: Could not fetch master shop list: {exc}")
    except json.JSONDecodeError:
        print("FATAL: Failed to parse master shop list as JSON.")
    return None
# --- STATUS CHECKING FUNCTIONS ---
def check_ghostland_status(status_url):
    """Ghostland shops return plain 'ok' on their /up endpoint when operational.

    Returns "Online", "Offline", or "Check failed" (on any request error).
    """
    probe_headers = {'User-Agent': 'Python Status Checker/2.5'}
    try:
        resp = requests.get(status_url, timeout=10, headers=probe_headers, verify=False)
        resp.raise_for_status()
        body = resp.text.strip().lower()
    except requests.exceptions.RequestException as exc:
        print(f"[Ghostland check error] {status_url}: {exc}")
        return "Check failed"
    # The endpoint's contract: a bare "ok" body means the shop is up.
    return "Online" if body == "ok" else "Offline"
def check_generic_url(url):
    """Performs a comprehensive check on a generic URL.

    Heuristics applied in order: HTTP status, Content-Type, maintenance
    title, known error-page phrases, known "working shop" markers, and
    finally a near-empty-body check.

    Returns a short status string such as "Online", "Offline (503)",
    "Invalid content", "Under maintenance", "Error/Placeholder",
    "Possibly blank", or "Connection failed".
    """
    try:
        # FIX: with-block guarantees the streamed response's pooled
        # connection is released on every exit path (the original never
        # closed the Response, leaking a connection per call).
        with requests.get(url, timeout=10, stream=True, verify=False) as response:
            if response.status_code != 200:
                return f"Offline ({response.status_code})"
            content_type = response.headers.get('Content-Type', '').lower()
            if 'text/html' not in content_type:
                return "Invalid content"
            # Cap the read at ~200 KB so a huge page cannot stall the checker;
            # decode_content handles gzip/deflate transfer encodings.
            content = response.raw.read(200000, decode_content=True).decode('utf-8', 'ignore').lower()
        soup = BeautifulSoup(content, "html.parser")
        # FIX: guard soup.title.string too — it is None for an empty
        # <title></title>, and .strip() on None raised an uncaught
        # AttributeError in the original.
        if soup.title and soup.title.string:
            title_text = soup.title.string.strip().lower()
        else:
            title_text = ""
        if "maintenance" in title_text:
            return "Under maintenance"
        broken_indicators = ["default web page", "site not found", "502 bad gateway", "error 403"]
        if any(bad in content for bad in broken_indicators):
            return "Error/Placeholder"
        working_indicators = [".nsp", ".xci", "/files/", "tinfoil", ".nsz", ".iso",
                              "eshop", "shop", "switch", "game", "region", "release",
                              "\"files\":", "\"directories\":", "index.html", "server"]
        if any(good in content for good in working_indicators):
            return "Online"
        if len(content.strip()) < 300:
            return "Possibly blank"
        return "Online"
    except requests.exceptions.RequestException as e:
        print(f" - Error connecting to {url}: {e}")
        return "Connection failed"
# --- MAIN SCRIPT LOGIC ---
def main():
    """Fetch the remote shop list, check each shop, and rewrite the local
    tinfoil.json with the refreshed list plus a status summary in 'success'."""
    master_data = fetch_shop_list()
    if not master_data:
        return

    status_parts = []
    print("\nChecking individual shop statuses...")
    for full_url in master_data.get("directories", []):
        host = urlparse(full_url).netloc or full_url
        print(f"-> Checking '{host}' ({full_url})...")
        # Ghostland mirrors are probed via their dedicated /up endpoint;
        # any other shop gets the generic heuristic check. next() scans the
        # dict in insertion order, matching the original loop's behavior.
        ghost_host = next((g for g in GHOSTLAND_SHOPS if g in full_url), None)
        if ghost_host is not None:
            status = check_ghostland_status(GHOSTLAND_SHOPS[ghost_host])
        else:
            status = check_generic_url(full_url)
        print(f" - Status: {status}")
        status_parts.append(f"{host}: {status}")

    # Tinfoil displays the 'success' field as a message to the user.
    master_data["success"] = (
        "Open NX Shops status list:\n\n" +
        "\n".join(status_parts) +
        "\n\nSuggest a new shop on:\nhttps://opennx.github.io/"
    )

    try:
        with open("tinfoil.json", "w", encoding="utf-8") as f:
            json.dump(master_data, f, ensure_ascii=False, indent=4)
        print("\nSuccessfully updated local tinfoil.json with the latest shop list and statuses.")
    except IOError as e:
        print(f"\nError: Could not write to tinfoil.json: {e}")
# Script entry point: run the checker only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()