-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathextract_from_html_table.py
More file actions
137 lines (107 loc) · 4.98 KB
/
extract_from_html_table.py
File metadata and controls
137 lines (107 loc) · 4.98 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
#!/usr/bin/env python3
"""
Extract ALL data from the HTML table that's already on the page
The website has a table (grdMaps) with all districts, taluks, hoblis, and villages!
"""
import requests
from bs4 import BeautifulSoup
import json
from collections import defaultdict
def extract_all_data_from_table():
    """Extract district/taluk/hobli/village data from the grdMaps HTML table.

    Fetches the Karnataka land-records page, parses the table with
    id="grdMaps", and groups its rows into a nested
    district -> taluk -> hobli -> [villages] structure. District dropdown
    values are matched by label; taluk/hobli values are left empty because
    they are only obtainable via form submissions.

    Returns:
        list[dict] | None: Sorted list of district dicts, each with
        ``value``, ``label`` and nested ``taluks``; ``None`` if the
        grdMaps table is missing from the page.

    Raises:
        requests.HTTPError: If the page request returns an error status.
        requests.Timeout: If the server does not respond within the timeout.
    """
    print("Fetching website HTML...")
    url = "https://landrecords.karnataka.gov.in/service3/"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
    }
    # Timeout so a stalled server cannot hang the script indefinitely.
    response = requests.get(url, headers=headers, timeout=30)
    response.raise_for_status()
    print("Parsing HTML and extracting table data...")
    soup = BeautifulSoup(response.content, 'html.parser')
    # Find the table with id="grdMaps"
    table = soup.find('table', {'id': 'grdMaps'})
    if not table:
        print("❌ Table not found!")
        return None
    print("✅ Found data table!")
    # Extract all rows (skip header)
    rows = table.find_all('tr')[1:]  # Skip header row
    print(f"Found {len(rows)} data rows")
    # Organize data by district -> taluk -> hobli -> villages
    data_structure = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
    district_map = {}  # Map district labels to dropdown option values
    # First, get district values from the dropdown. Guard against a missing
    # <select> (would otherwise raise AttributeError on find_all).
    district_select = soup.find('select', {'name': 'ddl_district'})
    if district_select:
        for option in district_select.find_all('option'):
            value = option.get('value', '')
            label = option.text.strip()
            # Skip placeholder entries ('0' / 'All') and empty values.
            if value and value not in ['0', 'All']:
                district_map[label] = value
    # Extract data from table rows
    for row in rows:
        cells = row.find_all('td')
        if len(cells) >= 4:
            # Each cell wraps its text in a <span>; fall back to '' if absent.
            district_name = cells[0].find('span').text.strip() if cells[0].find('span') else ''
            taluk_name = cells[1].find('span').text.strip() if cells[1].find('span') else ''
            hobli_name = cells[2].find('span').text.strip() if cells[2].find('span') else ''
            village_name = cells[3].find('span').text.strip() if cells[3].find('span') else ''
            if district_name and taluk_name and hobli_name and village_name:
                # Deduplicate villages within each hobli.
                if village_name not in data_structure[district_name][taluk_name][hobli_name]:
                    data_structure[district_name][taluk_name][hobli_name].append(village_name)
    # Convert the nested defaultdicts into the final sorted list-of-dicts shape.
    all_data = []
    for district_name, taluks in sorted(data_structure.items()):
        district_value = district_map.get(district_name, '')
        district_data = {
            'value': district_value,
            'label': district_name,
            'taluks': []
        }
        for taluk_name, hoblis in sorted(taluks.items()):
            taluk_data = {
                'value': '',  # Will need to be filled from form submissions
                'label': taluk_name,
                'hoblis': []
            }
            for hobli_name, villages in sorted(hoblis.items()):
                hobli_data = {
                    'value': '',  # Will need to be filled from form submissions
                    'label': hobli_name,
                    # NOTE: village 'value' is just the 1-based position in the
                    # sorted list, not the site's real dropdown value.
                    'villages': [{'value': str(i + 1), 'label': v} for i, v in enumerate(sorted(villages))]
                }
                taluk_data['hoblis'].append(hobli_data)
            district_data['taluks'].append(taluk_data)
        all_data.append(district_data)
    return all_data
if __name__ == "__main__":
print("=" * 60)
print("Extracting ALL Data from HTML Table")
print("=" * 60)
print()
try:
data = extract_all_data_from_table()
if data:
# Save to JSON
output_file = "complete-karnataka-data-from-html.json"
with open(output_file, "w", encoding="utf-8") as f:
json.dump(data, f, indent=2, ensure_ascii=False)
print("\n" + "=" * 60)
print("✅ Extraction Complete!")
print("=" * 60)
print(f"Total districts: {len(data)}")
total_taluks = sum(len(d["taluks"]) for d in data)
total_hoblis = sum(len(t["hoblis"]) for d in data for t in d["taluks"])
total_villages = sum(len(h["villages"]) for d in data for t in d["taluks"] for h in t["hoblis"])
print(f"Total taluks: {total_taluks}")
print(f"Total hoblis: {total_hoblis}")
print(f"Total villages: {total_villages}")
print(f"\n💾 Data saved to: {output_file}")
print("\n⚠️ Note: Taluk and Hobli 'value' fields are empty.")
print(" They need to be filled by matching with dropdown values.")
else:
print("❌ Failed to extract data")
except Exception as e:
print(f"\n❌ Error: {e}")
import traceback
traceback.print_exc()