From 8e91639ac9777173ade43360e0d1634c11ae289d Mon Sep 17 00:00:00 2001
From: TalhaMangarah
Date: Mon, 16 Feb 2026 02:10:38 +0000
Subject: [PATCH 1/3] fix: NewhamCouncil - disable SSL verification to resolve certificate verification errors

---
NOTE(review): verify=False disables TLS certificate validation for this
request. Acceptable only as a last resort for a council endpoint with a
broken certificate chain; consider suppressing urllib3's
InsecureRequestWarning and revisiting once the site's certificate is fixed.

 uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py b/uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py
index 937f32af02..bfc22a40db 100644
--- a/uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py
@@ -19,7 +19,7 @@ def parse_data(self, page: str, **kwargs) -> dict:
             raise ValueError(f"Error getting identifier: {str(e)}")
 
         # Make a BS4 object
-        page = requests.get(url)
+        page = requests.get(url, verify=False)
         soup = BeautifulSoup(page.text, "html.parser")
         soup.prettify
 

From f0b83be54a995935cec28fcd043b291c4a2dedb9 Mon Sep 17 00:00:00 2001
From: TalhaMangarah
Date: Mon, 16 Feb 2026 02:13:21 +0000
Subject: [PATCH 2/3] fix: NewhamCouncil - keep DD/MM/YYYY datetime parsing

Newham is a UK council, and the collection dates on its page are in UK
day-first order, so "%d/%m/%Y" is the correct strptime format. The
previously proposed switch to "%m/%d/%Y" would silently swap day and
month for days 1-12 and raise ValueError for any day after the 12th of
the month; that change is dropped and the DD/MM/YYYY format is kept.
---
 uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py b/uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py
index bfc22a40db..7b1aacb105 100644
--- a/uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py
@@ -49,7 +49,7 @@ def parse_data(self, page: str, **kwargs) -> dict:
                 .find_next("mark")
                 .next_sibling.strip()
             )
-            next_collection = datetime.strptime(date, "%d/%m/%Y")
+            next_collection = datetime.strptime(date, "%d/%m/%Y")
 
             dict_data = {
                 "type": bin_type,

From d05e2c7db97b49fca63c6b27521b75623b47f5c3 Mon Sep 17 00:00:00 2001
From: TalhaMangarah
Date: Mon, 16 Feb 2026 02:13:45 +0000
Subject: [PATCH 3/3] feat: NewhamCouncil - add food waste collection scraping

---
 .../uk_bin_collection/councils/NewhamCouncil.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py b/uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py
index 7b1aacb105..68ea843ef3 100644
--- a/uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py
@@ -36,13 +36,20 @@ def parse_data(self, page: str, **kwargs) -> dict:
         if len(sections_recycling) > 0:
             sections.append(sections_recycling[0])
 
+        # as well as one for food waste
+        sections_food_waste = soup.find_all(
+            "div", {"class": "card h-100 card-food"}
+        )
+        if len(sections_food_waste) > 0:
+            sections.append(sections_food_waste[0])
+
         # For each bin section, get the text and the list elements
         for item in sections:
             header = item.find("div", {"class": "card-header"})
             bin_type_element = header.find_next("b")
             if bin_type_element is not None:
                 bin_type = bin_type_element.text
-                array_expected_types = ["Domestic", "Recycling"]
+                array_expected_types = ["Domestic", "Recycling", "Food Waste"]
                 if bin_type in array_expected_types:
                     date = (
                         item.find_next("p", {"class": "card-text"})