From 70fa0619d46623f173b2c8f2e30746cfeafc150f Mon Sep 17 00:00:00 2001
From: Adrian Rumpold
Date: Tue, 10 Aug 2021 20:11:14 +0200
Subject: [PATCH] fix: Simplify HTTP error handling

---
 .../home_assistant_covid19_augsburg/crawler.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/custom_components/home_assistant_covid19_augsburg/crawler.py b/custom_components/home_assistant_covid19_augsburg/crawler.py
index 5c9a190..e77d4dc 100644
--- a/custom_components/home_assistant_covid19_augsburg/crawler.py
+++ b/custom_components/home_assistant_covid19_augsburg/crawler.py
@@ -97,8 +97,7 @@ class CovidCrawler(CovidCrawlerBase):
         import requests
 
         result = requests.get(url)
-        if not result.ok:
-            result.raise_for_status()
+        result.raise_for_status()
 
         soup = BeautifulSoup(result.text, "html.parser")
         match = soup.find(class_="frame--type-textpic")
@@ -153,7 +152,9 @@ class CovidCrawler(CovidCrawlerBase):
     async def crawl_vaccination(self) -> VaccinationData:
         _log.info("Fetching COVID-19 vaccination data update")
 
-        url = "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/impfzentrum"
+        url = (
+            "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/impfzentrum"
+        )
         container_id = "c1088140"
 
         if self.hass:
@@ -165,8 +166,7 @@ class CovidCrawler(CovidCrawlerBase):
         import requests
 
         result = requests.get(url)
-        if not result.ok:
-            result.raise_for_status()
+        result.raise_for_status()
 
         soup = BeautifulSoup(result.text, "html.parser")
         result = soup.find(id=container_id)