diff --git a/custom_components/home_assistant_covid19_augsburg/__init__.py b/custom_components/home_assistant_covid19_augsburg/__init__.py
index 491e446..82f5b0d 100644
--- a/custom_components/home_assistant_covid19_augsburg/__init__.py
+++ b/custom_components/home_assistant_covid19_augsburg/__init__.py
@@ -66,8 +66,8 @@ async def get_coordinator(hass):
         return hass.data[DOMAIN]
 
     async def async_get_data() -> IncidenceData:
-        crawler = CovidCrawler()
-        return crawler.crawl()
+        crawler = CovidCrawler(hass)
+        return await crawler.crawl()
 
     hass.data[DOMAIN] = DataUpdateCoordinator(
         hass,
diff --git a/custom_components/home_assistant_covid19_augsburg/crawler.py b/custom_components/home_assistant_covid19_augsburg/crawler.py
index 1b1727f..af668f1 100644
--- a/custom_components/home_assistant_covid19_augsburg/crawler.py
+++ b/custom_components/home_assistant_covid19_augsburg/crawler.py
@@ -5,8 +5,8 @@ import re
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
 
-import requests
 from bs4 import BeautifulSoup
+from homeassistant.helpers import aiohttp_client
 
 _log = logging.getLogger(__name__)
 
@@ -29,12 +29,13 @@ class CovidCrawlerBase(ABC):
 
 
 class CovidCrawler(CovidCrawlerBase):
-    def __init__(self) -> None:
+    def __init__(self, hass) -> None:
         self.url = (
             "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/fallzahlen"
         )
+        self.hass = hass
 
-    def crawl(self) -> IncidenceData:
+    async def crawl(self) -> IncidenceData:
         """
         Fetch COVID-19 infection data from the target website.
         """
@@ -43,11 +44,9 @@ class CovidCrawler(CovidCrawlerBase):
 
         locale.setlocale(locale.LC_ALL, "de_DE.utf8")
 
-        result = requests.get(self.url)
-        if not result.ok:
-            result.raise_for_status()
-
-        soup = BeautifulSoup(result.text, features="html.parser")
+        result = await aiohttp_client.async_get_clientsession(self.hass).get(self.url)
+        result.raise_for_status()
+        soup = BeautifulSoup(await result.text(), features="html.parser")
 
         match = soup.find(class_="frame--type-textpic")
         text = match.p.text