From 975b7666e2c06a458d218a757a5f73691a689b36 Mon Sep 17 00:00:00 2001
From: Adrian Rumpold
Date: Fri, 18 Jun 2021 17:17:28 +0200
Subject: [PATCH] feat: Implement HA sensor to expose data

---
 .../crawler.py                                | 54 +++++++++++++---
 .../home_assistant_covid19_augsburg/sensor.py | 62 +++++++++++++++++++
 2 files changed, 107 insertions(+), 9 deletions(-)
 create mode 100644 custom_components/home_assistant_covid19_augsburg/sensor.py

diff --git a/custom_components/home_assistant_covid19_augsburg/crawler.py b/custom_components/home_assistant_covid19_augsburg/crawler.py
index af668f1..9dab378 100644
--- a/custom_components/home_assistant_covid19_augsburg/crawler.py
+++ b/custom_components/home_assistant_covid19_augsburg/crawler.py
@@ -11,6 +11,12 @@ from homeassistant import aiohttp_client
 
 _log = logging.getLogger(__name__)
 
+def parse_num(s, t=int):
+    if len(s):
+        return t(s.replace(".", "").replace(",", "."))
+    return 0
+
+
 @dataclass
 class IncidenceData:
     location: str
@@ -29,7 +35,7 @@ class CovidCrawlerBase(ABC):
 
 
 class CovidCrawler(CovidCrawlerBase):
-    def __init__(self, hass) -> None:
+    def __init__(self, hass=None) -> None:
         self.url = (
             "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/fallzahlen"
         )
@@ -42,10 +48,18 @@ class CovidCrawler(CovidCrawlerBase):
 
         _log.info("Fetching COVID-19 data update")
 
-        locale.setlocale(locale.LC_ALL, "de_DE.utf8")
+        if self.hass:
+            result = await aiohttp_client.async_get_clientsession(self.hass).get(
+                self.url
+            )
+            soup = BeautifulSoup(await result.text(), "html.parser")
+        else:
+            import requests
 
-        result = await aiohttp_client.async_get_clientsession(self.hass).get(self.url)
-        soup = BeautifulSoup(await result.text(), features="html.parser")
+            result = requests.get(self.url)
+            if not result.ok:
+                result.raise_for_status()
+            soup = BeautifulSoup(result.text, "html.parser")
 
         match = soup.find(class_="frame--type-textpic")
         text = match.p.text
@@ -55,16 +69,35 @@
         if not matches:
             raise ValueError("Could not extract incidence from scraped web page")
 
-        incidence = locale.atof(matches.group(1))
+        incidence = parse_num(matches.group(1), t=float)
         _log.debug(f"Parsed incidence: {incidence}")
 
         text = match.h2.text
-        matches = re.search(r"\((\d+\. \w+)\)", text)
+        matches = re.search(r"\((\d+)\. (\w+)\)", text)
         if not matches:
             raise ValueError("Could not extract date from scraped web page")
 
-        date = datetime.datetime.strptime(matches.group(1), "%d. %B")
-        date = date.replace(year=datetime.datetime.now().year).date()
+        months = [
+            "Januar",
+            "Februar",
+            "März",
+            "April",
+            "Mai",
+            "Juni",
+            "Juli",
+            "August",
+            "September",
+            "Oktober",
+            "November",
+            "Dezember",
+        ]
+        day = parse_num(matches.group(1))
+        month_name = matches.group(2)
+        date = datetime.date(
+            year=datetime.datetime.now().year,
+            month=1 + months.index(month_name),
+            day=day,
+        )
         _log.debug(f"Parsed date: {date}")
 
         match = match.find_next_sibling(class_="frame--type-textpic")
@@ -83,7 +116,10 @@
             if not matches:
                 continue
             cases.update(
-                {k: int(v.replace(".", "")) for k, v in matches.groupdict().items()}
+                {
+                    k: parse_num(v.replace(".", ""))
+                    for k, v in matches.groupdict().items()
+                }
             )
 
         result = IncidenceData("Augsburg", incidence, date, **cases)
diff --git a/custom_components/home_assistant_covid19_augsburg/sensor.py b/custom_components/home_assistant_covid19_augsburg/sensor.py
new file mode 100644
index 0000000..b2480a6
--- /dev/null
+++ b/custom_components/home_assistant_covid19_augsburg/sensor.py
@@ -0,0 +1,62 @@
+from . import get_coordinator
+from homeassistant.helpers.entity import Entity
+
+
+async def async_setup_entry(hass, _, async_add_entities):
+    """Set up the Augsburg COVID-19 sensor from a config entry."""
+    coordinator = await get_coordinator(hass)
+
+    async_add_entities([CoronaAugsburgSensor(coordinator)])
+
+
+class CoronaAugsburgSensor(Entity):
+    """Sensor exposing COVID-19 case numbers for the city of Augsburg."""
+
+    def __init__(self, coordinator):
+        """Initialize sensor."""
+        self.coordinator = coordinator
+        self._name = "Coronavirus Augsburg"
+        self._state = None
+
+    @property
+    def available(self):
+        return self.coordinator.last_update_success and self.coordinator.data is not None
+
+    @property
+    def name(self):
+        return self._name
+
+    @property
+    def unique_id(self):
+        return self._name
+
+    @property
+    def icon(self):
+        return "mdi:biohazard"
+
+    @property
+    def unit_of_measurement(self):
+        return "people"
+
+    @property
+    def state(self):
+        return self.coordinator.data.total_cases
+
+    @property
+    def device_state_attributes(self):
+        return {
+            "date": self.coordinator.data.date,
+            "total_cases": self.coordinator.data.total_cases,
+            "num_dead": self.coordinator.data.num_dead,
+            "num_recovered": self.coordinator.data.num_recovered,
+            "num_infected": self.coordinator.data.num_infected,
+            "incidence": self.coordinator.data.incidence,
+        }
+
+    async def async_added_to_hass(self):
+        """When entity is added to hass."""
+        self.coordinator.async_add_listener(self.async_write_ha_state)
+
+    async def async_will_remove_from_hass(self):
+        """When entity will be removed from hass."""
+        self.coordinator.async_remove_listener(self.async_write_ha_state)
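
Note (not part of the patch): a minimal, standalone sketch of the German number and date handling that this change swaps in for the previous locale-based calls. parse_num() is copied verbatim from crawler.py above; the months lookup mirrors the new date construction; the sample inputs and the assertions are illustrative only.

import datetime

def parse_num(s, t=int):
    # Copied from crawler.py: drop thousands separators ("1.234" -> "1234"),
    # turn a decimal comma into a decimal point ("132,7" -> "132.7").
    if len(s):
        return t(s.replace(".", "").replace(",", "."))
    return 0

# German-formatted figures as they might appear on the scraped page (examples).
assert parse_num("1.234") == 1234
assert parse_num("132,7", t=float) == 132.7
assert parse_num("") == 0  # empty capture groups fall back to 0

# Date parsing without locale.setlocale(): look the month name up in a list
# instead of relying on a de_DE locale being installed on the host.
months = [
    "Januar", "Februar", "März", "April", "Mai", "Juni",
    "Juli", "August", "September", "Oktober", "November", "Dezember",
]
day, month_name = "18", "Juni"  # e.g. captured by r"\((\d+)\. (\w+)\)" from "(18. Juni)"
date = datetime.date(
    year=datetime.datetime.now().year,
    month=1 + months.index(month_name),
    day=parse_num(day),
)
assert date.month == 6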