10 Commits

Author          SHA1        Message                                                                   Date
Adrian Rumpold  f385ee3a5a  fix: Return vaccination percentage instead of ratio                       2021-08-11 10:07:00 +02:00
Adrian Rumpold  573f91e2f9  chore: Bump manifest version                                              2021-08-11 09:47:20 +02:00
Adrian Rumpold  025a6475dd  fix: Actually add vaccination entity to integration                      2021-08-11 08:52:56 +02:00
Adrian Rumpold  216775e68f  fix: Fix and disable some HACS validations                                2021-08-11 08:39:56 +02:00
Adrian Rumpold  403efb937b  feat(CI): Add HACS validation step to GitHub Actions                      2021-08-11 08:32:51 +02:00
Adrian Rumpold  559a463140  feat: Add new entity for vaccination data to Home Assistant integration  2021-08-11 08:28:24 +02:00
Adrian Rumpold  2f73be9010  chore: Refactor duplicated HTTP fetching code                             2021-08-11 08:12:48 +02:00
Adrian Rumpold  b6184be32f  fix: Enable commented-out code                                            2021-08-10 20:11:23 +02:00
Adrian Rumpold  70fa0619d4  fix: Simplify HTTP error handling                                         2021-08-10 20:11:14 +02:00
Adrian Rumpold  f83bb077c1  feat: Crawling and parsing of vaccination data                            2021-08-10 20:06:41 +02:00
                            See #2
7 changed files with 191 additions and 56 deletions

View File

@@ -50,3 +50,13 @@ jobs:
       - name: Test with pytest
         run: |
           poetry run pytest
+
+  validate:
+    runs-on: "ubuntu-latest"
+    steps:
+      - uses: "actions/checkout@v2"
+      - name: HACS validation
+        uses: "hacs/action@main"
+        with:
+          category: "integration"
+          ignore: brands wheels

View File

@@ -12,7 +12,7 @@ if TYPE_CHECKING:
     from homeassistant.core import HomeAssistant


 from .const import DOMAIN
-from .crawler import CovidCrawler, IncidenceData
+from .crawler import CovidCrawler

 _LOGGER = logging.getLogger(__name__)
@@ -67,9 +67,12 @@ async def get_coordinator(hass: HomeAssistant):
     if DOMAIN in hass.data:
         return hass.data[DOMAIN]

-    async def async_get_data() -> IncidenceData:
+    async def async_get_data() -> dict:
         crawler = CovidCrawler(hass)
-        return await crawler.crawl()
+        return {
+            "incidence": await crawler.crawl_incidence(),
+            "vaccination": await crawler.crawl_vaccination(),
+        }

     hass.data[DOMAIN] = DataUpdateCoordinator(
         hass,
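
With this change the coordinator's payload becomes a dict with one record per data source rather than a single IncidenceData object. A minimal sketch of how a consumer reads the new shape (the show_payload helper and the package import path are assumptions; get_coordinator and the field names come from the diffs on this page):

    async def show_payload(hass):
        # Hypothetical helper: assumes a running Home Assistant instance (hass).
        from custom_components.covid19_augsburg import get_coordinator  # assumed path

        coordinator = await get_coordinator(hass)
        incidence = coordinator.data["incidence"]      # IncidenceData dataclass
        vaccination = coordinator.data["vaccination"]  # VaccinationData dataclass
        print(incidence.incidence, vaccination.total_vaccinations)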

View File

@@ -15,6 +15,32 @@ def parse_num(s, t=int):
         return 0


+def parse_date(
+    day: int, month: str, year=datetime.datetime.now().year
+) -> datetime.date:
+    """Parse a German medium-form date, e.g. 17. August into a datetime.date"""
+    months = [
+        "Januar",
+        "Februar",
+        "März",
+        "April",
+        "Mai",
+        "Juni",
+        "Juli",
+        "August",
+        "September",
+        "Oktober",
+        "November",
+        "Dezember",
+    ]
+    date = datetime.date(
+        year=int(year),
+        month=1 + months.index(month),
+        day=parse_num(day),
+    )
+    return date
+
+
 @dataclass
 class IncidenceData:
     location: str
@@ -26,40 +52,59 @@ class IncidenceData:
     num_dead: int = 0


+@dataclass
+class VaccinationData:
+    date: str
+
+    total_vaccinations: int = 0
+    num_vaccinated_once: int = 0
+    num_vaccinated_full: int = 0
+    ratio_vaccinated_once: float = 0.0
+    ratio_vaccinated_full: float = 0.0
+
+
 class CovidCrawlerBase(ABC):
     @abstractmethod
-    def crawl(self) -> IncidenceData:
+    def crawl_incidence(self) -> IncidenceData:
+        pass
+
+    @abstractmethod
+    def crawl_vaccination(self) -> VaccinationData:
         pass


 class CovidCrawler(CovidCrawlerBase):
     def __init__(self, hass=None) -> None:
-        self.url = (
-            "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/fallzahlen"
-        )
         self.hass = hass

-    async def crawl(self) -> IncidenceData:
+    async def _fetch(self, url: str) -> str:
+        """Fetch a URL, using either the current Home Assistant instance or requests"""
+
+        if self.hass:
+            from homeassistant.helpers import aiohttp_client
+
+            result = await aiohttp_client.async_get_clientsession(self.hass).get(url)
+            soup = BeautifulSoup(await result.text(), "html.parser")
+        else:
+            import requests
+
+            result = requests.get(url)
+            result.raise_for_status()
+            soup = BeautifulSoup(result.text, "html.parser")
+
+        return soup
+
+    async def crawl_incidence(self) -> IncidenceData:
         """
         Fetch COVID-19 infection data from the target website.
         """

         _log.info("Fetching COVID-19 data update")

-        if self.hass:
-            from homeassistant.helpers import aiohttp_client
-
-            result = await aiohttp_client.async_get_clientsession(self.hass).get(
-                self.url
-            )
-            soup = BeautifulSoup(await result.text(), "html.parser")
-        else:
-            import requests
-
-            result = requests.get(self.url)
-            if not result.ok:
-                result.raise_for_status()
-            soup = BeautifulSoup(result.text, "html.parser")
+        url = (
+            "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/fallzahlen"
+        )
+        soup = await self._fetch(url)

         match = soup.find(class_="frame--type-textpic")
         text = match.p.text
@@ -79,27 +124,7 @@ class CovidCrawler(CovidCrawlerBase):
         if not matches:
             raise ValueError(f"Could not extract date from scraped web page, {text=}")

-        months = [
-            "Januar",
-            "Februar",
-            "März",
-            "April",
-            "Mai",
-            "Juni",
-            "Juli",
-            "August",
-            "September",
-            "Oktober",
-            "November",
-            "Dezember",
-        ]
-        day = parse_num(matches.group(1))
-        month_name = matches.group(2)
-        date = datetime.date(
-            year=datetime.datetime.now().year,
-            month=1 + months.index(month_name),
-            day=day,
-        )
+        date = parse_date(matches.group(1), matches.group(2))
         _log.debug(f"Parsed date: {date}")

         match = match.find_next_sibling(class_="frame--type-textpic")
@@ -130,3 +155,47 @@ class CovidCrawler(CovidCrawlerBase):
         _log.debug(f"Result data: {result}")

         return result
+
+    async def crawl_vaccination(self) -> VaccinationData:
+        _log.info("Fetching COVID-19 vaccination data update")
+
+        url = (
+            "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/impfzentrum"
+        )
+        soup = await self._fetch(url)
+
+        container_id = "c1088140"
+        result = soup.find(id=container_id)
+        text = re.sub(r"\s+", " ", result.text)
+
+        regexes = [
+            r"(?P<total_vaccinations>\d+[.]\d+) Impfdosen",
+            r"Weitere (?P<num_vaccinated_once>\d+[.]\d+) Personen haben die Erstimpfung erhalten",
+            r"(?P<num_vaccinated_full>\d+[.]\d+) Personen sind bereits vollständig geimpft",
+        ]
+        values = {}
+        for r in regexes:
+            matches = re.search(r, text)
+            if not matches:
+                continue
+            values.update(
+                {
+                    k: parse_num(v.replace(".", ""))
+                    for k, v in matches.groupdict().items()
+                }
+            )
+
+        matches = re.search(r"Stand (?P<day>\d+)\. (?P<month>\w+) (?P<year>\d+)", text)
+        if not matches:
+            raise ValueError(f"Could not extract date from scraped web page, {text=}")
+        values["date"] = parse_date(**matches.groupdict()).strftime("%Y-%m-%d")
+
+        result = VaccinationData(**values)
+
+        # Total population in Augsburg as of 2020
+        # https://www.augsburg.de/fileadmin/user_upload/buergerservice_rathaus/rathaus/statisiken_und_geodaten/statistiken/Monitoring/Demografiemonitoring_der_Stadt_Augsburg_2021.pdf
+        population = 299021
+        result.ratio_vaccinated_full = result.num_vaccinated_full / population * 100
+        result.ratio_vaccinated_once = result.num_vaccinated_once / population * 100
+
+        _log.debug(f"Result data: {result}")
+        return result
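
To make the scraping logic concrete, here is a self-contained rerun of the vaccination regexes against a made-up text snippet (the sentence fragments mimic the phrasing the patterns expect; all numbers are invented):

    import re

    text = (
        "Bisher wurden 250.000 Impfdosen verabreicht. "
        "Weitere 30.000 Personen haben die Erstimpfung erhalten, "
        "110.000 Personen sind bereits vollständig geimpft."
    )

    regexes = [
        r"(?P<total_vaccinations>\d+[.]\d+) Impfdosen",
        r"Weitere (?P<num_vaccinated_once>\d+[.]\d+) Personen haben die Erstimpfung erhalten",
        r"(?P<num_vaccinated_full>\d+[.]\d+) Personen sind bereits vollständig geimpft",
    ]
    values = {}
    for r in regexes:
        m = re.search(r, text)
        if m:
            # parse_num from the crawler reduces to int() for pre-cleaned digit strings
            values.update({k: int(v.replace(".", "")) for k, v in m.groupdict().items()})

    print(values)
    # {'total_vaccinations': 250000, 'num_vaccinated_once': 30000, 'num_vaccinated_full': 110000}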

View File

@@ -3,7 +3,10 @@ from .crawler import CovidCrawler


 async def main():
     crawler = CovidCrawler()
-    result = await crawler.crawl()
+    result = await crawler.crawl_incidence()
+    print(result)
+
+    result = await crawler.crawl_vaccination()
     print(result)
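
Because CovidCrawler falls back to blocking requests calls in _fetch when no hass instance is given, the crawler can also be exercised entirely outside Home Assistant. A sketch of a standalone driver (the import path is an assumption; within the package the relative import shown above is used):

    import asyncio

    from custom_components.covid19_augsburg.crawler import CovidCrawler  # assumed path

    async def main():
        crawler = CovidCrawler()  # hass=None -> _fetch falls back to requests
        print(await crawler.crawl_incidence())
        print(await crawler.crawl_vaccination())

    asyncio.run(main())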

View File

@@ -1,9 +1,10 @@
{ {
"domain": "covid19_augsburg", "domain": "covid19_augsburg",
"name": "COVID-19 Augsburg", "name": "COVID-19 Augsburg",
"version": "0.1.0", "version": "1.1.0",
"config_flow": true, "config_flow": true,
"documentation": "https://github.com/AdrianoKF/home-assistant-covid19-augsburg", "documentation": "https://github.com/AdrianoKF/home-assistant-covid19-augsburg",
"issue_tracker": "https://github.com/AdrianoKF/home-assistant-covid19-augsburg/issues",
"requirements": ["beautifulsoup4==4.8.2"], "requirements": ["beautifulsoup4==4.8.2"],
"dependencies": [], "dependencies": [],
"codeowners": ["@AdrianoKF"] "codeowners": ["@AdrianoKF"]

View File

@@ -1,3 +1,5 @@
+from dataclasses import asdict
+
 from homeassistant.helpers.entity import Entity

 from . import get_coordinator
@@ -7,7 +9,12 @@ async def async_setup_entry(hass, _, async_add_entities):
     """Defer sensor setup to the shared sensor module."""
     coordinator = await get_coordinator(hass)
-    async_add_entities([CoronaAugsburgSensor(coordinator)])
+    async_add_entities(
+        [
+            CoronaAugsburgSensor(coordinator),
+            CoronaAugsburgVaccinationSensor(coordinator),
+        ]
+    )


 class CoronaAugsburgSensor(Entity):
@@ -41,18 +48,59 @@ class CoronaAugsburgSensor(Entity):
     @property
     def state(self):
-        return self.coordinator.data.incidence
+        return self.coordinator.data["incidence"].incidence

     @property
     def device_state_attributes(self):
-        return {
-            "date": self.coordinator.data.date,
-            "incidence": self.coordinator.data.incidence,
-            "total_cases": self.coordinator.data.total_cases,
-            "num_dead": self.coordinator.data.num_dead,
-            "num_recovered": self.coordinator.data.num_recovered,
-            "num_infected": self.coordinator.data.num_infected,
-        }
+        data = self.coordinator.data["incidence"]
+        return asdict(data)
+
+    async def async_added_to_hass(self):
+        """When entity is added to hass."""
+        self.coordinator.async_add_listener(self.async_write_ha_state)
+
+    async def async_will_remove_from_hass(self):
+        """When entity will be removed from hass."""
+        self.coordinator.async_remove_listener(self.async_write_ha_state)
+
+
+class CoronaAugsburgVaccinationSensor(Entity):
+    """Representation of vaccination data for the city of Augsburg"""
+
+    def __init__(self, coordinator):
+        """Initialize sensor."""
+        self.coordinator = coordinator
+        self._name = "COVID-19 Vaccinations Augsburg"
+        self._state = None
+
+    @property
+    def available(self):
+        return self.coordinator.last_update_success and self.coordinator.data
+
+    @property
+    def name(self):
+        return self._name
+
+    @property
+    def unique_id(self):
+        return self._name
+
+    @property
+    def icon(self):
+        return "mdi:biohazard"
+
+    @property
+    def unit_of_measurement(self):
+        return ""
+
+    @property
+    def state(self):
+        return self.coordinator.data["vaccination"].total_vaccinations
+
+    @property
+    def device_state_attributes(self):
+        data = self.coordinator.data["vaccination"]
+        return asdict(data)

     async def async_added_to_hass(self):
         """When entity is added to hass."""

View File

@@ -3,5 +3,6 @@
 TODO: Remove once other tests have been added.
 """

+
 def test_example():
     assert True
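
The placeholder test above could now be replaced with real unit tests, since date parsing lives in a standalone helper. For example, a sketch of a pytest case for parse_date (the import path is assumed from the manifest's domain):

    import datetime

    from custom_components.covid19_augsburg.crawler import parse_date  # assumed path

    def test_parse_date():
        # The crawler passes regex captures, i.e. strings, for day and year
        assert parse_date("17", "August", "2021") == datetime.date(2021, 8, 17)
        assert parse_date("1", "Januar", 2020) == datetime.date(2020, 1, 1)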