Compare commits

32 Commits

a44308a4e1, dd5bb2916b, 67bb1e49ef, aaea39657e, 81c19b552d, 0d609ade9a,
2b453f4b5e, 62904f4c09, 35d5232d8e, 1c3b0ae0b5, d2e8f77725, 92e99e03ef,
903a512f99, f385ee3a5a, 573f91e2f9, 025a6475dd, 216775e68f, 403efb937b,
559a463140, 2f73be9010, b6184be32f, 70fa0619d4, f83bb077c1, 8a97e92458,
68c879583a, 5eeb0c5eae, 3fa66f0289, 79ec497614, 767392a3bb, cb1b4ecc08,
7ef44355ab, 12762f5027
`.github/workflows/python-app.yml` (new file, vendored, 62 lines)

```yaml
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Python application

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9
      - name: Cache
        uses: actions/cache@v2.1.6
        with:
          # A list of files, directories, and wildcard patterns to cache and restore
          path: .venv
          # An explicit key for restoring and saving the cache
          key: venv-cache-${{hashFiles('**/poetry.lock')}}
          restore-keys: |
            venv-cache-${{hashFiles('**/poetry.lock')}}
            venv-cache-
      - name: Python Poetry Action
        # You may pin to the exact commit or the version.
        # uses: abatilo/actions-poetry@8284d202bc272a8d0597e26e1c0b4a0d0c73db93
        uses: abatilo/actions-poetry@v2.1.0
        with:
          # The version of poetry to install
          poetry-version: 1.1.7
      - name: Install dependencies
        run: |
          poetry config virtualenvs.in-project true
          poetry install
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          poetry run flake8 . --count --exclude .git,.venv --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          poetry run flake8 . --count --exclude .git,.venv --exit-zero --max-complexity=10 --max-line-length=127 --statistics
      - name: Test with pytest
        run: |
          poetry run pytest

  validate:
    runs-on: "ubuntu-latest"
    steps:
      - uses: "actions/checkout@v2"
      - name: HACS validation
        uses: "hacs/action@main"
        with:
          category: "integration"
          ignore: brands wheels
```
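The Cache step keys the `.venv` directory on `${{hashFiles('**/poetry.lock')}}`, so the virtualenv is rebuilt only when the locked dependency set changes, with the bare `venv-cache-` restore key as a stale-but-usable fallback. As a rough illustration of what that key computation amounts to (the helper below is hypothetical, not part of the workflow; GitHub's `hashFiles` uses SHA-256):

```python
# Sketch: derive a cache key from poetry.lock, mirroring
# ${{hashFiles('**/poetry.lock')}} in the workflow above.
# compute_cache_key is an illustrative helper, not project code.
import hashlib
from pathlib import Path


def compute_cache_key(lockfile: str = "poetry.lock") -> str:
    digest = hashlib.sha256(Path(lockfile).read_bytes()).hexdigest()
    return f"venv-cache-{digest}"
```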
`README.md` (24 changed lines)

````diff
@@ -2,7 +2,8 @@

 ## Adding to your dashboard

-You can add an overview of the current infection numbers to your dashboard using the [multiple-entity-row](https://github.com/benct/lovelace-multiple-entity-row) card:
+You can add an overview of the current infection and vaccination numbers to your dashboard
+using the [multiple-entity-row](https://github.com/benct/lovelace-multiple-entity-row) card:

 ```yaml
 type: entities
@@ -24,4 +25,25 @@ entities:
     secondary_info:
       attribute: incidence
       unit: cases/100k
+  - type: custom:multiple-entity-row
+    entity: sensor.covid_19_vaccinations_augsburg
+    entities:
+      - attribute: ratio_vaccinated_once
+        name: Once
+        format: precision1
+        unit: '%'
+      - attribute: ratio_vaccinated_full
+        name: Fully
+        format: precision1
+        unit: '%'
+      - attribute: ratio_vaccinated_total
+        name: Total
+        format: precision1
+        unit: '%'
+    show_state: false
+    icon: mdi:needle
+    name: COVID-19 Vaccinations
+    secondary_info:
+      attribute: date
+      format: date
 ```
````
`__init__.py`

```diff
@@ -12,7 +12,7 @@ if TYPE_CHECKING:
     from homeassistant.core import HomeAssistant

 from .const import DOMAIN
-from .crawler import CovidCrawler, IncidenceData
+from .crawler import CovidCrawler

 _LOGGER = logging.getLogger(__name__)
@@ -67,9 +67,12 @@ async def get_coordinator(hass: HomeAssistant):
     if DOMAIN in hass.data:
         return hass.data[DOMAIN]

-    async def async_get_data() -> IncidenceData:
+    async def async_get_data() -> dict:
         crawler = CovidCrawler(hass)
-        return await crawler.crawl()
+        return {
+            "incidence": await crawler.crawl_incidence(),
+            "vaccination": await crawler.crawl_vaccination(),
+        }

     hass.data[DOMAIN] = DataUpdateCoordinator(
         hass,
```
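With this change the coordinator payload becomes a dict keyed by dataset rather than a single `IncidenceData` instance, so consumers index into it before reading attributes. A minimal sketch of the new access pattern (the function itself is illustrative, not part of the diff):

```python
from . import get_coordinator  # imported the same way sensor.py does


async def show_current_data(hass):
    coordinator = await get_coordinator(hass)
    # coordinator.data now holds one entry per crawl
    incidence = coordinator.data["incidence"]      # an IncidenceData
    vaccination = coordinator.data["vaccination"]  # a VaccinationData
    print(incidence.incidence, vaccination.total_vaccinations)
```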
`crawler.py`

```diff
@@ -15,6 +15,32 @@ def parse_num(s, t=int):
     return 0


+def parse_date(
+    day: int, month: str, year=datetime.datetime.now().year
+) -> datetime.date:
+    """Parse a German medium-form date, e.g. 17. August into a datetime.date"""
+    months = [
+        "Januar",
+        "Februar",
+        "März",
+        "April",
+        "Mai",
+        "Juni",
+        "Juli",
+        "August",
+        "September",
+        "Oktober",
+        "November",
+        "Dezember",
+    ]
+    date = datetime.date(
+        year=int(year),
+        month=1 + months.index(month),
+        day=parse_num(day),
+    )
+    return date
+
+
 @dataclass
 class IncidenceData:
     location: str
```
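The extracted `parse_date` maps a German month name to its one-based index and delegates day parsing to `parse_num`. A quick check, assuming `parse_num` accepts a plain integer day:

```python
import datetime

from crawler import parse_date  # assuming the crawler module is importable

assert parse_date(17, "August", 2021) == datetime.date(2021, 8, 17)
# The year defaults to the current year when the page omits it:
assert parse_date(1, "Dezember").month == 12
```

One caveat: the `year` default is evaluated once, at import time, so a long-running process that crosses New Year will silently keep the previous year for dates without an explicit year.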
```diff
@@ -26,46 +52,68 @@ class IncidenceData:
     num_dead: int = 0


+@dataclass
+class VaccinationData:
+    date: str
+
+    total_vaccinations: int = 0
+    num_vaccinated_once: int = 0
+    num_vaccinated_full: int = 0
+    num_vaccinated_booster: int = 0
+
+    ratio_vaccinated_once: float = 0.0
+    ratio_vaccinated_full: float = 0.0
+    ratio_vaccinated_total: float = 0.0
+    ratio_vaccinated_booster: float = 0.0
+
+
 class CovidCrawlerBase(ABC):
     @abstractmethod
-    def crawl(self) -> IncidenceData:
+    def crawl_incidence(self) -> IncidenceData:
+        pass
+
+    @abstractmethod
+    def crawl_vaccination(self) -> VaccinationData:
         pass


 class CovidCrawler(CovidCrawlerBase):
     def __init__(self, hass=None) -> None:
-        self.url = (
-            "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/fallzahlen"
-        )
         self.hass = hass

-    async def crawl(self) -> IncidenceData:
+    async def _fetch(self, url: str) -> str:
+        """Fetch a URL, using either the current Home Assistant instance or requests"""
+
+        if self.hass:
+            from homeassistant.helpers import aiohttp_client
+
+            result = await aiohttp_client.async_get_clientsession(self.hass).get(url)
+            soup = BeautifulSoup(await result.text(), "html.parser")
+        else:
+            import requests
+
+            result = requests.get(url)
+            result.raise_for_status()
+            soup = BeautifulSoup(result.text, "html.parser")
+        return soup
+
+    async def crawl_incidence(self) -> IncidenceData:
         """
         Fetch COVID-19 infection data from the target website.
         """

         _log.info("Fetching COVID-19 data update")

-        if self.hass:
-            from homeassistant.helpers import aiohttp_client
-
-            result = await aiohttp_client.async_get_clientsession(self.hass).get(
-                self.url
-            )
-            soup = BeautifulSoup(await result.text(), "html.parser")
-        else:
-            import requests
-
-            result = requests.get(self.url)
-            if not result.ok:
-                result.raise_for_status()
-            soup = BeautifulSoup(result.text, "html.parser")
+        url = (
+            "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/fallzahlen"
+        )
+        soup = await self._fetch(url)

-        match = soup.find(class_="frame--type-textpic")
-        text = match.p.text
+        match = soup.find(id="c1067628")
+        text = match.text.strip()
         _log.debug(f"Infection data text: {text}")

-        matches = re.search(r"(\d+,\d+)\sNeuinfektion", text)
+        matches = re.search(r"(\d+(,\d+)?)\sNeuinfektion", text)
         if not matches:
             raise ValueError(
                 f"Could not extract incidence from scraped web page, {text=}"
```
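The incidence regex change loosens `(\d+,\d+)` to `(\d+(,\d+)?)`, making the German decimal part optional, so a whole-number incidence no longer trips the `ValueError`. A quick demonstration with made-up sample text:

```python
import re

old = r"(\d+,\d+)\sNeuinfektion"
new = r"(\d+(,\d+)?)\sNeuinfektion"

text = "12 Neuinfektionen pro 100.000 Einwohner"  # sample string, not from the site

assert re.search(old, text) is None                # old pattern required a comma
assert re.search(new, text).group(1) == "12"       # new pattern matches integers too
assert re.search(new, "12,5 Neuinfektionen").group(1) == "12,5"
```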
```diff
@@ -74,38 +122,15 @@ class CovidCrawler(CovidCrawlerBase):
         incidence = parse_num(matches.group(1), t=float)
         _log.debug(f"Parsed incidence: {incidence}")

-        text = match.h2.text
-        matches = re.search(r"\((\d+)\. (\w+).*\)", text)
+        match = soup.find(id="c1052517")
+        text = match.text.strip()
+        matches = re.search(r"Stand: (\d+)\. (\w+) (\d{4})", text)
         if not matches:
             raise ValueError(f"Could not extract date from scraped web page, {text=}")

-        months = [
-            "Januar",
-            "Februar",
-            "März",
-            "April",
-            "Mai",
-            "Juni",
-            "Juli",
-            "August",
-            "September",
-            "Oktober",
-            "November",
-            "Dezember",
-        ]
-        day = parse_num(matches.group(1))
-        month_name = matches.group(2)
-        date = datetime.date(
-            year=datetime.datetime.now().year,
-            month=1 + months.index(month_name),
-            day=day,
-        )
+        date = parse_date(matches.group(1), matches.group(2), matches.group(3))
         _log.debug(f"Parsed date: {date}")

         match = match.find_next_sibling(class_="frame--type-textpic")
         text = match.text
         _log.debug(f"Infection counts text: {text}")

         regexes = [
             r"Insgesamt: (?P<total_cases>[0-9.]+)",
             r"genesen: (?P<num_recovered>[0-9.]+)",
```
```diff
@@ -130,3 +155,56 @@ class CovidCrawler(CovidCrawlerBase):
         _log.debug(f"Result data: {result}")

         return result
+
+    async def crawl_vaccination(self) -> VaccinationData:
+        _log.info("Fetching COVID-19 vaccination data update")
+        url = (
+            "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/impfzentrum"
+        )
+        soup = await self._fetch(url)
+
+        container_id = "c1088140"
+        result = soup.find(id=container_id)
+        text = re.sub(r"\s+", " ", result.text)
+        regexes = [
+            r"(?P<total_vaccinations>\d+([.]\d+)?) Personen in Augsburg mindestens",
+            r"(?P<num_vaccinated_full>\d+([.]\d+)?) Personen sind mindestens zweimal geimpft",
+            r"(?P<num_vaccinated_booster>\d+([.]\d+)?) Personen haben eine Auffrischungsimpfung",
+        ]
+        values = {}
+        for r in regexes:
+            matches = re.search(r, text)
+            if not matches:
+                continue
+            values.update(
+                {
+                    k: parse_num(v.replace(".", ""))
+                    for k, v in matches.groupdict().items()
+                }
+            )
+
+        matches = re.search(r"Stand (?P<day>\d+)\. (?P<month>\w+) (?P<year>\d+)", text)
+        if not matches:
+            raise ValueError(f"Could not extract date from scraped web page, {text=}")
+
+        values["num_vaccinated_once"] = values["total_vaccinations"] - (
+            values["num_vaccinated_full"] + values["num_vaccinated_booster"]
+        )
+
+        values["date"] = parse_date(**matches.groupdict()).strftime("%Y-%m-%d")
+        result = VaccinationData(**values)
+
+        # Total population in Augsburg as listed on the crawled page
+        population = 298014
+
+        result.ratio_vaccinated_full = result.num_vaccinated_full / population * 100
+        result.ratio_vaccinated_once = result.num_vaccinated_once / population * 100
+        result.ratio_vaccinated_total = (
+            result.ratio_vaccinated_once + result.ratio_vaccinated_full
+        )
+        result.ratio_vaccinated_booster = (
+            result.num_vaccinated_booster / population * 100
+        )
+        _log.debug(f"Result data: {result}")
+
+        return result
```
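`crawl_vaccination` derives the first-dose-only count by subtracting the fully vaccinated and boosted counts from the "at least once" total, then expresses each count as a percentage of the fixed population constant. A worked example with made-up counts:

```python
# Worked example of the derived fields, using made-up counts and the
# population constant from the code above:
population = 298014
total_vaccinations = 220000     # vaccinated at least once
num_vaccinated_full = 180000
num_vaccinated_booster = 30000

# First-dose-only count, as computed in crawl_vaccination above:
num_vaccinated_once = total_vaccinations - (
    num_vaccinated_full + num_vaccinated_booster
)  # 10000

ratio_vaccinated_once = num_vaccinated_once / population * 100        # ~3.4 %
ratio_vaccinated_full = num_vaccinated_full / population * 100        # ~60.4 %
ratio_vaccinated_total = ratio_vaccinated_once + ratio_vaccinated_full  # ~63.8 %
ratio_vaccinated_booster = num_vaccinated_booster / population * 100  # ~10.1 %
```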
`__main__.py`

```diff
@@ -3,7 +3,10 @@ from .crawler import CovidCrawler


 async def main():
     crawler = CovidCrawler()
-    result = await crawler.crawl()
+    result = await crawler.crawl_incidence()
+    print(result)
+
+    result = await crawler.crawl_vaccination()
     print(result)
```
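The entry point now exercises both crawl methods. A sketch of running the crawler standalone, assuming the integration lives under `custom_components/covid19_augsburg` (the import path is an assumption; without a `hass` instance the crawler falls back to `requests`):

```python
# Hypothetical standalone invocation; the module path is an assumption.
import asyncio

from custom_components.covid19_augsburg.crawler import CovidCrawler


async def main():
    crawler = CovidCrawler()  # no hass instance -> plain requests fallback
    print(await crawler.crawl_incidence())
    print(await crawler.crawl_vaccination())


if __name__ == "__main__":
    asyncio.run(main())
```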
`manifest.json`

```diff
@@ -1,9 +1,10 @@
 {
     "domain": "covid19_augsburg",
     "name": "COVID-19 Augsburg",
-    "version": "0.1.0",
+    "version": "1.2.0",
     "config_flow": true,
     "documentation": "https://github.com/AdrianoKF/home-assistant-covid19-augsburg",
     "issue_tracker": "https://github.com/AdrianoKF/home-assistant-covid19-augsburg/issues",
     "requirements": ["beautifulsoup4==4.8.2"],
     "dependencies": [],
     "codeowners": ["@AdrianoKF"]
```
`sensor.py`

```diff
@@ -1,3 +1,5 @@
+from dataclasses import asdict
+
 from homeassistant.helpers.entity import Entity

 from . import get_coordinator
@@ -7,7 +9,12 @@ async def async_setup_entry(hass, _, async_add_entities):
     """Defer sensor setup to the shared sensor module."""
     coordinator = await get_coordinator(hass)

-    async_add_entities([CoronaAugsburgSensor(coordinator)])
+    async_add_entities(
+        [
+            CoronaAugsburgSensor(coordinator),
+            CoronaAugsburgVaccinationSensor(coordinator),
+        ]
+    )


 class CoronaAugsburgSensor(Entity):
@@ -41,18 +48,59 @@ class CoronaAugsburgSensor:
     @property
     def state(self):
-        return self.coordinator.data.incidence
+        return self.coordinator.data["incidence"].incidence

     @property
     def device_state_attributes(self):
-        return {
-            "date": self.coordinator.data.date,
-            "incidence": self.coordinator.data.incidence,
-            "total_cases": self.coordinator.data.total_cases,
-            "num_dead": self.coordinator.data.num_dead,
-            "num_recovered": self.coordinator.data.num_recovered,
-            "num_infected": self.coordinator.data.num_infected,
-        }
+        data = self.coordinator.data["incidence"]
+        return asdict(data)

     async def async_added_to_hass(self):
         """When entity is added to hass."""
         self.coordinator.async_add_listener(self.async_write_ha_state)

     async def async_will_remove_from_hass(self):
         """When entity will be removed from hass."""
         self.coordinator.async_remove_listener(self.async_write_ha_state)


+class CoronaAugsburgVaccinationSensor(Entity):
+    """Representation of vaccination data for the city of Augsburg"""
+
+    def __init__(self, coordinator):
+        """Initialize sensor."""
+        self.coordinator = coordinator
+        self._name = "COVID-19 Vaccinations Augsburg"
+        self._state = None
+
+    @property
+    def available(self):
+        return self.coordinator.last_update_success and self.coordinator.data
+
+    @property
+    def name(self):
+        return self._name
+
+    @property
+    def unique_id(self):
+        return self._name
+
+    @property
+    def icon(self):
+        return "mdi:needle"
+
+    @property
+    def unit_of_measurement(self):
+        return ""
+
+    @property
+    def state(self):
+        return self.coordinator.data["vaccination"].total_vaccinations
+
+    @property
+    def device_state_attributes(self):
+        data = self.coordinator.data["vaccination"]
+        return asdict(data)
+
+    async def async_added_to_hass(self):
+        """When entity is added to hass."""
```
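Both sensors now build `device_state_attributes` with `dataclasses.asdict`, which flattens every field of the dataclass, including untouched defaults, into a plain dict. A small illustration with `VaccinationData` and made-up values:

```python
from dataclasses import asdict

from crawler import VaccinationData  # assuming the crawler module is importable

# Made-up values, just to show the attribute dict shape
data = VaccinationData(
    date="2021-12-01",
    total_vaccinations=220000,
    num_vaccinated_full=180000,
)
attrs = asdict(data)
assert attrs["date"] == "2021-12-01"
assert attrs["ratio_vaccinated_full"] == 0.0  # defaults are included too
```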
`pyproject.toml`

```diff
@@ -1,5 +1,5 @@
 [tool.poetry]
-name = "git add re"
+name = "home_assistant_covid19_augsburg"
 version = "0.1.0"
 description = ""
 authors = ["Adrian Rumpold <a.rumpold@gmail.com>"]
```
`tests/test_example.py` (new file, 8 lines)

```python
"""Placeholder test suite so Pytest doesn't exit with an error code

TODO: Remove once other tests have been added.
"""


def test_example():
    assert True
```