Initial commit
105  .gitignore  vendored  Normal file
@@ -0,0 +1,105 @@
#
.idea
.vscode

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# reports
pylint.html
5  README.md  Normal file
@@ -0,0 +1,5 @@
# Home Assistant Augsburg COVID-19 Tracker Integration

## Requirements

Your system needs to have the `de_DE.utf8` locale installed in order to parse the dates from the scraped web response.
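Not part of this commit, but as a sketch: one way to fail fast when that locale is missing, using only the standard library (the suggested `locale-gen` command assumes a Debian-style host):

```python
import locale

# Pre-flight check for the de_DE.utf8 locale the crawler depends on.
try:
    locale.setlocale(locale.LC_TIME, "de_DE.utf8")
except locale.Error as err:
    raise SystemExit(
        f"Required locale de_DE.utf8 is not installed ({err}); "
        "on Debian/Ubuntu, generate it with `sudo locale-gen de_DE.UTF-8`."
    )
```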
80  custom_components/home_assistant_covid19_augsburg/__init__.py  Normal file
@@ -0,0 +1,80 @@
"""The covid19_augsburg component."""

import asyncio
import logging
import re
from datetime import timedelta

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN
from .crawler import CovidCrawler, IncidenceData

_LOGGER = logging.getLogger(__name__)

__version__ = "0.1.0"

PLATFORMS = ["sensor"]

HYPHEN_PATTERN = re.compile(r"- (.)")


async def async_setup(hass: HomeAssistant, config: dict):
    """Set up the Coronavirus Augsburg component."""
    # Make sure coordinator is initialized.
    coordinator = await get_coordinator(hass)

    async def handle_refresh(call):
        _LOGGER.info("Refreshing Coronavirus Augsburg data...")
        await coordinator.async_refresh()

    hass.services.async_register(DOMAIN, "refresh", handle_refresh)

    return True


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Set up Coronavirus Augsburg from a config entry."""

    for component in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component)
        )

    return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Unload a config entry."""
    unload_ok = all(
        await asyncio.gather(
            *[
                hass.config_entries.async_forward_entry_unload(entry, cmp)
                for cmp in PLATFORMS
            ]
        )
    )

    return unload_ok


async def get_coordinator(hass):
    """Get the data update coordinator."""
    if DOMAIN in hass.data:
        return hass.data[DOMAIN]

    async def async_get_data() -> IncidenceData:
        # crawl() does blocking I/O (requests), so run it in the
        # executor instead of blocking the event loop.
        crawler = CovidCrawler()
        return await hass.async_add_executor_job(crawler.crawl)

    hass.data[DOMAIN] = DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        name=DOMAIN,
        update_method=async_get_data,
        update_interval=timedelta(hours=6),
    )
    await hass.data[DOMAIN].async_refresh()
    return hass.data[DOMAIN]
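The `PLATFORMS = ["sensor"]` forward above implies a `sensor.py` that is not included in this commit. Purely for orientation, a minimal sketch of how such a platform would typically consume the coordinator via Home Assistant's `CoordinatorEntity` helper (all names below are illustrative, not the author's code):

```python
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import get_coordinator  # helper defined in __init__.py above


class IncidenceSensor(CoordinatorEntity):
    """Hypothetical sensor exposing the parsed incidence value."""

    _attr_name = "COVID-19 incidence Augsburg"

    @property
    def state(self):
        # coordinator.data holds the IncidenceData from async_get_data().
        return self.coordinator.data.incidence


async def async_setup_entry(hass, entry, async_add_entities):
    coordinator = await get_coordinator(hass)
    async_add_entities([IncidenceSensor(coordinator)])
```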
1  custom_components/home_assistant_covid19_augsburg/const.py  Normal file
@@ -0,0 +1 @@
DOMAIN = "covid19_augsburg"
94  custom_components/home_assistant_covid19_augsburg/crawler.py  Normal file
@@ -0,0 +1,94 @@
import datetime
import locale
import logging
import re
from abc import ABC, abstractmethod
from dataclasses import dataclass

import requests
from bs4 import BeautifulSoup

_log = logging.getLogger(__name__)


@dataclass
class IncidenceData:
    location: str
    date: datetime.date
    incidence: float
    total_cases: int = 0
    num_infected: int = 0
    num_recovered: int = 0
    num_dead: int = 0


class CovidCrawlerBase(ABC):
    @abstractmethod
    def crawl(self) -> IncidenceData:
        pass


class CovidCrawler(CovidCrawlerBase):
    def __init__(self) -> None:
        self.url = (
            "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/fallzahlen"
        )

    def crawl(self) -> IncidenceData:
        """
        Fetch COVID-19 infection data from the target website.
        """

        _log.info("Fetching COVID-19 data update")

        locale.setlocale(locale.LC_ALL, "de_DE.utf8")

        result = requests.get(self.url)
        result.raise_for_status()  # no-op for successful responses

        soup = BeautifulSoup(result.text, features="html.parser")

        match = soup.find(class_="frame--type-textpic")
        text = match.p.text
        _log.debug(f"Infection data text: {text}")

        matches = re.search(r"(\d+,\d+) Neuinfektion", text)
        if not matches:
            raise ValueError("Could not extract incidence from scraped web page")

        incidence = locale.atof(matches.group(1))
        _log.debug(f"Parsed incidence: {incidence}")

        text = match.h2.text
        matches = re.search(r"\((\d+\. \w+)\)", text)
        if not matches:
            raise ValueError("Could not extract date from scraped web page")

        date = datetime.datetime.strptime(matches.group(1), "%d. %B")
        date = date.replace(year=datetime.datetime.now().year).date()
        _log.debug(f"Parsed date: {date}")

        match = match.find_next_sibling(class_="frame--type-textpic")
        text = match.text
        _log.debug(f"Infection counts text: {text}")

        regexes = [
            r"Insgesamt: (?P<total_cases>[0-9.]+)",
            r"genesen: (?P<num_recovered>[0-9.]+)",
            r"infiziert: (?P<num_infected>[0-9.]+)",
            r"verstorben: (?P<num_dead>[0-9.]+)",
        ]
        cases = {}
        for r in regexes:
            matches = re.search(r, text)
            if not matches:
                continue
            cases.update(
                {k: int(v.replace(".", "")) for k, v in matches.groupdict().items()}
            )

        result = IncidenceData("Augsburg", date, incidence, **cases)
        _log.debug(f"Result data: {result}")

        return result
11  custom_components/home_assistant_covid19_augsburg/main.py  Normal file
@@ -0,0 +1,11 @@
from home_assistant_covid19_augsburg.crawler import CovidCrawler


def main():
    crawler = CovidCrawler()
    result = crawler.crawl()
    print(result)


if __name__ == "__main__":
    main()
10  custom_components/home_assistant_covid19_augsburg/manifest.json  Normal file
@@ -0,0 +1,10 @@
{
  "domain": "covid19_augsburg",
  "name": "COVID-19 Augsburg",
  "version": "0.1.0",
  "config_flow": true,
  "documentation": "https://github.com/AdrianoKF/home-assistant-covid19-augsburg",
  "requirements": ["beautifulsoup4==4.8.2", "requests==2.25.1"],
  "dependencies": [],
  "codeowners": ["@AdrianoKF"]
}
2  custom_components/home_assistant_covid19_augsburg/services.yaml  Normal file
@@ -0,0 +1,2 @@
refresh:
  description: Refreshes data from the web
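A hedged usage sketch (not in this commit): invoking the service from Python code, where the service name combines `DOMAIN` with the `refresh` key above:

```python
from homeassistant.core import HomeAssistant


async def trigger_refresh(hass: HomeAssistant) -> None:
    """Hypothetical helper: ask the integration to re-crawl immediately."""
    await hass.services.async_call("covid19_augsburg", "refresh", blocking=True)
```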
1024  poetry.lock  generated  Normal file
File diff suppressed because it is too large
31  pyproject.toml  Normal file
@@ -0,0 +1,31 @@
[tool.poetry]
name = "home-assistant-covid19-augsburg"
version = "0.1.0"
description = ""
authors = ["Adrian Rumpold <a.rumpold@gmail.com>"]
packages = [
    { include = "home_assistant_covid19_augsburg", from = "custom_components" }
]

[tool.poetry.dependencies]
python = "^3.9"
beautifulsoup4 = "^4.9.3"
requests = "^2.25.1"

[tool.poetry.dev-dependencies]
pytest = "^5.2"
flake8 = "^3.9.2"
homeassistant = "^2021.6.5"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.black]
line-length = 88
target-version = ['py39']
include = '\.pyi?$'

[tool.isort]
profile = "black"
multi_line_output = 3
0  tests/__init__.py  Normal file
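The empty `tests` package suggests tests were planned but not yet written. A sketch of a first one, exercising the count-parsing regexes from `crawler.py` against an invented fixture string in the page's reported format (the numbers are made up for illustration):

```python
import re

# Regexes copied from crawler.py; the fixture text below is invented.
REGEXES = [
    r"Insgesamt: (?P<total_cases>[0-9.]+)",
    r"genesen: (?P<num_recovered>[0-9.]+)",
    r"infiziert: (?P<num_infected>[0-9.]+)",
    r"verstorben: (?P<num_dead>[0-9.]+)",
]


def test_case_count_parsing():
    text = "Insgesamt: 10.219, genesen: 9.816, infiziert: 173, verstorben: 230"
    cases = {}
    for regex in REGEXES:
        matches = re.search(regex, text)
        assert matches is not None
        cases.update(
            {k: int(v.replace(".", "")) for k, v in matches.groupdict().items()}
        )
    assert cases == {
        "total_cases": 10219,
        "num_recovered": 9816,
        "num_infected": 173,
        "num_dead": 230,
    }
```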