Compare commits
	
		
			27 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 0d609ade9a | ||
|  | 2b453f4b5e | ||
|  | 62904f4c09 | ||
|  | 35d5232d8e | ||
|  | 1c3b0ae0b5 | ||
|  | d2e8f77725 | ||
|  | 92e99e03ef | ||
|  | 903a512f99 | ||
|  | f385ee3a5a | ||
|  | 573f91e2f9 | ||
|  | 025a6475dd | ||
|  | 216775e68f | ||
|  | 403efb937b | ||
|  | 559a463140 | ||
|  | 2f73be9010 | ||
|  | b6184be32f | ||
|  | 70fa0619d4 | ||
|  | f83bb077c1 | ||
|  | 8a97e92458 | ||
|  | 68c879583a | ||
|  | 5eeb0c5eae | ||
|  | 3fa66f0289 | ||
|  | 79ec497614 | ||
|  | 767392a3bb | ||
|  | cb1b4ecc08 | ||
|  | 7ef44355ab | ||
|  | 12762f5027 | ||
							
								
								
									
										62
									
								
								.github/workflows/python-app.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										62
									
								
								.github/workflows/python-app.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,62 @@ | |||||||
|  | # This workflow will install Python dependencies, run tests and lint with a single version of Python | ||||||
|  | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions | ||||||
|  |  | ||||||
|  | name: Python application | ||||||
|  |  | ||||||
|  | on: | ||||||
|  |   push: | ||||||
|  |     branches: [ main ] | ||||||
|  |   pull_request: | ||||||
|  |     branches: [ main ] | ||||||
|  |  | ||||||
|  | jobs: | ||||||
|  |   build: | ||||||
|  |  | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |  | ||||||
|  |     steps: | ||||||
|  |     - uses: actions/checkout@v2 | ||||||
|  |     - name: Set up Python 3.9 | ||||||
|  |       uses: actions/setup-python@v2 | ||||||
|  |       with: | ||||||
|  |         python-version: 3.9 | ||||||
|  |     - name: Cache | ||||||
|  |       uses: actions/cache@v2.1.6 | ||||||
|  |       with: | ||||||
|  |         # A list of files, directories, and wildcard patterns to cache and restore | ||||||
|  |         path: .venv | ||||||
|  |         # An explicit key for restoring and saving the cache | ||||||
|  |         key: venv-cache-${{hashFiles('**/poetry.lock')}} | ||||||
|  |         restore-keys: | | ||||||
|  |           venv-cache-${{hashFiles('**/poetry.lock')}} | ||||||
|  |           venv-cache- | ||||||
|  |     - name: Python Poetry Action | ||||||
|  |       # You may pin to the exact commit or the version. | ||||||
|  |       # uses: abatilo/actions-poetry@8284d202bc272a8d0597e26e1c0b4a0d0c73db93 | ||||||
|  |       uses: abatilo/actions-poetry@v2.1.0 | ||||||
|  |       with: | ||||||
|  |         # The version of poetry to install | ||||||
|  |         poetry-version: 1.1.7 | ||||||
|  |     - name: Install dependencies | ||||||
|  |       run: | | ||||||
|  |         poetry config virtualenvs.in-project true | ||||||
|  |         poetry install | ||||||
|  |     - name: Lint with flake8 | ||||||
|  |       run: | | ||||||
|  |         # stop the build if there are Python syntax errors or undefined names | ||||||
|  |         poetry run flake8 . --count --exclude .git,.venv --select=E9,F63,F7,F82 --show-source --statistics | ||||||
|  |         # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide | ||||||
|  |         poetry run flake8 . --count --exclude .git,.venv  --exit-zero --max-complexity=10 --max-line-length=127 --statistics | ||||||
|  |     - name: Test with pytest | ||||||
|  |       run: | | ||||||
|  |         poetry run pytest | ||||||
|  |  | ||||||
|  |   validate: | ||||||
|  |     runs-on: "ubuntu-latest" | ||||||
|  |     steps: | ||||||
|  |       - uses: "actions/checkout@v2" | ||||||
|  |       - name: HACS validation | ||||||
|  |         uses: "hacs/action@main" | ||||||
|  |         with: | ||||||
|  |           category: "integration" | ||||||
|  |           ignore: brands wheels | ||||||
							
								
								
									
										24
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										24
									
								
								README.md
									
									
									
									
									
								
							| @@ -2,7 +2,8 @@ | |||||||
|  |  | ||||||
| ## Adding to your dashboard | ## Adding to your dashboard | ||||||
|  |  | ||||||
| You can add an overview of the current infection numbers to your dashboard using the [multiple-entity-row](https://github.com/benct/lovelace-multiple-entity-row) card: | You can add an overview of the current infection and vaccination numbers to your dashboard | ||||||
|  | using the [multiple-entity-row](https://github.com/benct/lovelace-multiple-entity-row) card: | ||||||
|  |  | ||||||
| ```yaml | ```yaml | ||||||
| type: entities | type: entities | ||||||
| @@ -24,4 +25,25 @@ entities: | |||||||
|     secondary_info: |     secondary_info: | ||||||
|       attribute: incidence |       attribute: incidence | ||||||
|       unit: cases/100k |       unit: cases/100k | ||||||
|  |   - type: custom:multiple-entity-row | ||||||
|  |     entity: sensor.covid_19_vaccinations_augsburg | ||||||
|  |     entities: | ||||||
|  |       - attribute: ratio_vaccinated_once | ||||||
|  |         name: Once | ||||||
|  |         format: precision1 | ||||||
|  |         unit: '%' | ||||||
|  |       - attribute: ratio_vaccinated_full | ||||||
|  |         name: Fully | ||||||
|  |         format: precision1 | ||||||
|  |         unit: '%' | ||||||
|  |       - attribute: ratio_vaccinated_total | ||||||
|  |         name: Total | ||||||
|  |         format: precision1 | ||||||
|  |         unit: '%' | ||||||
|  |     show_state: false | ||||||
|  |     icon: mdi:needle | ||||||
|  |     name: COVID-19 Vaccinations | ||||||
|  |     secondary_info: | ||||||
|  |       attribute: date | ||||||
|  |       format: date | ||||||
| ``` | ``` | ||||||
|   | |||||||
| @@ -12,7 +12,7 @@ if TYPE_CHECKING: | |||||||
|     from homeassistant.core import HomeAssistant |     from homeassistant.core import HomeAssistant | ||||||
|  |  | ||||||
| from .const import DOMAIN | from .const import DOMAIN | ||||||
| from .crawler import CovidCrawler, IncidenceData | from .crawler import CovidCrawler | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) | _LOGGER = logging.getLogger(__name__) | ||||||
|  |  | ||||||
| @@ -67,9 +67,12 @@ async def get_coordinator(hass: HomeAssistant): | |||||||
|     if DOMAIN in hass.data: |     if DOMAIN in hass.data: | ||||||
|         return hass.data[DOMAIN] |         return hass.data[DOMAIN] | ||||||
|  |  | ||||||
|     async def async_get_data() -> IncidenceData: |     async def async_get_data() -> dict: | ||||||
|         crawler = CovidCrawler(hass) |         crawler = CovidCrawler(hass) | ||||||
|         return await crawler.crawl() |         return { | ||||||
|  |             "incidence": await crawler.crawl_incidence(), | ||||||
|  |             "vaccination": await crawler.crawl_vaccination(), | ||||||
|  |         } | ||||||
|  |  | ||||||
|     hass.data[DOMAIN] = DataUpdateCoordinator( |     hass.data[DOMAIN] = DataUpdateCoordinator( | ||||||
|         hass, |         hass, | ||||||
|   | |||||||
| @@ -15,70 +15,10 @@ def parse_num(s, t=int): | |||||||
|     return 0 |     return 0 | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass | def parse_date( | ||||||
| class IncidenceData: |     day: int, month: str, year=datetime.datetime.now().year | ||||||
|     location: str | ) -> datetime.date: | ||||||
|     date: str |     """Parse a German medium-form date, e.g. 17. August into a datetime.date""" | ||||||
|     incidence: float |  | ||||||
|     total_cases: int = 0 |  | ||||||
|     num_infected: int = 0 |  | ||||||
|     num_recovered: int = 0 |  | ||||||
|     num_dead: int = 0 |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class CovidCrawlerBase(ABC): |  | ||||||
|     @abstractmethod |  | ||||||
|     def crawl(self) -> IncidenceData: |  | ||||||
|         pass |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class CovidCrawler(CovidCrawlerBase): |  | ||||||
|     def __init__(self, hass=None) -> None: |  | ||||||
|         self.url = ( |  | ||||||
|             "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/fallzahlen" |  | ||||||
|         ) |  | ||||||
|         self.hass = hass |  | ||||||
|  |  | ||||||
|     async def crawl(self) -> IncidenceData: |  | ||||||
|         """ |  | ||||||
|         Fetch COVID-19 infection data from the target website. |  | ||||||
|         """ |  | ||||||
|  |  | ||||||
|         _log.info("Fetching COVID-19 data update") |  | ||||||
|  |  | ||||||
|         if self.hass: |  | ||||||
|             from homeassistant.helpers import aiohttp_client |  | ||||||
|  |  | ||||||
|             result = await aiohttp_client.async_get_clientsession(self.hass).get( |  | ||||||
|                 self.url |  | ||||||
|             ) |  | ||||||
|             soup = BeautifulSoup(await result.text(), "html.parser") |  | ||||||
|         else: |  | ||||||
|             import requests |  | ||||||
|  |  | ||||||
|             result = requests.get(self.url) |  | ||||||
|             if not result.ok: |  | ||||||
|                 result.raise_for_status() |  | ||||||
|             soup = BeautifulSoup(result.text, "html.parser") |  | ||||||
|  |  | ||||||
|         match = soup.find(class_="frame--type-textpic") |  | ||||||
|         text = match.p.text |  | ||||||
|         _log.debug(f"Infection data text: {text}") |  | ||||||
|  |  | ||||||
|         matches = re.search(r"(\d+,\d+)\sNeuinfektion", text) |  | ||||||
|         if not matches: |  | ||||||
|             raise ValueError( |  | ||||||
|                 f"Could not extract incidence from scraped web page, {text=}" |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         incidence = parse_num(matches.group(1), t=float) |  | ||||||
|         _log.debug(f"Parsed incidence: {incidence}") |  | ||||||
|  |  | ||||||
|         text = match.h2.text |  | ||||||
|         matches = re.search(r"\((\d+)\. (\w+).*\)", text) |  | ||||||
|         if not matches: |  | ||||||
|             raise ValueError(f"Could not extract date from scraped web page, {text=}") |  | ||||||
|  |  | ||||||
|     months = [ |     months = [ | ||||||
|         "Januar", |         "Januar", | ||||||
|         "Februar", |         "Februar", | ||||||
| @@ -93,18 +33,101 @@ class CovidCrawler(CovidCrawlerBase): | |||||||
|         "November", |         "November", | ||||||
|         "Dezember", |         "Dezember", | ||||||
|     ] |     ] | ||||||
|         day = parse_num(matches.group(1)) |  | ||||||
|         month_name = matches.group(2) |  | ||||||
|     date = datetime.date( |     date = datetime.date( | ||||||
|             year=datetime.datetime.now().year, |         year=int(year), | ||||||
|             month=1 + months.index(month_name), |         month=1 + months.index(month), | ||||||
|             day=day, |         day=parse_num(day), | ||||||
|     ) |     ) | ||||||
|         _log.debug(f"Parsed date: {date}") |     return date | ||||||
|  |  | ||||||
|         match = match.find_next_sibling(class_="frame--type-textpic") |  | ||||||
|         text = match.text | @dataclass | ||||||
|         _log.debug(f"Infection counts text: {text}") | class IncidenceData: | ||||||
|  |     location: str | ||||||
|  |     date: str | ||||||
|  |     incidence: float | ||||||
|  |     total_cases: int = 0 | ||||||
|  |     num_infected: int = 0 | ||||||
|  |     num_recovered: int = 0 | ||||||
|  |     num_dead: int = 0 | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @dataclass | ||||||
|  | class VaccinationData: | ||||||
|  |     date: str | ||||||
|  |  | ||||||
|  |     total_vaccinations: int = 0 | ||||||
|  |     num_vaccinated_once: int = 0 | ||||||
|  |     num_vaccinated_full: int = 0 | ||||||
|  |  | ||||||
|  |     ratio_vaccinated_once: float = 0.0 | ||||||
|  |     ratio_vaccinated_full: float = 0.0 | ||||||
|  |     ratio_vaccinated_total: float = 0.0 | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class CovidCrawlerBase(ABC): | ||||||
|  |     @abstractmethod | ||||||
|  |     def crawl_incidence(self) -> IncidenceData: | ||||||
|  |         pass | ||||||
|  |  | ||||||
|  |     @abstractmethod | ||||||
|  |     def crawl_vaccination(self) -> VaccinationData: | ||||||
|  |         pass | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class CovidCrawler(CovidCrawlerBase): | ||||||
|  |     def __init__(self, hass=None) -> None: | ||||||
|  |         self.hass = hass | ||||||
|  |  | ||||||
|  |     async def _fetch(self, url: str) -> str: | ||||||
|  |         """Fetch a URL, using either the current Home Assistant instance or requests""" | ||||||
|  |  | ||||||
|  |         if self.hass: | ||||||
|  |             from homeassistant.helpers import aiohttp_client | ||||||
|  |  | ||||||
|  |             result = await aiohttp_client.async_get_clientsession(self.hass).get(url) | ||||||
|  |             soup = BeautifulSoup(await result.text(), "html.parser") | ||||||
|  |         else: | ||||||
|  |             import requests | ||||||
|  |  | ||||||
|  |             result = requests.get(url) | ||||||
|  |             result.raise_for_status() | ||||||
|  |             soup = BeautifulSoup(result.text, "html.parser") | ||||||
|  |         return soup | ||||||
|  |  | ||||||
|  |     async def crawl_incidence(self) -> IncidenceData: | ||||||
|  |         """ | ||||||
|  |         Fetch COVID-19 infection data from the target website. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         _log.info("Fetching COVID-19 data update") | ||||||
|  |  | ||||||
|  |         url = ( | ||||||
|  |             "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/fallzahlen" | ||||||
|  |         ) | ||||||
|  |         soup = await self._fetch(url) | ||||||
|  |  | ||||||
|  |         match = soup.find(id="c1067628") | ||||||
|  |         text = match.text.strip() | ||||||
|  |         _log.debug(f"Infection data text: {text}") | ||||||
|  |  | ||||||
|  |         matches = re.search(r"(\d+,\d+)\sNeuinfektion", text) | ||||||
|  |         if not matches: | ||||||
|  |             raise ValueError( | ||||||
|  |                 f"Could not extract incidence from scraped web page, {text=}" | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         incidence = parse_num(matches.group(1), t=float) | ||||||
|  |         _log.debug(f"Parsed incidence: {incidence}") | ||||||
|  |  | ||||||
|  |         match = soup.find(id="c1052517") | ||||||
|  |         text = match.text.strip() | ||||||
|  |         matches = re.search(r"Stand: (\d+)\. (\w+) (\d{4})", text) | ||||||
|  |         if not matches: | ||||||
|  |             raise ValueError(f"Could not extract date from scraped web page, {text=}") | ||||||
|  |  | ||||||
|  |         date = parse_date(matches.group(1), matches.group(2), matches.group(3)) | ||||||
|  |         _log.debug(f"Parsed date: {date}") | ||||||
|  |  | ||||||
|         regexes = [ |         regexes = [ | ||||||
|             r"Insgesamt: (?P<total_cases>[0-9.]+)", |             r"Insgesamt: (?P<total_cases>[0-9.]+)", | ||||||
| @@ -130,3 +153,50 @@ class CovidCrawler(CovidCrawlerBase): | |||||||
|         _log.debug(f"Result data: {result}") |         _log.debug(f"Result data: {result}") | ||||||
|  |  | ||||||
|         return result |         return result | ||||||
|  |  | ||||||
|  |     async def crawl_vaccination(self) -> VaccinationData: | ||||||
|  |         _log.info("Fetching COVID-19 vaccination data update") | ||||||
|  |         url = ( | ||||||
|  |             "https://www.augsburg.de/umwelt-soziales/gesundheit/coronavirus/impfzentrum" | ||||||
|  |         ) | ||||||
|  |         soup = await self._fetch(url) | ||||||
|  |  | ||||||
|  |         container_id = "c1088140" | ||||||
|  |         result = soup.find(id=container_id) | ||||||
|  |         text = re.sub(r"\s+", " ", result.text) | ||||||
|  |         regexes = [ | ||||||
|  |             r"(?P<total_vaccinations>\d+[.]\d+) Impfdosen", | ||||||
|  |             r"Weitere (?P<num_vaccinated_once>\d+[.]\d+) Personen haben die Erstimpfung erhalten", | ||||||
|  |             r"(?P<num_vaccinated_full>\d+[.]\d+) Personen sind bereits vollständig geimpft", | ||||||
|  |         ] | ||||||
|  |         values = {} | ||||||
|  |         for r in regexes: | ||||||
|  |             matches = re.search(r, text) | ||||||
|  |             if not matches: | ||||||
|  |                 continue | ||||||
|  |             values.update( | ||||||
|  |                 { | ||||||
|  |                     k: parse_num(v.replace(".", "")) | ||||||
|  |                     for k, v in matches.groupdict().items() | ||||||
|  |                 } | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         matches = re.search(r"Stand (?P<day>\d+)\. (?P<month>\w+) (?P<year>\d+)", text) | ||||||
|  |         if not matches: | ||||||
|  |             raise ValueError(f"Could not extract date from scraped web page, {text=}") | ||||||
|  |  | ||||||
|  |         values["date"] = parse_date(**matches.groupdict()).strftime("%Y-%m-%d") | ||||||
|  |         result = VaccinationData(**values) | ||||||
|  |  | ||||||
|  |         # Total population in Augsburg as of 2020 | ||||||
|  |         # https://www.augsburg.de/fileadmin/user_upload/buergerservice_rathaus/rathaus/statisiken_und_geodaten/statistiken/Monitoring/Demografiemonitoring_der_Stadt_Augsburg_2021.pdf | ||||||
|  |         population = 299021 | ||||||
|  |  | ||||||
|  |         result.ratio_vaccinated_full = result.num_vaccinated_full / population * 100 | ||||||
|  |         result.ratio_vaccinated_once = result.num_vaccinated_once / population * 100 | ||||||
|  |         result.ratio_vaccinated_total = ( | ||||||
|  |             result.ratio_vaccinated_once + result.ratio_vaccinated_full | ||||||
|  |         ) | ||||||
|  |         _log.debug(f"Result data: {result}") | ||||||
|  |  | ||||||
|  |         return result | ||||||
|   | |||||||
| @@ -3,7 +3,10 @@ from .crawler import CovidCrawler | |||||||
|  |  | ||||||
| async def main(): | async def main(): | ||||||
|     crawler = CovidCrawler() |     crawler = CovidCrawler() | ||||||
|     result = await crawler.crawl() |     result = await crawler.crawl_incidence() | ||||||
|  |     print(result) | ||||||
|  |  | ||||||
|  |     result = await crawler.crawl_vaccination() | ||||||
|     print(result) |     print(result) | ||||||
|  |  | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,9 +1,10 @@ | |||||||
| { | { | ||||||
|     "domain": "covid19_augsburg", |     "domain": "covid19_augsburg", | ||||||
|     "name": "COVID-19 Augsburg", |     "name": "COVID-19 Augsburg", | ||||||
|     "version": "0.1.0", |     "version": "1.1.2", | ||||||
|     "config_flow": true, |     "config_flow": true, | ||||||
|     "documentation": "https://github.com/AdrianoKF/home-assistant-covid19-augsburg", |     "documentation": "https://github.com/AdrianoKF/home-assistant-covid19-augsburg", | ||||||
|  |     "issue_tracker": "https://github.com/AdrianoKF/home-assistant-covid19-augsburg/issues", | ||||||
|     "requirements": ["beautifulsoup4==4.8.2"], |     "requirements": ["beautifulsoup4==4.8.2"], | ||||||
|     "dependencies": [], |     "dependencies": [], | ||||||
|     "codeowners": ["@AdrianoKF"] |     "codeowners": ["@AdrianoKF"] | ||||||
|   | |||||||
| @@ -1,3 +1,5 @@ | |||||||
|  | from dataclasses import asdict | ||||||
|  |  | ||||||
| from homeassistant.helpers.entity import Entity | from homeassistant.helpers.entity import Entity | ||||||
|  |  | ||||||
| from . import get_coordinator | from . import get_coordinator | ||||||
| @@ -7,7 +9,12 @@ async def async_setup_entry(hass, _, async_add_entities): | |||||||
|     """Defer sensor setup to the shared sensor module.""" |     """Defer sensor setup to the shared sensor module.""" | ||||||
|     coordinator = await get_coordinator(hass) |     coordinator = await get_coordinator(hass) | ||||||
|  |  | ||||||
|     async_add_entities([CoronaAugsburgSensor(coordinator)]) |     async_add_entities( | ||||||
|  |         [ | ||||||
|  |             CoronaAugsburgSensor(coordinator), | ||||||
|  |             CoronaAugsburgVaccinationSensor(coordinator), | ||||||
|  |         ] | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class CoronaAugsburgSensor(Entity): | class CoronaAugsburgSensor(Entity): | ||||||
| @@ -41,18 +48,59 @@ class CoronaAugsburgSensor(Entity): | |||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def state(self): |     def state(self): | ||||||
|         return self.coordinator.data.incidence |         return self.coordinator.data["incidence"].incidence | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def device_state_attributes(self): |     def device_state_attributes(self): | ||||||
|         return { |         data = self.coordinator.data["incidence"] | ||||||
|             "date": self.coordinator.data.date, |         return asdict(data) | ||||||
|             "incidence": self.coordinator.data.incidence, |  | ||||||
|             "total_cases": self.coordinator.data.total_cases, |     async def async_added_to_hass(self): | ||||||
|             "num_dead": self.coordinator.data.num_dead, |         """When entity is added to hass.""" | ||||||
|             "num_recovered": self.coordinator.data.num_recovered, |         self.coordinator.async_add_listener(self.async_write_ha_state) | ||||||
|             "num_infected": self.coordinator.data.num_infected, |  | ||||||
|         } |     async def async_will_remove_from_hass(self): | ||||||
|  |         """When entity will be removed from hass.""" | ||||||
|  |         self.coordinator.async_remove_listener(self.async_write_ha_state) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class CoronaAugsburgVaccinationSensor(Entity): | ||||||
|  |     """Representation of vaccination data for the city of Augsburg""" | ||||||
|  |  | ||||||
|  |     def __init__(self, coordinator): | ||||||
|  |         """Initialize sensor.""" | ||||||
|  |         self.coordinator = coordinator | ||||||
|  |         self._name = "COVID-19 Vaccinations Augsburg" | ||||||
|  |         self._state = None | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def available(self): | ||||||
|  |         return self.coordinator.last_update_success and self.coordinator.data | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def name(self): | ||||||
|  |         return self._name | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def unique_id(self): | ||||||
|  |         return self._name | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def icon(self): | ||||||
|  |         return "mdi:needle" | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def unit_of_measurement(self): | ||||||
|  |         return "" | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def state(self): | ||||||
|  |         return self.coordinator.data["vaccination"].total_vaccinations | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def device_state_attributes(self): | ||||||
|  |         data = self.coordinator.data["vaccination"] | ||||||
|  |         return asdict(data) | ||||||
|  |  | ||||||
|     async def async_added_to_hass(self): |     async def async_added_to_hass(self): | ||||||
|         """When entity is added to hass.""" |         """When entity is added to hass.""" | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| [tool.poetry] | [tool.poetry] | ||||||
| name = "git add re" | name = "home_assistant_covid19_augsburg" | ||||||
| version = "0.1.0" | version = "0.1.0" | ||||||
| description = "" | description = "" | ||||||
| authors = ["Adrian Rumpold <a.rumpold@gmail.com>"] | authors = ["Adrian Rumpold <a.rumpold@gmail.com>"] | ||||||
|   | |||||||
							
								
								
									
										8
									
								
								tests/test_example.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										8
									
								
								tests/test_example.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,8 @@ | |||||||
|  | """Placeholder test suite so Pytest doesn't exit with an error code | ||||||
|  |  | ||||||
|  | TODO: Remove once other tests have been added. | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_example(): | ||||||
|  |     assert True | ||||||
		Reference in New Issue
	
	Block a user