initial commit

2022-12-20 21:26:47 +01:00
commit 2962a6db69
722 changed files with 63886 additions and 0 deletions


@@ -0,0 +1,88 @@
import logging
from datetime import timedelta
SENSOR_TYPES = {
"gft": ["GFT", "mdi:recycle"],
"kerstboom": ["Kerstboom", "mdi:recycle"],
"papier": ["Papier", "mdi:recycle"],
"pbd": ["PBD", "mdi:recycle"],
"restafval": ["Restafval", "mdi:recycle"],
"takken": ["Takken", "mdi:recycle"],
"textiel": ["Textiel", "mdi:recycle"],
"trash_type_today": ["Today", "mdi:recycle"],
"trash_type_tomorrow": ["Tomorrow", "mdi:recycle"],
}
SENSOR_LOCATIONS_TO_URL = {
"trashapi": [
"http://trashapi.azurewebsites.net/trash?Location={0}&ZipCode={1}&HouseNumber={2}&HouseNumberSuffix={3}&DiftarCode={4}"
]
}
MONTH_TO_NUMBER = {
"jan": "01",
"feb": "02",
"mrt": "03",
"apr": "04",
"mei": "05",
"jun": "06",
"jul": "07",
"aug": "08",
"sep": "09",
"okt": "10",
"nov": "11",
"dec": "12",
"januari": "01",
"februari": "02",
"maart": "03",
"april": "04",
"mei": "05",
"juni": "06",
"juli": "07",
"augustus": "08",
"september": "09",
"oktober": "10",
"november": "11",
"december": "12",
}
NUMBER_TO_MONTH = {
1: "januari",
2: "februari",
3: "maart",
4: "april",
5: "mei",
6: "juni",
7: "juli",
8: "augustus",
9: "september",
10: "oktober",
11: "november",
12: "december",
}
CONF_CITY = "city"
CONF_LOCATION = "location"
CONF_POSTCODE = "postcode"
CONF_STREET_NUMBER = "streetnumber"
CONF_STREET_NUMBER_SUFFIX = "streetnumbersuffix"
CONF_DATE_FORMAT = "dateformat"
CONF_TIMESPAN_IN_DAYS = "timespanindays"
CONF_LOCALE = "locale"
CONF_ID = "id"
CONF_NO_TRASH_TEXT = "notrashtext"
CONF_DIFTAR_CODE = "diftarcode"
SENSOR_PREFIX = "Afvalinfo "
ATTR_ERROR = "error"
ATTR_LAST_UPDATE = "last_update"
ATTR_HIDDEN = "hidden"
ATTR_IS_COLLECTION_DATE_TODAY = "is_collection_date_today"
ATTR_DAYS_UNTIL_COLLECTION_DATE = "days_until_collection_date"
ATTR_YEAR_MONTH_DAY_DATE = "year_month_day_date"
ATTR_FRIENDLY_NAME = "friendly_name"
ATTR_LAST_COLLECTION_DATE = "last_collection_date"
ATTR_TOTAL_COLLECTIONS_THIS_YEAR = "total_collections_this_year"
_LOGGER = logging.getLogger(__name__)
MIN_TIME_BETWEEN_UPDATES = timedelta(hours=2, minutes=30)


@@ -0,0 +1,82 @@
from ..const.const import (
MONTH_TO_NUMBER,
SENSOR_LOCATIONS_TO_URL,
_LOGGER,
)
from datetime import date, datetime, timedelta
import requests
class TrashApiAfval(object):
def get_data(
self,
location,
postcode,
street_number,
street_number_suffix,
diftar_code,
resources,
):
_LOGGER.debug("Updating Waste collection dates")
try:
API_ENDPOINT = SENSOR_LOCATIONS_TO_URL["trashapi"][0].format(
location, postcode, street_number, street_number_suffix, diftar_code
)
r = requests.get(url=API_ENDPOINT)
dataList = r.json()
# Place all possible values in the dictionary even if they are not necessary
waste_dict = {}
# _LOGGER.warning(dataList)
for data in dataList:
# find gft.
if "gft" in resources and data["name"].lower() == "gft":
waste_dict["gft"] = data["date"].split("T")[0]
# find kerstboom.
if "kerstboom" in resources and data["name"].lower() == "kerstboom":
waste_dict["kerstboom"] = data["date"].split("T")[0]
# find papier
if "papier" in resources and data["name"].lower() == "papier":
waste_dict["papier"] = data["date"].split("T")[0]
# find pbd.
if "pbd" in resources and data["name"].lower() == "pbd":
waste_dict["pbd"] = data["date"].split("T")[0]
# find restafval.
if "restafval" in resources and data["name"].lower() == "restafval":
if (
date.today()
<= datetime.strptime(
data["date"].split("T")[0], "%Y-%m-%d"
).date()
):
waste_dict["restafval"] = data["date"].split("T")[0]
else:
waste_dict["restafvaldiftardate"] = data["date"].split("T")[0]
waste_dict["restafvaldiftarcollections"] = data["totalThisYear"]
# find takken
if "takken" in resources and data["name"].lower() == "takken":
waste_dict["takken"] = data["date"].split("T")[0]
# find textiel
if "textiel" in resources and data["name"].lower() == "textiel":
waste_dict["textiel"] = data["date"].split("T")[0]
return waste_dict
except requests.exceptions.RequestException as exc:
_LOGGER.error("Error occurred while fetching data: %r", exc)
return False
except Exception as exc:
_LOGGER.error(
"""Error occurred. Please check the address with postcode: %r and huisnummer: %r%r on the website of your local waste collector in the gemeente: %r. It's probably a faulty address or the website of the waste collector is unreachable. If the address is working on the website of the local waste collector and this error still occured, please report the issue in the Github repository https://github.com/heyajohnny/afvalinfo with details of the location that isn't working""",
postcode,
street_number,
street_number_suffix,
location,
)
return False


@@ -0,0 +1,15 @@
{
"domain": "afvalinfo",
"name": "Afvalinfo",
"version": "1.0.9",
"documentation": "https://github.com/heyajohnny/afvalinfo",
"issue_tracker": "https://github.com/heyajohnny/afvalinfo/issues",
"dependencies": [],
"codeowners": [
"@heyajohnny"
],
"requirements": [
"Babel==2.8.0",
"python-dateutil==2.8.1"
]
}


@@ -0,0 +1,372 @@
#!/usr/bin/env python3
"""
Sensor component for Afvalinfo
Author: Johnny Visser
"""
import voluptuous as vol
from datetime import datetime, date, timedelta
from dateutil.relativedelta import relativedelta
import urllib.error
from babel import Locale
from babel.dates import format_date, format_datetime, format_time
import re
from .const.const import (
MIN_TIME_BETWEEN_UPDATES,
_LOGGER,
CONF_CITY,
CONF_LOCATION,
CONF_POSTCODE,
CONF_STREET_NUMBER,
CONF_STREET_NUMBER_SUFFIX,
CONF_DATE_FORMAT,
CONF_TIMESPAN_IN_DAYS,
CONF_NO_TRASH_TEXT,
CONF_DIFTAR_CODE,
CONF_LOCALE,
CONF_ID,
SENSOR_PREFIX,
ATTR_ERROR,
ATTR_LAST_UPDATE,
ATTR_HIDDEN,
ATTR_DAYS_UNTIL_COLLECTION_DATE,
ATTR_IS_COLLECTION_DATE_TODAY,
ATTR_YEAR_MONTH_DAY_DATE,
ATTR_FRIENDLY_NAME,
ATTR_LAST_COLLECTION_DATE,
ATTR_TOTAL_COLLECTIONS_THIS_YEAR,
SENSOR_TYPES,
)
from .location.trashapi import TrashApiAfval
from .sensortomorrow import AfvalInfoTomorrowSensor
from .sensortoday import AfvalInfoTodaySensor
from homeassistant.components.sensor import PLATFORM_SCHEMA
import homeassistant.helpers.config_validation as cv
from homeassistant.const import CONF_RESOURCES
from homeassistant.util import Throttle
from homeassistant.helpers.entity import Entity
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_RESOURCES, default=[]): vol.All(cv.ensure_list),
vol.Optional(CONF_CITY, default=""): cv.string,
vol.Optional(CONF_LOCATION, default="sliedrecht"): cv.string,
vol.Required(CONF_POSTCODE, default="3361AB"): cv.string,
vol.Required(CONF_STREET_NUMBER, default="1"): cv.string,
vol.Optional(CONF_STREET_NUMBER_SUFFIX, default=""): cv.string,
vol.Optional(CONF_DATE_FORMAT, default="%d-%m-%Y"): cv.string,
vol.Optional(CONF_TIMESPAN_IN_DAYS, default="365"): cv.string,
vol.Optional(CONF_LOCALE, default="en"): cv.string,
vol.Optional(CONF_ID, default=""): cv.string,
vol.Optional(CONF_NO_TRASH_TEXT, default="none"): cv.string,
vol.Optional(CONF_DIFTAR_CODE, default=""): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
_LOGGER.debug("Setup Afvalinfo sensor")
location = config.get(CONF_CITY).lower().strip()
if len(location) == 0:
location = config.get(CONF_LOCATION).lower().strip()
postcode = config.get(CONF_POSTCODE).strip()
street_number = config.get(CONF_STREET_NUMBER)
street_number_suffix = config.get(CONF_STREET_NUMBER_SUFFIX)
date_format = config.get(CONF_DATE_FORMAT).strip()
timespan_in_days = config.get(CONF_TIMESPAN_IN_DAYS)
locale = config.get(CONF_LOCALE)
id_name = config.get(CONF_ID)
no_trash_text = config.get(CONF_NO_TRASH_TEXT)
diftar_code = config.get(CONF_DIFTAR_CODE)
try:
resources = config[CONF_RESOURCES].copy()
# filter the types from the dict if it's a dictionary
if isinstance(resources[0], dict):
resourcesMinusTodayAndTomorrow = [obj["type"] for obj in resources]
else:
resourcesMinusTodayAndTomorrow = resources
if "trash_type_today" in resourcesMinusTodayAndTomorrow:
resourcesMinusTodayAndTomorrow.remove("trash_type_today")
if "trash_type_tomorrow" in resourcesMinusTodayAndTomorrow:
resourcesMinusTodayAndTomorrow.remove("trash_type_tomorrow")
data = AfvalinfoData(
location,
postcode,
street_number,
street_number_suffix,
diftar_code,
resourcesMinusTodayAndTomorrow,
)
except urllib.error.HTTPError as error:
_LOGGER.error(error.reason)
return False
entities = []
for resource in config[CONF_RESOURCES]:
# old way, before 20220204
if type(resource) == str:
sensor_type = resource.lower()
sensor_friendly_name = sensor_type
# new way
else:
sensor_type = resource["type"].lower()
if "friendly_name" in resource.keys():
sensor_friendly_name = resource["friendly_name"]
else:
# If no friendly name is provided, use the sensor_type as friendly name
sensor_friendly_name = sensor_type
# if sensor_type not in SENSOR_TYPES:
if (
sensor_type.title().lower() != "trash_type_today"
and sensor_type.title().lower() != "trash_type_tomorrow"
):
entities.append(
AfvalinfoSensor(
data,
sensor_type,
sensor_friendly_name,
date_format,
timespan_in_days,
locale,
id_name,
)
)
# Add sensor -trash_type_today
if sensor_type.title().lower() == "trash_type_today":
today = AfvalInfoTodaySensor(
data,
sensor_type,
sensor_friendly_name,
entities,
id_name,
no_trash_text,
)
entities.append(today)
# Add sensor -trash_type_tomorrow
if sensor_type.title().lower() == "trash_type_tomorrow":
tomorrow = AfvalInfoTomorrowSensor(
data,
sensor_type,
sensor_friendly_name,
entities,
id_name,
no_trash_text,
)
entities.append(tomorrow)
add_entities(entities)
class AfvalinfoData(object):
def __init__(
self,
location,
postcode,
street_number,
street_number_suffix,
diftar_code,
resources,
):
self.data = None
self.location = location
self.postcode = postcode
self.street_number = street_number
self.street_number_suffix = street_number_suffix
self.diftar_code = diftar_code
self.resources = resources
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
_LOGGER.debug("Updating Waste collection dates")
self.data = TrashApiAfval().get_data(
self.location,
self.postcode,
self.street_number,
self.street_number_suffix,
self.diftar_code,
self.resources,
)
class AfvalinfoSensor(Entity):
def __init__(
self,
data,
sensor_type,
sensor_friendly_name,
date_format,
timespan_in_days,
locale,
id_name,
):
self.data = data
self.type = sensor_type
self.friendly_name = sensor_friendly_name
self.date_format = date_format
self.timespan_in_days = timespan_in_days
self.locale = locale
self._name = sensor_friendly_name
self.entity_id = "sensor." + (
(
SENSOR_PREFIX
+ (id_name + " " if len(id_name) > 0 else "")
+ sensor_friendly_name
)
.lower()
.replace(" ", "_")
)
self._attr_unique_id = (
SENSOR_PREFIX
+ (id_name + " " if len(id_name) > 0 else "")
+ sensor_friendly_name
)
self._icon = SENSOR_TYPES[sensor_type][1]
self._hidden = False
self._error = False
self._state = None
self._last_update = None
self._days_until_collection_date = None
self._is_collection_date_today = False
self._year_month_day_date = None
self._last_collection_date = None
self._total_collections_this_year = None
@property
def name(self):
return self._name
@property
def icon(self):
return self._icon
@property
def state(self):
return self._state
@property
def extra_state_attributes(self):
return {
ATTR_ERROR: self._error,
ATTR_FRIENDLY_NAME: self.friendly_name,
ATTR_YEAR_MONTH_DAY_DATE: self._year_month_day_date,
ATTR_LAST_UPDATE: self._last_update,
ATTR_HIDDEN: self._hidden,
ATTR_DAYS_UNTIL_COLLECTION_DATE: self._days_until_collection_date,
ATTR_IS_COLLECTION_DATE_TODAY: self._is_collection_date_today,
ATTR_LAST_COLLECTION_DATE: self._last_collection_date,
ATTR_TOTAL_COLLECTIONS_THIS_YEAR: self._total_collections_this_year,
}
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
self.data.update()
waste_data = self.data.data
self._error = False
try:
if waste_data:
if self.type in waste_data:
collection_date = datetime.strptime(
waste_data[self.type], "%Y-%m-%d"
).date()
# Date in date format "%Y-%m-%d"
self._year_month_day_date = str(collection_date)
if collection_date:
# Set the values of the sensor
self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")
# Is the collection date today?
self._is_collection_date_today = date.today() == collection_date
if (
self.type == "restafval"
and "restafvaldiftardate" in waste_data
):
self._last_collection_date = str(
datetime.strptime(
waste_data["restafvaldiftardate"], "%Y-%m-%d"
).date()
)
self._total_collections_this_year = waste_data[
"restafvaldiftarcollections"
]
# Days until collection date
delta = collection_date - date.today()
self._days_until_collection_date = delta.days
# Only show the value if the date is less than or equal to (today + timespan_in_days)
if collection_date <= date.today() + relativedelta(
days=int(self.timespan_in_days)
):
# if the date does not contain a named day or month, return the date as normal
if (
self.date_format.find("a") == -1
and self.date_format.find("A") == -1
and self.date_format.find("b") == -1
and self.date_format.find("B") == -1
):
self._state = collection_date.strftime(self.date_format)
# else convert the named values to the locale names
else:
edited_date_format = self.date_format.replace(
"%a", "EEE"
)
edited_date_format = edited_date_format.replace(
"%A", "EEEE"
)
edited_date_format = edited_date_format.replace(
"%b", "MMM"
)
edited_date_format = edited_date_format.replace(
"%B", "MMMM"
)
# half babel, half date string... something like EEEE 04-MMMM-2020
half_babel_half_date = collection_date.strftime(
edited_date_format
)
# replace the digits with quoted digits 01 --> '01'
half_babel_half_date = re.sub(
r"(\d+)", r"'\1'", half_babel_half_date
)
# transform the EEE, EEEE etc... to a real locale date, with babel
locale_date = format_date(
collection_date,
half_babel_half_date,
locale=self.locale,
)
self._state = locale_date
else:
self._hidden = True
else:
raise ValueError()
else:
raise ValueError()
else:
raise ValueError()
except ValueError:
self._error = True
# self._state = None
# self._hidden = True
# self._days_until_collection_date = None
# self._year_month_day_date = None
# self._is_collection_date_today = False
# self._last_collection_date = None
# self._total_collections_this_year = None
self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")
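
For reference, the strftime-to-Babel conversion performed in AfvalinfoSensor.update above (mapping %a/%A/%b/%B to EEE/EEEE/MMM/MMMM and quoting the already-rendered digits) can be exercised on its own. A minimal sketch, assuming only that babel is installed; the helper name localize_date is illustrative and not part of the integration:

import re
from datetime import date
from babel.dates import format_date

def localize_date(collection_date, date_format, locale):
    # Map the strftime name fields to Babel (CLDR) pattern fields.
    pattern = (
        date_format.replace("%a", "EEE")
        .replace("%A", "EEEE")
        .replace("%b", "MMM")
        .replace("%B", "MMMM")
    )
    # Render the numeric parts with strftime first, e.g. "EEEE 04-MMMM-2020".
    half_babel_half_date = collection_date.strftime(pattern)
    # Quote the digits so Babel passes them through literally: 04 --> '04'.
    half_babel_half_date = re.sub(r"(\d+)", r"'\1'", half_babel_half_date)
    # Let Babel fill in the named day and month for the requested locale.
    return format_date(collection_date, half_babel_half_date, locale=locale)

print(localize_date(date(2020, 3, 4), "%A %d-%B-%Y", "nl"))  # woensdag 04-maart-2020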


@@ -0,0 +1,85 @@
#!/usr/bin/env python3
from datetime import datetime, date, timedelta
from .const.const import (
_LOGGER,
ATTR_LAST_UPDATE,
ATTR_FRIENDLY_NAME,
ATTR_YEAR_MONTH_DAY_DATE,
SENSOR_TYPES,
SENSOR_PREFIX,
)
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
class AfvalInfoTodaySensor(Entity):
def __init__(
self, data, sensor_type, sensor_friendly_name, entities, id_name, no_trash_text
):
self.data = data
self.type = sensor_type
self.friendly_name = sensor_friendly_name
self._last_update = None
self._name = sensor_friendly_name
self.entity_id = "sensor." + (
(
SENSOR_PREFIX
+ (id_name + " " if len(id_name) > 0 else "")
+ sensor_friendly_name
)
.lower()
.replace(" ", "_")
)
self._attr_unique_id = (
SENSOR_PREFIX
+ (id_name + " " if len(id_name) > 0 else "")
+ sensor_friendly_name
)
self._no_trash_text = no_trash_text
self._state = None
self._icon = SENSOR_TYPES[sensor_type][1]
self._entities = entities
@property
def name(self):
return self._name
@property
def icon(self):
return self._icon
@property
def state(self):
return self._state
@property
def extra_state_attributes(self):
return {ATTR_LAST_UPDATE: self._last_update}
@Throttle(timedelta(minutes=1))
def update(self):
self.data.update()
self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")
# use a tempState to change the real state only on a change...
tempState = self._no_trash_text
numberOfMatches = 0
today = str(date.today().strftime("%Y-%m-%d"))
for entity in self._entities:
if entity.extra_state_attributes.get(ATTR_YEAR_MONTH_DAY_DATE) == today:
# reset tempState to empty string
if numberOfMatches == 0:
tempState = ""
numberOfMatches = numberOfMatches + 1
# add the trash friendly name (or the trash type, if no friendly name was provided) to the string
tempState = (
(
tempState
+ ", "
+ entity.extra_state_attributes.get(ATTR_FRIENDLY_NAME)
)
).strip()
if tempState.startswith(", "):
tempState = tempState[2:]
# only change state if the new state is different than the last state
if tempState != self._state:
self._state = tempState


@@ -0,0 +1,85 @@
#!/usr/bin/env python3
from datetime import datetime, date, timedelta
from .const.const import (
_LOGGER,
ATTR_LAST_UPDATE,
ATTR_FRIENDLY_NAME,
ATTR_YEAR_MONTH_DAY_DATE,
SENSOR_TYPES,
SENSOR_PREFIX,
)
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
class AfvalInfoTomorrowSensor(Entity):
def __init__(
self, data, sensor_type, sensor_friendly_name, entities, id_name, no_trash_text
):
self.data = data
self.type = sensor_type
self.friendly_name = sensor_friendly_name
self._last_update = None
self._name = sensor_friendly_name
self.entity_id = "sensor." + (
(
SENSOR_PREFIX
+ (id_name + " " if len(id_name) > 0 else "")
+ sensor_friendly_name
)
.lower()
.replace(" ", "_")
)
self._attr_unique_id = (
SENSOR_PREFIX
+ (id_name + " " if len(id_name) > 0 else "")
+ sensor_friendly_name
)
self._no_trash_text = no_trash_text
self._state = None
self._icon = SENSOR_TYPES[sensor_type][1]
self._entities = entities
@property
def name(self):
return self._name
@property
def icon(self):
return self._icon
@property
def state(self):
return self._state
@property
def extra_state_attributes(self):
return {ATTR_LAST_UPDATE: self._last_update}
@Throttle(timedelta(minutes=1))
def update(self):
self.data.update()
self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")
# use a tempState to change the real state only on a change...
tempState = self._no_trash_text
numberOfMatches = 0
tomorrow = str((date.today() + timedelta(days=1)).strftime("%Y-%m-%d"))
for entity in self._entities:
if entity.extra_state_attributes.get(ATTR_YEAR_MONTH_DAY_DATE) == tomorrow:
# reset tempState to empty string
if numberOfMatches == 0:
tempState = ""
numberOfMatches = numberOfMatches + 1
# add trash name to string
tempState = (
(
tempState
+ ", "
+ entity.extra_state_attributes.get(ATTR_FRIENDLY_NAME)
)
).strip()
if tempState.startswith(", "):
tempState = tempState[2:]
# only change state if the new state is different than the last state
if tempState != self._state:
self._state = tempState


@@ -0,0 +1,57 @@
from datetime import datetime
import re
import requests
from ..common.main_functions import _waste_type_rename
from ..const.const import _LOGGER, SENSOR_COLLECTORS_DEAFVALAPP
def get_waste_data_raw(
provider,
postal_code,
street_number,
suffix,
):
if provider not in SENSOR_COLLECTORS_DEAFVALAPP.keys():
raise ValueError(f"Invalid provider: {provider}, please verify")
corrected_postal_code_parts = re.search(r"(\d\d\d\d) ?([A-z][A-z])", postal_code)
corrected_postal_code = (
corrected_postal_code_parts[1] + corrected_postal_code_parts[2].upper()
)
try:
url = SENSOR_COLLECTORS_DEAFVALAPP[provider].format(
corrected_postal_code,
street_number,
suffix,
)
raw_response = requests.get(url)
except requests.exceptions.RequestException as err:
raise ValueError(err) from err
try:
response = raw_response.text
except ValueError as e:
raise ValueError(f"Invalid and/or no data received from {url}") from e
if not response:
_LOGGER.error("No waste data found!")
return
waste_data_raw = []
for rows in response.strip().split("\n"):
for ophaaldatum in rows.split(";")[1:-1]:
temp = {"type": _waste_type_rename(rows.split(";")[0].strip().lower())}
temp["date"] = datetime.strptime(ophaaldatum, "%d-%m-%Y").strftime(
"%Y-%m-%d"
)
waste_data_raw.append(temp)
return waste_data_raw
if __name__ == "__main__":
print("Yell something at a mountain!")


@@ -0,0 +1,72 @@
from datetime import datetime
import re
import requests
from ..common.main_functions import _waste_type_rename
from ..const.const import _LOGGER, SENSOR_COLLECTORS_ICALENDAR
def get_waste_data_raw(
provider,
postal_code,
street_number,
suffix,
): # sourcery skip: avoid-builtin-shadow
if provider not in SENSOR_COLLECTORS_ICALENDAR.keys():
raise ValueError(f"Invalid provider: {provider}, please verify")
DATE_PATTERN = re.compile(r"^\d{8}")
try:
url = SENSOR_COLLECTORS_ICALENDAR[provider].format(
provider,
postal_code,
street_number,
suffix,
datetime.now().strftime("%Y-%m-%d"),
)
raw_response = requests.get(url)
except requests.exceptions.RequestException as err:
raise ValueError(err) from err
try:
response = raw_response.text
except ValueError as exc:
raise ValueError(f"Invalid and/or no data received from {url}") from exc
if not response:
_LOGGER.error("No waste data found!")
return
waste_data_raw = []
date = None
type = None
for line in response.splitlines():
key, value = line.split(":", 1)
field = key.split(";")[0]
if field == "BEGIN" and value == "VEVENT":
date = None
type = None
elif field == "SUMMARY":
type = value.strip().lower()
elif field == "DTSTART":
if DATE_PATTERN.match(value):
date = f"{value[:4]}-{value[4:6]}-{value[6:8]}"
else:
_LOGGER.debug(f"Unsupported date format: {value}")
elif field == "END" and value == "VEVENT":
if date and type:
waste_data_raw.append({"type": type, "date": date})
else:
_LOGGER.debug(
f"No date or type extracted from event: date={date}, type={type}"
)
return waste_data_raw
if __name__ == "__main__":
print("Yell something at a mountain!")


@@ -0,0 +1,122 @@
from ..common.waste_data_transformer import WasteDataTransformer
from ..const.const import (
_LOGGER,
SENSOR_COLLECTORS_AFVALWIJZER,
SENSOR_COLLECTORS_DEAFVALAPP,
SENSOR_COLLECTORS_ICALENDAR,
SENSOR_COLLECTORS_OPZET,
SENSOR_COLLECTORS_RD4,
SENSOR_COLLECTORS_XIMMIO,
)
try:
from . import deafvalapp, icalendar, mijnafvalwijzer, opzet, rd4, ximmio
except ImportError as err:
_LOGGER.error(f"Import error {err.args}")
class MainCollector(object):
def __init__(
self,
provider,
postal_code,
street_number,
suffix,
exclude_pickup_today,
exclude_list,
default_label,
):
self.provider = provider.strip().lower()
self.postal_code = postal_code.strip().upper()
self.street_number = street_number.strip()
self.suffix = suffix.strip().lower()
self.exclude_pickup_today = exclude_pickup_today.strip()
self.exclude_list = exclude_list.strip().lower()
self.default_label = default_label.strip()
try:
if provider in SENSOR_COLLECTORS_AFVALWIJZER:
waste_data_raw = mijnafvalwijzer.get_waste_data_raw(
self.provider,
self.postal_code,
self.street_number,
self.suffix,
)
elif provider in SENSOR_COLLECTORS_DEAFVALAPP.keys():
waste_data_raw = deafvalapp.get_waste_data_raw(
self.provider,
self.postal_code,
self.street_number,
self.suffix,
)
elif provider in SENSOR_COLLECTORS_ICALENDAR.keys():
waste_data_raw = icalendar.get_waste_data_raw(
self.provider,
self.postal_code,
self.street_number,
self.suffix,
)
elif provider in SENSOR_COLLECTORS_OPZET.keys():
waste_data_raw = opzet.get_waste_data_raw(
self.provider,
self.postal_code,
self.street_number,
self.suffix,
)
elif provider in SENSOR_COLLECTORS_RD4.keys():
waste_data_raw = rd4.get_waste_data_raw(
self.provider,
self.postal_code,
self.street_number,
self.suffix,
)
elif provider in SENSOR_COLLECTORS_XIMMIO.keys():
waste_data_raw = ximmio.get_waste_data_raw(
self.provider,
self.postal_code,
self.street_number,
self.suffix,
)
else:
_LOGGER.error(f"Unknown provider: {provider}")
return False
except ValueError as err:
_LOGGER.error(f"Check afvalwijzer platform settings {err.args}")
##########################################################################
# COMMON CODE
##########################################################################
self._waste_data = WasteDataTransformer(
waste_data_raw,
self.exclude_pickup_today,
self.exclude_list,
self.default_label,
)
##########################################################################
# PROPERTIES FOR EXECUTION
##########################################################################
@property
def waste_data_with_today(self):
return self._waste_data.waste_data_with_today
@property
def waste_data_without_today(self):
return self._waste_data.waste_data_without_today
@property
def waste_data_provider(self):
return self._waste_data.waste_data_provider
@property
def waste_types_provider(self):
return self._waste_data.waste_types_provider
@property
def waste_data_custom(self):
return self._waste_data.waste_data_custom
@property
def waste_types_custom(self):
return self._waste_data.waste_types_custom


@@ -0,0 +1,58 @@
from datetime import datetime
import requests
from ..common.main_functions import _waste_type_rename
from ..const.const import (
_LOGGER,
SENSOR_COLLECTOR_TO_URL,
SENSOR_COLLECTORS_AFVALWIJZER,
)
def get_waste_data_raw(
provider,
postal_code,
street_number,
suffix,
):
if provider not in SENSOR_COLLECTORS_AFVALWIJZER:
raise ValueError(f"Invalid provider: {provider}, please verify")
if provider == "rova":
provider = "inzamelkalender.rova"
try:
url = SENSOR_COLLECTOR_TO_URL["afvalwijzer_data_default"][0].format(
provider,
postal_code,
street_number,
suffix,
datetime.now().strftime("%Y-%m-%d"),
)
raw_response = requests.get(url)
except requests.exceptions.RequestException as err:
raise ValueError(err) from err
try:
response = raw_response.json()
except ValueError as e:
raise ValueError(f"Invalid and/or no data received from {url}") from e
if not response:
_LOGGER.error("Address not found!")
return
try:
waste_data_raw = (
response["ophaaldagen"]["data"] + response["ophaaldagenNext"]["data"]
)
except KeyError as exc:
raise KeyError(f"Invalid and/or no data received from {url}") from exc
return waste_data_raw
if __name__ == "__main__":
print("Yell something at a mountain!")


@@ -0,0 +1,69 @@
from datetime import datetime
import requests
from ..common.main_functions import _waste_type_rename
from ..const.const import _LOGGER, SENSOR_COLLECTORS_OPZET
def get_waste_data_raw(
provider,
postal_code,
street_number,
suffix,
):
if provider not in SENSOR_COLLECTORS_OPZET.keys():
raise ValueError(f"Invalid provider: {provider}, please verify")
try:
bag_id = None
_verify = provider != "suez"
url = f"{SENSOR_COLLECTORS_OPZET[provider]}/rest/adressen/{postal_code}-{street_number}"
raw_response = requests.get(url, verify=_verify)
except requests.exceptions.RequestException as err:
raise ValueError(err) from err
try:
response = raw_response.json()
except ValueError as e:
raise ValueError(f"Invalid and/or no data received from {url}") from e
if not response:
_LOGGER.error("No waste data found!")
return
try:
if len(response) > 1 and suffix:
for item in response:
if (
item["huisletter"] == suffix
or item["huisnummerToevoeging"] == suffix
):
bag_id = item["bagId"]
break
else:
bag_id = response[0]["bagId"]
url = f"{SENSOR_COLLECTORS_OPZET[provider]}/rest/adressen/{bag_id}/afvalstromen"
waste_data_raw_temp = requests.get(url, verify=_verify).json()
waste_data_raw = []
for item in waste_data_raw_temp:
if not item["ophaaldatum"]:
continue
waste_type = item["menu_title"]
if not waste_type:
continue
temp = {"type": _waste_type_rename(item["menu_title"].strip().lower())}
temp["date"] = datetime.strptime(item["ophaaldatum"], "%Y-%m-%d").strftime(
"%Y-%m-%d"
)
waste_data_raw.append(temp)
except ValueError as exc:
raise ValueError(f"Invalid and/or no data received from {url}") from exc
return waste_data_raw
if __name__ == "__main__":
print("Yell something at a mountain!")


@@ -0,0 +1,74 @@
from datetime import datetime
import re
import requests
from ..common.main_functions import _waste_type_rename
from ..const.const import _LOGGER, SENSOR_COLLECTORS_RD4
def get_waste_data_raw(
provider,
postal_code,
street_number,
suffix,
):
if provider not in SENSOR_COLLECTORS_RD4.keys():
raise ValueError(f"Invalid provider: {provider}, please verify")
TODAY = datetime.now()
YEAR_CURRENT = TODAY.year
corrected_postal_code_parts = re.search(r"(\d\d\d\d) ?([A-z][A-z])", postal_code)
corrected_postal_code = (
f"{corrected_postal_code_parts[1]}+{corrected_postal_code_parts[2].upper()}"
)
try:
url = SENSOR_COLLECTORS_RD4[provider].format(
corrected_postal_code,
street_number,
suffix,
YEAR_CURRENT,
)
raw_response = requests.get(url)
except requests.exceptions.RequestException as err:
raise ValueError(err) from err
try:
response = raw_response.json()
except ValueError as e:
raise ValueError(f"Invalid and/or no data received from {url}") from e
if not response:
_LOGGER.error("No waste data found!")
return
if not response["success"]:
_LOGGER.error("Address not found!")
return
try:
waste_data_raw_temp = response["data"]["items"][0]
except KeyError as exc:
raise KeyError(f"Invalid and/or no data received from {url}") from exc
waste_data_raw = []
for item in waste_data_raw_temp:
if not item["date"]:
continue
waste_type = item["type"]
if not waste_type:
continue
temp = {"type": _waste_type_rename(item["type"].strip().lower())}
temp["date"] = datetime.strptime(item["date"], "%Y-%m-%d").strftime("%Y-%m-%d")
waste_data_raw.append(temp)
return waste_data_raw
if __name__ == "__main__":
print("Yell something at a mountain!")


@@ -0,0 +1,83 @@
from datetime import datetime, timedelta
import requests
from ..common.main_functions import _waste_type_rename
from ..const.const import _LOGGER, SENSOR_COLLECTOR_TO_URL, SENSOR_COLLECTORS_XIMMIO
def get_waste_data_raw(
provider,
postal_code,
street_number,
suffix,
):
if provider not in SENSOR_COLLECTORS_XIMMIO.keys():
raise ValueError(f"Invalid provider: {provider}, please verify")
collectors = ("avalex", "meerlanden", "rad", "westland")
provider_url = "ximmio02" if provider in collectors else "ximmio01"
TODAY = datetime.now().strftime("%d-%m-%Y")
DATE_TODAY = datetime.strptime(TODAY, "%d-%m-%Y")
DATE_TOMORROW = datetime.strptime(TODAY, "%d-%m-%Y") + timedelta(days=1)
DATE_TODAY_NEXT_YEAR = (DATE_TODAY.date() + timedelta(days=365)).strftime(
"%Y-%m-%d"
)
##########################################################################
# First request: get uniqueId and community
##########################################################################
try:
url = SENSOR_COLLECTOR_TO_URL[provider_url][0]
companyCode = SENSOR_COLLECTORS_XIMMIO[provider]
data = {
"postCode": postal_code,
"houseNumber": street_number,
"companyCode": companyCode,
}
raw_response = requests.post(url=url, data=data)
uniqueId = raw_response.json()["dataList"][0]["UniqueId"]
community = raw_response.json()["dataList"][0]["Community"]
except requests.exceptions.RequestException as err:
raise ValueError(err) from err
##########################################################################
# Second request: get the dates
##########################################################################
try:
url = SENSOR_COLLECTOR_TO_URL[provider_url][1]
data = {
"companyCode": companyCode,
"startDate": DATE_TODAY.date(),
"endDate": DATE_TODAY_NEXT_YEAR,
"community": community,
"uniqueAddressID": uniqueId,
}
raw_response = requests.post(url=url, data=data).json()
except requests.exceptions.RequestException as err:
raise ValueError(err) from err
if not raw_response:
_LOGGER.error("Address not found!")
return
try:
response = raw_response["dataList"]
except KeyError as e:
raise KeyError(f"Invalid and/or no data received from {url}") from e
waste_data_raw = []
for item in response:
temp = {"type": _waste_type_rename(item["_pickupTypeText"].strip().lower())}
temp["date"] = datetime.strptime(
sorted(item["pickupDates"])[0], "%Y-%m-%dT%H:%M:%S"
).strftime("%Y-%m-%d")
waste_data_raw.append(temp)
return waste_data_raw
if __name__ == "__main__":
print("Yell something at a mountain!")


@@ -0,0 +1,66 @@
from datetime import datetime, timedelta
from ..const.const import _LOGGER
class DaySensorData(object):
##########################################################################
# INIT
##########################################################################
def __init__(
self,
waste_data_formatted,
default_label,
):
TODAY = datetime.now().strftime("%d-%m-%Y")
self.waste_data_formatted = sorted(
waste_data_formatted, key=lambda d: d["date"]
)
self.today_date = datetime.strptime(TODAY, "%d-%m-%Y")
self.tomorrow_date = datetime.strptime(TODAY, "%d-%m-%Y") + timedelta(days=1)
self.day_after_tomorrow_date = datetime.strptime(TODAY, "%d-%m-%Y") + timedelta(
days=2
)
self.default_label = default_label
self.waste_data_today = self.__gen_day_sensor(self.today_date)
self.waste_data_tomorrow = self.__gen_day_sensor(self.tomorrow_date)
self.waste_data_dot = self.__gen_day_sensor(self.day_after_tomorrow_date)
self.data = self._gen_day_sensor_data()
##########################################################################
# GENERATE TODAY, TOMORROW, DOT SENSOR(S)
##########################################################################
# Generate sensor data per date
def __gen_day_sensor(self, date):
day = []
try:
for waste in self.waste_data_formatted:
item_date = waste["date"]
if item_date == date:
item_name = waste["type"]
day.append(item_name)
if not day:
day.append(self.default_label)
except Exception as err:
_LOGGER.error(f"Other error occurred __gen_day_sensor: {err}")
return day
# Generate sensor data for today, tomorrow, day after tomorrow
def _gen_day_sensor_data(self):
day_sensor = {}
try:
day_sensor["today"] = ", ".join(self.waste_data_today)
day_sensor["tomorrow"] = ", ".join(self.waste_data_tomorrow)
day_sensor["day_after_tomorrow"] = ", ".join(self.waste_data_dot)
except Exception as err:
_LOGGER.error(f"Other error occurred _gen_day_sensor_data: {err}")
return day_sensor
@property
def day_sensor_data(self):
return self.data


@@ -0,0 +1,80 @@
def _waste_type_rename(item_name):
# DEAFVALAPP
if item_name == "gemengde plastics":
item_name = "plastic"
if item_name == "zak_blauw":
item_name = "restafval"
if item_name == "pbp":
item_name = "pmd"
if item_name == "rest":
item_name = "restafval"
if item_name == "kerstboom":
item_name = "kerstbomen"
# OPZET
if item_name == "snoeiafval":
item_name = "takken"
if item_name == "sloop":
item_name = "grofvuil"
if item_name == "groente":
item_name = "gft"
if item_name == "groente-, fruit en tuinafval":
item_name = "gft"
if item_name == "groente, fruit- en tuinafval":
item_name = "gft"
if item_name == "kca":
item_name = "chemisch"
if item_name == "tariefzak restafval":
item_name = "restafvalzakken"
if item_name == "restafvalzakken":
item_name = "restafvalzakken"
if item_name == "rest":
item_name = "restafval"
if item_name == "plastic, blik & drinkpakken overbetuwe":
item_name = "pmd"
if item_name == "papier en karton":
item_name = "papier"
if item_name == "kerstb":
item_name = "kerstboom"
# RD4
if item_name == "pruning":
item_name = "takken"
if item_name == "residual_waste":
item_name = "restafval"
if item_name == "best_bag":
item_name = "best-tas"
if item_name == "paper":
item_name = "papier"
if item_name == "christmas_trees":
item_name = "kerstbomen"
# XIMMIO
if item_name == "branches":
item_name = "takken"
if item_name == "bulklitter":
item_name = "grofvuil"
if item_name == "bulkygardenwaste":
item_name = "tuinafval"
if item_name == "glass":
item_name = "glas"
if item_name == "green":
item_name = "gft"
if item_name == "grey":
item_name = "restafval"
if item_name == "kca":
item_name = "chemisch"
if item_name == "plastic":
item_name = "plastic"
if item_name == "packages":
item_name = "pmd"
if item_name == "paper":
item_name = "papier"
if item_name == "remainder":
item_name = "restwagen"
if item_name == "textile":
item_name = "textiel"
if item_name == "tree":
item_name = "kerstbomen"
return item_name
if __name__ == "__main__":
print("Yell something at a mountain!")


@@ -0,0 +1,76 @@
from datetime import datetime
from ..const.const import _LOGGER
class NextSensorData(object):
##########################################################################
# INIT
##########################################################################
def __init__(self, waste_data_after_date_selected, default_label):
self.waste_data_after_date_selected = sorted(
waste_data_after_date_selected, key=lambda d: d["date"]
)
TODAY = datetime.now().strftime("%d-%m-%Y")
self.today_date = datetime.strptime(TODAY, "%d-%m-%Y")
self.default_label = default_label
self.next_waste_date = self.__get_next_waste_date()
self.next_waste_in_days = self.__get_next_waste_in_days()
self.next_waste_type = self.__get_next_waste_type()
self.data = self._gen_next_sensor_data()
##########################################################################
# GENERATE NEXT SENSOR(S)
##########################################################################
# Generate sensor next_waste_date
def __get_next_waste_date(self):
next_waste_date = self.default_label
try:
next_waste_date = self.waste_data_after_date_selected[0]["date"]
except Exception as err:
_LOGGER.error(f"Other error occurred _get_next_waste_date: {err}")
return next_waste_date
# Generate sensor next_waste_in_days
def __get_next_waste_in_days(self):
next_waste_in_days = self.default_label
try:
next_waste_in_days = abs(self.today_date - self.next_waste_date).days # type: ignore
except Exception as err:
_LOGGER.error(f"Other error occurred _get_next_waste_in_days: {err}")
return next_waste_in_days
# Generate sensor next_waste_type
def __get_next_waste_type(self):
next_waste_type = []
try:
for waste in self.waste_data_after_date_selected:
item_date = waste["date"]
if item_date == self.next_waste_date:
item_name = waste["type"]
next_waste_type.append(item_name)
if not next_waste_type:
next_waste_type.append(self.default_label)
except Exception as err:
_LOGGER.error(f"Other error occurred _get_next_waste_type: {err}")
return next_waste_type
# Generate sensor data for custom sensors
def _gen_next_sensor_data(self):
next_sensor = {}
try:
next_sensor["next_date"] = self.next_waste_date
next_sensor["next_in_days"] = self.next_waste_in_days
next_sensor["next_type"] = ", ".join(self.next_waste_type)
except Exception as err:
_LOGGER.error(f"Other error occurred _gen_next_sensor_data: {err}")
return next_sensor
@property
def next_sensor_data(self):
return self.data


@@ -0,0 +1,181 @@
from datetime import datetime, timedelta
from ..common.day_sensor_data import DaySensorData
from ..common.next_sensor_data import NextSensorData
from ..const.const import _LOGGER
# import sys
# def excepthook(type, value, traceback):
# _LOGGER.error(value)
# sys.excepthook = excepthook
class WasteDataTransformer(object):
##########################################################################
# INIT
##########################################################################
def __init__(
self,
waste_data_raw,
exclude_pickup_today,
exclude_list,
default_label,
):
self.waste_data_raw = waste_data_raw
self.exclude_pickup_today = exclude_pickup_today
self.exclude_list = exclude_list.strip().lower()
self.default_label = default_label
TODAY = datetime.now().strftime("%d-%m-%Y")
self.DATE_TODAY = datetime.strptime(TODAY, "%d-%m-%Y")
self.DATE_TOMORROW = datetime.strptime(TODAY, "%d-%m-%Y") + timedelta(days=1)
(
self._waste_data_with_today,
self._waste_data_without_today,
) = self.__structure_waste_data() # type: ignore
(
self._waste_data_provider,
self._waste_types_provider,
self._waste_data_custom,
self._waste_types_custom,
) = self.__gen_sensor_waste_data()
##########################################################################
# STRUCTURE ALL WASTE DATA IN CUSTOM FORMAT
#########################################################################
def __structure_waste_data(self):
try:
waste_data_with_today = {}
waste_data_without_today = {}
for item in self.waste_data_raw:
item_date = datetime.strptime(item["date"], "%Y-%m-%d")
item_name = item["type"].strip().lower()
if (
item_name not in self.exclude_list
and item_name not in waste_data_with_today
and item_date >= self.DATE_TODAY
):
waste_data_with_today[item_name] = item_date
for item in self.waste_data_raw:
item_date = datetime.strptime(item["date"], "%Y-%m-%d")
item_name = item["type"].strip().lower()
if (
item_name not in self.exclude_list
and item_name not in waste_data_without_today
and item_date > self.DATE_TODAY
):
waste_data_without_today[item_name] = item_date
try:
for item in self.waste_data_raw:
item_name = item["type"].strip().lower()
if item_name not in self.exclude_list:
if item_name not in waste_data_with_today.keys():
waste_data_with_today[item_name] = self.default_label
if item_name not in waste_data_without_today.keys():
waste_data_without_today[item_name] = self.default_label
except Exception as err:
_LOGGER.error(f"Other error occurred: {err}")
return waste_data_with_today, waste_data_without_today
except Exception as err:
_LOGGER.error(f"Other error occurred: {err}")
##########################################################################
# GENERATE REQUIRED DATA FOR HASS SENSORS
##########################################################################
def __gen_sensor_waste_data(self):
if self.exclude_pickup_today.casefold() in ("false", "no"):
date_selected = self.DATE_TODAY
waste_data_provider = self._waste_data_with_today
else:
date_selected = self.DATE_TOMORROW
waste_data_provider = self._waste_data_without_today
try:
waste_types_provider = sorted(
{
waste["type"]
for waste in self.waste_data_raw
if waste["type"] not in self.exclude_list
}
)
except Exception as err:
_LOGGER.error(f"Other error occurred waste_types_provider: {err}")
try:
waste_data_formatted = [
{
"type": waste["type"],
"date": datetime.strptime(waste["date"], "%Y-%m-%d"),
}
for waste in self.waste_data_raw
if waste["type"] in waste_types_provider
]
except Exception as err:
_LOGGER.error(f"Other error occurred waste_data_formatted: {err}")
days = DaySensorData(waste_data_formatted, self.default_label)
try:
waste_data_after_date_selected = list(
filter(
lambda waste: waste["date"] >= date_selected, waste_data_formatted
)
)
except Exception as err:
_LOGGER.error(f"Other error occurred waste_data_after_date_selected: {err}")
next_data = NextSensorData(waste_data_after_date_selected, self.default_label)
try:
waste_data_custom = {**next_data.next_sensor_data, **days.day_sensor_data}
except Exception as err:
_LOGGER.error(f"Other error occurred waste_data_custom: {err}")
try:
waste_types_custom = list(sorted(waste_data_custom.keys()))
except Exception as err:
_LOGGER.error(f"Other error occurred waste_types_custom: {err}")
return (
waste_data_provider,
waste_types_provider,
waste_data_custom,
waste_types_custom,
)
##########################################################################
# PROPERTIES FOR EXECUTION
##########################################################################
@property
def waste_data_with_today(self):
return self._waste_data_with_today
@property
def waste_data_without_today(self):
return self._waste_data_without_today
@property
def waste_data_provider(self):
return self._waste_data_provider
@property
def waste_types_provider(self):
return self._waste_types_provider
@property
def waste_data_custom(self):
return self._waste_data_custom
@property
def waste_types_custom(self):
return self._waste_types_custom


@@ -0,0 +1,131 @@
from datetime import timedelta
import logging
_LOGGER = logging.getLogger(__name__)
API = "api"
NAME = "afvalwijzer"
VERSION = "2022.11.02"
ISSUE_URL = "https://github.com/xirixiz/homeassistant-afvalwijzer/issues"
SENSOR_COLLECTOR_TO_URL = {
"afvalwijzer_data_default": [
"https://api.{0}.nl/webservices/appsinput/?apikey=5ef443e778f41c4f75c69459eea6e6ae0c2d92de729aa0fc61653815fbd6a8ca&method=postcodecheck&postcode={1}&street=&huisnummer={2}&toevoeging={3}&app_name=afvalwijzer&platform=web&afvaldata={4}&langs=nl&"
],
"afvalstoffendienstkalender": [
"https://{0}.afvalstoffendienstkalender.nl/nl/{1}/{2}/"
],
"afvalstoffendienstkalender-s-hertogenbosch": [
"https://afvalstoffendienstkalender.nl/nl/{0}/{1}/"
],
"ximmio01": [
"https://wasteapi.ximmio.com/api/FetchAdress",
"https://wasteapi.ximmio.com/api/GetCalendar",
],
"ximmio02": [
"https://wasteprod2api.ximmio.com/api/FetchAdress",
"https://wasteprod2api.ximmio.com/api/GetCalendar",
],
}
SENSOR_COLLECTORS_OPZET = {
"alkmaar": "https://www.stadswerk072.nl",
"alphenaandenrijn": "https://afvalkalender.alphenaandenrijn.nl",
"berkelland": "https://afvalkalender.gemeenteberkelland.nl",
"blink": "https://mijnblink.nl",
"cranendonck": "https://afvalkalender.cranendonck.nl",
"cyclus": "https://afvalkalender.cyclusnv.nl",
"dar": "https://afvalkalender.dar.nl",
"denhaag": "https://huisvuilkalender.denhaag.nl",
"gad": "https://inzamelkalender.gad.nl",
"hvc": "https://inzamelkalender.hvcgroep.nl",
"lingewaard": "https://afvalwijzer.lingewaard.nl",
"middelburg-vlissingen": "https://afvalwijzer.middelburgvlissingen.nl",
"montfoort": "https://afvalkalender.cyclusnv.nl",
"peelenmaas": "https://afvalkalender.peelenmaas.nl",
"prezero": "https://inzamelwijzer.prezero.nl",
"purmerend": "https://afvalkalender.purmerend.nl",
"rmn": "https://inzamelschema.rmn.nl",
"schouwen-duiveland": "https://afvalkalender.schouwen-duiveland.nl",
"spaarnelanden": "https://afvalwijzer.spaarnelanden.nl",
"sudwestfryslan": "https://afvalkalender.sudwestfryslan.nl",
"suez": "https://inzamelwijzer.prezero.nl",
"venray": "https://afvalkalender.venray.nl",
"voorschoten": "https://afvalkalender.voorschoten.nl",
"waalre": "https://afvalkalender.waalre.nl",
"zrd": "https://afvalkalender.zrd.nl",
}
SENSOR_COLLECTORS_ICALENDAR = {
"eemsdelta": "https://www.eemsdelta.nl/trash-calendar/download/{1}/{2}",
}
SENSOR_COLLECTORS_AFVALWIJZER = [
"mijnafvalwijzer",
"afvalstoffendienstkalender",
"afvalstoffendienstkalender-s-hertogenbosch",
"rova",
]
SENSOR_COLLECTORS_XIMMIO = {
"acv": "f8e2844a-095e-48f9-9f98-71fceb51d2c3",
"almere": "53d8db94-7945-42fd-9742-9bbc71dbe4c1",
"areareiniging": "adc418da-d19b-11e5-ab30-625662870761",
"avalex": "f7a74ad1-fdbf-4a43-9f91-44644f4d4222",
"avri": "78cd4156-394b-413d-8936-d407e334559a",
"bar": "bb58e633-de14-4b2a-9941-5bc419f1c4b0",
"hellendoorn": "24434f5b-7244-412b-9306-3a2bd1e22bc1",
"meerlanden": "800bf8d7-6dd1-4490-ba9d-b419d6dc8a45",
"meppel": "b7a594c7-2490-4413-88f9-94749a3ec62a",
"rad": "13a2cad9-36d0-4b01-b877-efcb421a864d",
"twentemilieu": "8d97bb56-5afd-4cbc-a651-b4f7314264b4",
"waardlanden": "942abcf6-3775-400d-ae5d-7380d728b23c",
"westland": "6fc75608-126a-4a50-9241-a002ce8c8a6c",
"ximmio": "800bf8d7-6dd1-4490-ba9d-b419d6dc8a45",
"reinis": "9dc25c8a-175a-4a41-b7a1-83f237a80b77",
}
SENSOR_COLLECTORS_RD4 = {
"rd4": "https://data.rd4.nl/api/v1/waste-calendar?postal_code={0}&house_number={1}&house_number_extension={2}&year={3}",
}
SENSOR_COLLECTORS_DEAFVALAPP = {
"deafvalapp": "https://dataservice.deafvalapp.nl/dataservice/DataServiceServlet?service=OPHAALSCHEMA&land=NL&postcode={0}&straatId=0&huisnr={1}&huisnrtoev={2}",
}
CONF_COLLECTOR = "provider"
CONF_API_TOKEN = "api_token"
CONF_POSTAL_CODE = "postal_code"
CONF_STREET_NUMBER = "street_number"
CONF_SUFFIX = "suffix"
CONF_DATE_FORMAT = "date_format"
CONF_EXCLUDE_PICKUP_TODAY = "exclude_pickup_today"
CONF_DEFAULT_LABEL = "default_label"
CONF_ID = "id"
CONF_EXCLUDE_LIST = "exclude_list"
SENSOR_PREFIX = "afvalwijzer "
SENSOR_ICON = "mdi:recycle"
ATTR_LAST_UPDATE = "last_update"
ATTR_IS_COLLECTION_DATE_TODAY = "is_collection_date_today"
ATTR_IS_COLLECTION_DATE_TOMORROW = "is_collection_date_tomorrow"
ATTR_IS_COLLECTION_DATE_DAY_AFTER_TOMORROW = "is_collection_date_day_after_tomorrow"
ATTR_DAYS_UNTIL_COLLECTION_DATE = "days_until_collection_date"
ATTR_YEAR_MONTH_DAY_DATE = "year_month_day_date"
MIN_TIME_BETWEEN_UPDATES = timedelta(hours=1)
PARALLEL_UPDATES = 1
SCAN_INTERVAL = timedelta(seconds=30)
DOMAIN = "afvalwijzer"
DOMAIN_DATA = "afvalwijzer_data"
STARTUP_MESSAGE = f"""
-------------------------------------------------------------------,
Afvalwijzer - {VERSION},
This is a custom integration!,
If you have any issues with this you need to open an issue here:,
https://github.com/xirixiz/homeassistant-afvalwijzer/issues,
-------------------------------------------------------------------,
"""


@@ -0,0 +1,14 @@
{
"domain": "afvalwijzer",
"name": "Afvalwijzer",
"version": "2022.11.02",
"iot_class": "cloud_polling",
"documentation": "https://github.com/xirixiz/homeassistant-afvalwijzer/blob/master/README.md",
"issue_tracker": "https://github.com/xirixiz/homeassistant-afvalwijzer/issues",
"config_flow": false,
"dependencies": [],
"codeowners": [
"@xirixiz"
],
"requirements": []
}


@@ -0,0 +1,146 @@
#!/usr/bin/env python3
"""
Sensor component Afvalwijzer
Author: Bram van Dartel - xirixiz
"""
from functools import partial
from homeassistant.components.sensor import PLATFORM_SCHEMA
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
import voluptuous as vol
from .collector.main_collector import MainCollector
from .const.const import (
_LOGGER,
CONF_COLLECTOR,
CONF_DEFAULT_LABEL,
CONF_EXCLUDE_LIST,
CONF_EXCLUDE_PICKUP_TODAY,
CONF_ID,
CONF_POSTAL_CODE,
CONF_STREET_NUMBER,
CONF_SUFFIX,
MIN_TIME_BETWEEN_UPDATES,
PARALLEL_UPDATES,
SCAN_INTERVAL,
STARTUP_MESSAGE,
)
from .sensor_custom import CustomSensor
from .sensor_provider import ProviderSensor
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(
CONF_COLLECTOR, default="mijnafvalwijzer"
): cv.string,
vol.Required(CONF_POSTAL_CODE, default="1234AB"): cv.string,
vol.Required(CONF_STREET_NUMBER, default="5"): cv.string,
vol.Optional(CONF_SUFFIX, default=""): cv.string,
vol.Optional(CONF_EXCLUDE_PICKUP_TODAY, default="true"): cv.string,
vol.Optional(CONF_EXCLUDE_LIST, default=""): cv.string,
vol.Optional(CONF_DEFAULT_LABEL, default="Geen"): cv.string,
vol.Optional(CONF_ID.strip().lower(), default=""): cv.string,
}
)
_LOGGER.info(STARTUP_MESSAGE)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
provider = config.get(CONF_COLLECTOR)
postal_code = config.get(CONF_POSTAL_CODE)
street_number = config.get(CONF_STREET_NUMBER)
suffix = config.get(CONF_SUFFIX)
exclude_pickup_today = config.get(CONF_EXCLUDE_PICKUP_TODAY)
exclude_list = config.get(CONF_EXCLUDE_LIST)
default_label = config.get(CONF_DEFAULT_LABEL)
_LOGGER.debug(f"Afvalwijzer provider = {provider}")
_LOGGER.debug(f"Afvalwijzer zipcode = {postal_code}")
_LOGGER.debug(f"Afvalwijzer street_number = {street_number}")
try:
collector = await hass.async_add_executor_job(
partial(
MainCollector,
provider,
postal_code,
street_number,
suffix,
exclude_pickup_today,
exclude_list,
default_label,
)
)
except ValueError as err:
_LOGGER.error(f"Check afvalwijzer platform settings {err.args}")
fetch_data = AfvalwijzerData(config)
waste_types_provider = collector.waste_types_provider
_LOGGER.debug(f"Generating waste_types_provider list = {waste_types_provider}")
waste_types_custom = collector.waste_types_custom
_LOGGER.debug(f"Generating waste_types_custom list = {waste_types_custom}")
entities = []
for waste_type in waste_types_provider:
_LOGGER.debug(f"Adding sensor provider: {waste_type}")
entities.append(ProviderSensor(hass, waste_type, fetch_data, config))
for waste_type in waste_types_custom:
_LOGGER.debug(f"Adding sensor custom: {waste_type}")
entities.append(CustomSensor(hass, waste_type, fetch_data, config))
_LOGGER.debug(f"Entities appended = {entities}")
async_add_entities(entities)
class AfvalwijzerData(object):
def __init__(self, config):
self.config = config
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
provider = self.config.get(CONF_COLLECTOR)
postal_code = self.config.get(CONF_POSTAL_CODE)
street_number = self.config.get(CONF_STREET_NUMBER)
suffix = self.config.get(CONF_SUFFIX)
exclude_pickup_today = self.config.get(CONF_EXCLUDE_PICKUP_TODAY)
default_label = self.config.get(CONF_DEFAULT_LABEL)
exclude_list = self.config.get(CONF_EXCLUDE_LIST)
try:
collector = MainCollector(
provider,
postal_code,
street_number,
suffix,
exclude_pickup_today,
exclude_list,
default_label,
)
except ValueError as err:
_LOGGER.error(f"Check afvalwijzer platform settings {err.args}")
# waste data provider update - with today
try:
self.waste_data_with_today = collector.waste_data_with_today
except ValueError as err:
_LOGGER.error(f"Check waste_data_provider {err.args}")
self.waste_data_with_today = default_label
# waste data provider update - without today
try:
self.waste_data_without_today = collector.waste_data_without_today
except ValueError as err:
_LOGGER.error(f"Check waste_data_provider {err.args}")
self.waste_data_without_today = default_label
# waste data custom update
try:
self.waste_data_custom = collector.waste_data_custom
except ValueError as err:
_LOGGER.error(f"Check waste_data_custom {err.args}")
self.waste_data_custom = default_label


@@ -0,0 +1,106 @@
#!/usr/bin/env python3
from datetime import datetime
import hashlib
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
from .const.const import (
_LOGGER,
ATTR_LAST_UPDATE,
ATTR_YEAR_MONTH_DAY_DATE,
CONF_DEFAULT_LABEL,
CONF_ID,
CONF_POSTAL_CODE,
CONF_STREET_NUMBER,
CONF_SUFFIX,
MIN_TIME_BETWEEN_UPDATES,
PARALLEL_UPDATES,
SENSOR_ICON,
SENSOR_PREFIX,
)
class CustomSensor(Entity):
def __init__(self, hass, waste_type, fetch_data, config):
self.hass = hass
self.waste_type = waste_type
self.fetch_data = fetch_data
self.config = config
self._id_name = self.config.get(CONF_ID)
self._default_label = self.config.get(CONF_DEFAULT_LABEL)
self._last_update = None
self._name = (
SENSOR_PREFIX + (f"{self._id_name} " if len(self._id_name) > 0 else "")
) + self.waste_type
self._state = self.config.get(CONF_DEFAULT_LABEL)
self._icon = SENSOR_ICON
self._year_month_day_date = None
self._unique_id = hashlib.sha1(
f"{self.waste_type}{self.config.get(CONF_ID)}{self.config.get(CONF_POSTAL_CODE)}{self.config.get(CONF_STREET_NUMBER)}{self.config.get(CONF_SUFFIX,'')}".encode(
"utf-8"
)
).hexdigest()
@property
def name(self):
return self._name
@property
def unique_id(self):
return self._unique_id
@property
def icon(self):
return self._icon
@property
def state(self):
return self._state
@property
def extra_state_attributes(self):
if self._year_month_day_date is not None:
return {
ATTR_LAST_UPDATE: self._last_update,
ATTR_YEAR_MONTH_DAY_DATE: self._year_month_day_date,
}
else:
return {
ATTR_LAST_UPDATE: self._last_update,
}
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def async_update(self):
await self.hass.async_add_executor_job(self.fetch_data.update)
waste_data_custom = self.fetch_data.waste_data_custom
try:
# Add attribute, set the last updated status of the sensor
self._last_update = datetime.now().strftime("%d-%m-%Y %H:%M")
if isinstance(waste_data_custom[self.waste_type], datetime):
_LOGGER.debug(
f"Generating state via AfvalwijzerCustomSensor for = {self.waste_type} with value {waste_data_custom[self.waste_type].date()}"
)
# Add the US date format
collection_date_us = waste_data_custom[self.waste_type].date()
self._year_month_day_date = str(collection_date_us)
# Add the NL date format as default state
self._state = datetime.strftime(
waste_data_custom[self.waste_type].date(), "%d-%m-%Y"
)
else:
_LOGGER.debug(
f"Generating state via AfvalwijzerCustomSensor for = {self.waste_type} with value {waste_data_custom[self.waste_type]}"
)
# Add non-date as default state
self._state = str(waste_data_custom[self.waste_type])
except ValueError:
_LOGGER.debug("ValueError AfvalwijzerCustomSensor - unable to set value!")
self._state = self._default_label
self._year_month_day_date = None
self._last_update = datetime.now().strftime("%d-%m-%Y %H:%M")


@@ -0,0 +1,138 @@
#!/usr/bin/env python3
from datetime import date, datetime, timedelta
import hashlib
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
from .const.const import (
_LOGGER,
ATTR_DAYS_UNTIL_COLLECTION_DATE,
ATTR_IS_COLLECTION_DATE_DAY_AFTER_TOMORROW,
ATTR_IS_COLLECTION_DATE_TODAY,
ATTR_IS_COLLECTION_DATE_TOMORROW,
ATTR_LAST_UPDATE,
ATTR_YEAR_MONTH_DAY_DATE,
CONF_DEFAULT_LABEL,
CONF_EXCLUDE_PICKUP_TODAY,
CONF_ID,
CONF_POSTAL_CODE,
CONF_STREET_NUMBER,
CONF_SUFFIX,
MIN_TIME_BETWEEN_UPDATES,
PARALLEL_UPDATES,
SENSOR_ICON,
SENSOR_PREFIX,
)
class ProviderSensor(Entity):
def __init__(self, hass, waste_type, fetch_data, config):
self.hass = hass
self.waste_type = waste_type
self.fetch_data = fetch_data
self.config = config
self._id_name = self.config.get(CONF_ID)
self._default_label = self.config.get(CONF_DEFAULT_LABEL)
self._exclude_pickup_today = self.config.get(CONF_EXCLUDE_PICKUP_TODAY)
self._name = (
SENSOR_PREFIX
+ (self._id_name + " " if len(self._id_name) > 0 else "")
+ self.waste_type
)
self._icon = SENSOR_ICON
self._state = self.config.get(CONF_DEFAULT_LABEL)
self._last_update = None
self._days_until_collection_date = None
self._is_collection_date_today = False
self._is_collection_date_tomorrow = False
self._is_collection_date_day_after_tomorrow = False
self._year_month_day_date = None
self._unique_id = hashlib.sha1(
f"{self.waste_type}{self.config.get(CONF_ID)}{self.config.get(CONF_POSTAL_CODE)}{self.config.get(CONF_STREET_NUMBER)}{self.config.get(CONF_SUFFIX,'')}".encode(
"utf-8"
)
).hexdigest()
@property
def name(self):
return self._name
@property
def unique_id(self):
return self._unique_id
@property
def icon(self):
return self._icon
@property
def state(self):
return self._state
@property
def extra_state_attributes(self):
return {
ATTR_LAST_UPDATE: self._last_update,
ATTR_DAYS_UNTIL_COLLECTION_DATE: self._days_until_collection_date,
ATTR_IS_COLLECTION_DATE_TODAY: self._is_collection_date_today,
ATTR_IS_COLLECTION_DATE_TOMORROW: self._is_collection_date_tomorrow,
ATTR_IS_COLLECTION_DATE_DAY_AFTER_TOMORROW: self._is_collection_date_day_after_tomorrow,
ATTR_YEAR_MONTH_DAY_DATE: self._year_month_day_date,
}
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def async_update(self):
await self.hass.async_add_executor_job(self.fetch_data.update)
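        # Use the data set that still contains today's pickups unless the user chose to exclude them.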
if self._exclude_pickup_today.casefold() in ("false", "no"):
waste_data_provider = self.fetch_data.waste_data_with_today
else:
waste_data_provider = self.fetch_data.waste_data_without_today
try:
if not waste_data_provider or self.waste_type not in waste_data_provider:
                raise ValueError
# Add attribute, set the last updated status of the sensor
self._last_update = datetime.now().strftime("%d-%m-%Y %H:%M")
if isinstance(waste_data_provider[self.waste_type], datetime):
_LOGGER.debug(
f"Generating state via AfvalwijzerCustomSensor for = {self.waste_type} with value {waste_data_provider[self.waste_type].date()}"
)
# Add the US date format
collection_date_us = waste_data_provider[self.waste_type].date()
self._year_month_day_date = str(collection_date_us)
# Add the days until the collection date
delta = collection_date_us - date.today()
self._days_until_collection_date = delta.days
# Check if the collection days are in today, tomorrow and/or the day after tomorrow
self._is_collection_date_today = date.today() == collection_date_us
self._is_collection_date_tomorrow = (
date.today() + timedelta(days=1) == collection_date_us
)
self._is_collection_date_day_after_tomorrow = (
date.today() + timedelta(days=2) == collection_date_us
)
# Add the NL date format as default state
self._state = datetime.strftime(
waste_data_provider[self.waste_type].date(), "%d-%m-%Y"
)
else:
_LOGGER.debug(
f"Generating state via AfvalwijzerCustomSensor for = {self.waste_type} with value {waste_data_provider[self.waste_type]}"
)
# Add non-date as default state
self._state = str(waste_data_provider[self.waste_type])
except ValueError:
_LOGGER.debug("ValueError AfvalwijzerProviderSensor - unable to set value!")
self._state = self._default_label
self._days_until_collection_date = None
self._year_month_day_date = None
self._is_collection_date_today = False
self._is_collection_date_tomorrow = False
self._is_collection_date_day_after_tomorrow = False
self._last_update = datetime.now().strftime("%d-%m-%Y %H:%M")

View File

@@ -0,0 +1,397 @@
[
{
"nameType": "gft",
"type": "gft",
"date": "2021-01-02"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-01-05"
},
{
"nameType": "restafval",
"type": "restafval",
"date": "2021-01-08"
},
{
"nameType": "kerstbomen",
"type": "kerstbomen",
"date": "2021-01-09"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-01-15"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-01-19"
},
{
"nameType": "papier",
"type": "papier",
"date": "2021-01-20"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-01-29"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-02-02"
},
{
"nameType": "restafval",
"type": "restafval",
"date": "2021-02-05"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-02-12"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-02-16"
},
{
"nameType": "papier",
"type": "papier",
"date": "2021-02-17"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-02-26"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-03-02"
},
{
"nameType": "restafval",
"type": "restafval",
"date": "2021-03-05"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-03-12"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-03-16"
},
{
"nameType": "papier",
"type": "papier",
"date": "2021-03-17"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-03-26"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-03-30"
},
{
"nameType": "restafval",
"type": "restafval",
"date": "2021-04-02"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-04-09"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-04-13"
},
{
"nameType": "papier",
"type": "papier",
"date": "2021-04-21"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-04-23"
},
{
"nameType": "restafval",
"type": "restafval",
"date": "2021-04-30"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-04-30"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-05-07"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-05-11"
},
{
"nameType": "papier",
"type": "papier",
"date": "2021-05-19"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-05-21"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-05-25"
},
{
"nameType": "restafval",
"type": "restafval",
"date": "2021-05-28"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-06-04"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-06-08"
},
{
"nameType": "papier",
"type": "papier",
"date": "2021-06-16"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-06-18"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-06-22"
},
{
"nameType": "restafval",
"type": "restafval",
"date": "2021-06-25"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-07-02"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-07-06"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-07-16"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-07-20"
},
{
"nameType": "papier",
"type": "papier",
"date": "2021-07-21"
},
{
"nameType": "restafval",
"type": "restafval",
"date": "2021-07-23"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-07-30"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-08-03"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-08-13"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-08-17"
},
{
"nameType": "papier",
"type": "papier",
"date": "2021-08-18"
},
{
"nameType": "restafval",
"type": "restafval",
"date": "2021-08-20"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-08-27"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-08-31"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-09-10"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-09-14"
},
{
"nameType": "papier",
"type": "papier",
"date": "2021-09-15"
},
{
"nameType": "restafval",
"type": "restafval",
"date": "2021-09-17"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-09-24"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-09-28"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-10-08"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-10-12"
},
{
"nameType": "restafval",
"type": "restafval",
"date": "2021-10-15"
},
{
"nameType": "papier",
"type": "papier",
"date": "2021-10-20"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-10-22"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-10-26"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-11-05"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-11-09"
},
{
"nameType": "restafval",
"type": "restafval",
"date": "2021-11-12"
},
{
"nameType": "papier",
"type": "papier",
"date": "2021-11-17"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-11-19"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-11-19"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-12-03"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-12-07"
},
{
"nameType": "restafval",
"type": "restafval",
"date": "2021-12-10"
},
{
"nameType": "papier",
"type": "papier",
"date": "2021-12-15"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-12-17"
},
{
"nameType": "pmd",
"type": "pmd",
"date": "2021-12-21"
},
{
"nameType": "gft",
"type": "gft",
"date": "2021-12-31"
}
]

View File

@@ -0,0 +1,110 @@
#!/usr/bin/env python3
"""
Sensor component for AfvalDienst
Author: Bram van Dartel - xirixiz
import afvalwijzer
from afvalwijzer.collector.mijnafvalwijzer import AfvalWijzer
AfvalWijzer().get_data('','','')
python3 -m afvalwijzer.tests.test_module
"""
from ..collector.main_collector import MainCollector
# provider = "afvalstoffendienstkalender"
# api_token = "5ef443e778f41c4f75c69459eea6e6ae0c2d92de729aa0fc61653815fbd6a8ca"
# Afvalstoffendienstkalender
# postal_code = "5391KE"
# street_number = "1"
# Common
suffix = ""
exclude_pickup_today = "True"
default_label = "Geen"
exclude_list = ""
# DeAfvalapp
# provider = "deafvalapp"
# postal_code = "6105CN"
# street_number = "1"
# Icalendar
# provider = "eemsdelta"
# postal_code = "9991AB"
# street_number = "2"
# Afvalwijzer
# provider = "mijnafvalwijzer"
# postal_code = "5146eg"
# street_number = "1"
# Opzet
# provider = "prezero"
# postal_code = "6665CN"
# street_number = "1"
# RD4
# provider = "rd4"
# postal_code = "6301ET"
# street_number = "24"
# suffix = "C"
# Ximmio
provider = "meerlanden"
postal_code = "2121xt"
street_number = "38"
# Ximmio
# provider = "acv"
# postal_code = "6713CG"
# street_number = "11"
# postal_code = postal_code.strip().upper()
collector = MainCollector(
provider,
postal_code,
street_number,
suffix,
exclude_pickup_today,
exclude_list,
default_label,
)
# MainCollector(
# provider,
# postal_code,
# street_number,
# suffix,
# exclude_pickup_today,
# exclude_list,
# default_label,
# )
# data = XimmioCollector().get_waste_data_provider("meerlanden", postal_code2, street_number2, suffix, default_label, exclude_list)
# data2 = MijnAfvalWijzerCollector().get_waste_data_provider("mijnafvalwijzer", postal_code, street_number, suffix, default_label, exclude_list)
#########################################################################################################
print("\n")
print(collector.waste_data_with_today)
print(collector.waste_data_without_today)
print(collector.waste_data_custom)
print(collector.waste_types_provider)
print(collector.waste_types_custom)
print("\n")
# for key, value in afval1.items():
# print(key, value)
# print("\n")
# for key, value in afval2.items():
# print(key, value)

View File

@@ -0,0 +1,263 @@
"""
HACS gives you a powerful UI to handle downloads of all your custom needs.
For more details about this integration, please refer to the documentation at
https://hacs.xyz/
"""
from __future__ import annotations
import os
from typing import Any
from aiogithubapi import AIOGitHubAPIException, GitHub, GitHubAPI
from aiogithubapi.const import ACCEPT_HEADERS
from awesomeversion import AwesomeVersion
from homeassistant.components.lovelace.system_health import system_health_info
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import Platform, __version__ as HAVERSION
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.start import async_at_start
from homeassistant.loader import async_get_integration
import voluptuous as vol
from .base import HacsBase
from .const import DOMAIN, MINIMUM_HA_VERSION, STARTUP
from .enums import ConfigurationType, HacsDisabledReason, HacsStage, LovelaceMode
from .frontend import async_register_frontend
from .utils.configuration_schema import hacs_config_combined
from .utils.data import HacsData
from .utils.queue_manager import QueueManager
from .utils.version import version_left_higher_or_equal_then_right
from .websocket import async_register_websocket_commands
CONFIG_SCHEMA = vol.Schema({DOMAIN: hacs_config_combined()}, extra=vol.ALLOW_EXTRA)
async def async_initialize_integration(
hass: HomeAssistant,
*,
config_entry: ConfigEntry | None = None,
config: dict[str, Any] | None = None,
) -> bool:
"""Initialize the integration"""
hass.data[DOMAIN] = hacs = HacsBase()
hacs.enable_hacs()
if config is not None:
if DOMAIN not in config:
return True
if hacs.configuration.config_type == ConfigurationType.CONFIG_ENTRY:
return True
hacs.configuration.update_from_dict(
{
"config_type": ConfigurationType.YAML,
**config[DOMAIN],
"config": config[DOMAIN],
}
)
if config_entry is not None:
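        # Config entries created through a YAML import are not supported; remove them.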
if config_entry.source == SOURCE_IMPORT:
hass.async_create_task(hass.config_entries.async_remove(config_entry.entry_id))
return False
hacs.configuration.update_from_dict(
{
"config_entry": config_entry,
"config_type": ConfigurationType.CONFIG_ENTRY,
**config_entry.data,
**config_entry.options,
}
)
integration = await async_get_integration(hass, DOMAIN)
hacs.set_stage(None)
hacs.log.info(STARTUP, integration.version)
clientsession = async_get_clientsession(hass)
hacs.integration = integration
hacs.version = integration.version
hacs.configuration.dev = integration.version == "0.0.0"
hacs.hass = hass
hacs.queue = QueueManager(hass=hass)
hacs.data = HacsData(hacs=hacs)
hacs.system.running = True
hacs.session = clientsession
hacs.core.lovelace_mode = LovelaceMode.YAML
try:
lovelace_info = await system_health_info(hacs.hass)
hacs.core.lovelace_mode = LovelaceMode(lovelace_info.get("mode", "yaml"))
except BaseException: # lgtm [py/catch-base-exception] pylint: disable=broad-except
# If this happens, the users YAML is not valid, we assume YAML mode
pass
hacs.log.debug("Configuration type: %s", hacs.configuration.config_type)
hacs.core.config_path = hacs.hass.config.path()
if hacs.core.ha_version is None:
hacs.core.ha_version = AwesomeVersion(HAVERSION)
## Legacy GitHub client
hacs.github = GitHub(
hacs.configuration.token,
clientsession,
headers={
"User-Agent": f"HACS/{hacs.version}",
"Accept": ACCEPT_HEADERS["preview"],
},
)
## New GitHub client
hacs.githubapi = GitHubAPI(
token=hacs.configuration.token,
session=clientsession,
**{"client_name": f"HACS/{hacs.version}"},
)
async def async_startup():
"""HACS startup tasks."""
hacs.enable_hacs()
for location in (
hass.config.path("custom_components/custom_updater.py"),
hass.config.path("custom_components/custom_updater/__init__.py"),
):
if os.path.exists(location):
hacs.log.critical(
"This cannot be used with custom_updater. "
"To use this you need to remove custom_updater form %s",
location,
)
hacs.disable_hacs(HacsDisabledReason.CONSTRAINS)
return False
if not version_left_higher_or_equal_then_right(
hacs.core.ha_version.string,
MINIMUM_HA_VERSION,
):
hacs.log.critical(
"You need HA version %s or newer to use this integration.",
MINIMUM_HA_VERSION,
)
hacs.disable_hacs(HacsDisabledReason.CONSTRAINS)
return False
if not await hacs.data.restore():
hacs.disable_hacs(HacsDisabledReason.RESTORE)
return False
can_update = await hacs.async_can_update()
hacs.log.debug("Can update %s repositories", can_update)
hacs.set_active_categories()
async_register_websocket_commands(hass)
async_register_frontend(hass, hacs)
if hacs.configuration.config_type == ConfigurationType.YAML:
hass.async_create_task(
async_load_platform(hass, Platform.SENSOR, DOMAIN, {}, hacs.configuration.config)
)
hacs.log.info("Update entities are only supported when using UI configuration")
else:
hass.config_entries.async_setup_platforms(
config_entry,
[Platform.SENSOR, Platform.UPDATE]
if hacs.configuration.experimental
else [Platform.SENSOR],
)
hacs.set_stage(HacsStage.SETUP)
if hacs.system.disabled:
return False
# Schedule startup tasks
async_at_start(hass=hass, at_start_cb=hacs.startup_tasks)
hacs.set_stage(HacsStage.WAITING)
hacs.log.info("Setup complete, waiting for Home Assistant before startup tasks starts")
return not hacs.system.disabled
async def async_try_startup(_=None):
"""Startup wrapper for yaml config."""
try:
startup_result = await async_startup()
except AIOGitHubAPIException:
startup_result = False
if not startup_result:
if (
hacs.configuration.config_type == ConfigurationType.YAML
or hacs.system.disabled_reason != HacsDisabledReason.INVALID_TOKEN
):
hacs.log.info("Could not setup HACS, trying again in 15 min")
async_call_later(hass, 900, async_try_startup)
return
hacs.enable_hacs()
await async_try_startup()
# Mischief managed!
return True
async def async_setup(hass: HomeAssistant, config: dict[str, Any]) -> bool:
"""Set up this integration using yaml."""
return await async_initialize_integration(hass=hass, config=config)
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Set up this integration using UI."""
config_entry.async_on_unload(config_entry.add_update_listener(async_reload_entry))
setup_result = await async_initialize_integration(hass=hass, config_entry=config_entry)
hacs: HacsBase = hass.data[DOMAIN]
return setup_result and not hacs.system.disabled
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Handle removal of an entry."""
hacs: HacsBase = hass.data[DOMAIN]
# Clear out pending queue
hacs.queue.clear()
for task in hacs.recuring_tasks:
# Cancel all pending tasks
task()
# Store data
await hacs.data.async_write(force=True)
try:
if hass.data.get("frontend_panels", {}).get("hacs"):
hacs.log.info("Removing sidepanel")
hass.components.frontend.async_remove_panel("hacs")
except AttributeError:
pass
platforms = ["sensor"]
if hacs.configuration.experimental:
platforms.append("update")
unload_ok = await hass.config_entries.async_unload_platforms(config_entry, platforms)
hacs.set_stage(None)
hacs.disable_hacs(HacsDisabledReason.REMOVED)
hass.data.pop(DOMAIN, None)
return unload_ok
async def async_reload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
"""Reload the HACS config entry."""
await async_unload_entry(hass, config_entry)
await async_setup_entry(hass, config_entry)

View File

@@ -0,0 +1,986 @@
"""Base HACS class."""
from __future__ import annotations
import asyncio
from dataclasses import asdict, dataclass, field
from datetime import timedelta
import gzip
import logging
import math
import os
import pathlib
import shutil
from typing import TYPE_CHECKING, Any, Awaitable, Callable
from aiogithubapi import (
AIOGitHubAPIException,
GitHub,
GitHubAPI,
GitHubAuthenticationException,
GitHubException,
GitHubNotModifiedException,
GitHubRatelimitException,
)
from aiogithubapi.objects.repository import AIOGitHubAPIRepository
from aiohttp.client import ClientSession, ClientTimeout
from awesomeversion import AwesomeVersion
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.issue_registry import async_create_issue, IssueSeverity
from homeassistant.loader import Integration
from homeassistant.util import dt
from .const import DOMAIN, TV
from .enums import (
ConfigurationType,
HacsCategory,
HacsDisabledReason,
HacsDispatchEvent,
HacsGitHubRepo,
HacsStage,
LovelaceMode,
)
from .exceptions import (
AddonRepositoryException,
HacsException,
HacsExecutionStillInProgress,
HacsExpectedException,
HacsRepositoryArchivedException,
HacsRepositoryExistException,
HomeAssistantCoreRepositoryException,
)
from .repositories import RERPOSITORY_CLASSES
from .utils.decode import decode_content
from .utils.json import json_loads
from .utils.logger import LOGGER
from .utils.queue_manager import QueueManager
from .utils.store import async_load_from_store, async_save_to_store
if TYPE_CHECKING:
from .repositories.base import HacsRepository
from .utils.data import HacsData
from .validate.manager import ValidationManager
@dataclass
class RemovedRepository:
"""Removed repository."""
repository: str | None = None
reason: str | None = None
link: str | None = None
    removal_type: str | None = None  # archived, not_compliant, critical, dev, broken
acknowledged: bool = False
def update_data(self, data: dict):
"""Update data of the repository."""
for key in data:
if data[key] is None:
continue
if key in (
"reason",
"link",
"removal_type",
"acknowledged",
):
self.__setattr__(key, data[key])
def to_json(self):
"""Return a JSON representation of the data."""
return {
"repository": self.repository,
"reason": self.reason,
"link": self.link,
"removal_type": self.removal_type,
"acknowledged": self.acknowledged,
}
@dataclass
class HacsConfiguration:
"""HacsConfiguration class."""
appdaemon_path: str = "appdaemon/apps/"
appdaemon: bool = False
config: dict[str, Any] = field(default_factory=dict)
config_entry: ConfigEntry | None = None
config_type: ConfigurationType | None = None
country: str = "ALL"
debug: bool = False
dev: bool = False
experimental: bool = False
frontend_repo_url: str = ""
frontend_repo: str = ""
netdaemon_path: str = "netdaemon/apps/"
netdaemon: bool = False
plugin_path: str = "www/community/"
python_script_path: str = "python_scripts/"
python_script: bool = False
release_limit: int = 5
sidepanel_icon: str = "hacs:hacs"
sidepanel_title: str = "HACS"
theme_path: str = "themes/"
theme: bool = False
    token: str | None = None
def to_json(self) -> str:
"""Return a json string."""
return asdict(self)
def update_from_dict(self, data: dict) -> None:
"""Set attributes from dicts."""
if not isinstance(data, dict):
raise HacsException("Configuration is not valid.")
for key in data:
self.__setattr__(key, data[key])
@dataclass
class HacsCore:
"""HACS Core info."""
config_path: pathlib.Path | None = None
ha_version: AwesomeVersion | None = None
lovelace_mode = LovelaceMode("yaml")
@dataclass
class HacsCommon:
"""Common for HACS."""
categories: set[str] = field(default_factory=set)
renamed_repositories: dict[str, str] = field(default_factory=dict)
archived_repositories: list[str] = field(default_factory=list)
ignored_repositories: list[str] = field(default_factory=list)
skip: list[str] = field(default_factory=list)
@dataclass
class HacsStatus:
"""HacsStatus."""
startup: bool = True
new: bool = False
@dataclass
class HacsSystem:
"""HACS System info."""
disabled_reason: HacsDisabledReason | None = None
running: bool = False
stage = HacsStage.SETUP
action: bool = False
@property
def disabled(self) -> bool:
"""Return if HACS is disabled."""
return self.disabled_reason is not None
@dataclass
class HacsRepositories:
"""HACS Repositories."""
_default_repositories: set[str] = field(default_factory=set)
_repositories: list[HacsRepository] = field(default_factory=list)
_repositories_by_full_name: dict[str, HacsRepository] = field(default_factory=dict)
_repositories_by_id: dict[str, HacsRepository] = field(default_factory=dict)
_removed_repositories: list[RemovedRepository] = field(default_factory=list)
@property
def list_all(self) -> list[HacsRepository]:
"""Return a list of repositories."""
return self._repositories
@property
def list_removed(self) -> list[RemovedRepository]:
"""Return a list of removed repositories."""
return self._removed_repositories
@property
def list_downloaded(self) -> list[HacsRepository]:
"""Return a list of downloaded repositories."""
return [repo for repo in self._repositories if repo.data.installed]
def register(self, repository: HacsRepository, default: bool = False) -> None:
"""Register a repository."""
repo_id = str(repository.data.id)
if repo_id == "0":
return
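        # A known id with a different full name means the repository was renamed
        # upstream; move the existing entry over to the new name instead.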
if registered_repo := self._repositories_by_id.get(repo_id):
if registered_repo.data.full_name == repository.data.full_name:
return
self.unregister(registered_repo)
registered_repo.data.full_name = repository.data.full_name
registered_repo.data.new = False
repository = registered_repo
if repository not in self._repositories:
self._repositories.append(repository)
self._repositories_by_id[repo_id] = repository
self._repositories_by_full_name[repository.data.full_name_lower] = repository
if default:
self.mark_default(repository)
def unregister(self, repository: HacsRepository) -> None:
"""Unregister a repository."""
repo_id = str(repository.data.id)
if repo_id == "0":
return
if not self.is_registered(repository_id=repo_id):
return
if self.is_default(repo_id):
self._default_repositories.remove(repo_id)
if repository in self._repositories:
self._repositories.remove(repository)
self._repositories_by_id.pop(repo_id, None)
self._repositories_by_full_name.pop(repository.data.full_name_lower, None)
def mark_default(self, repository: HacsRepository) -> None:
"""Mark a repository as default."""
repo_id = str(repository.data.id)
if repo_id == "0":
return
if not self.is_registered(repository_id=repo_id):
return
self._default_repositories.add(repo_id)
def set_repository_id(self, repository, repo_id):
"""Update a repository id."""
existing_repo_id = str(repository.data.id)
if existing_repo_id == repo_id:
return
if existing_repo_id != "0":
raise ValueError(
f"The repo id for {repository.data.full_name_lower} "
f"is already set to {existing_repo_id}"
)
repository.data.id = repo_id
self.register(repository)
def is_default(self, repository_id: str | None = None) -> bool:
"""Check if a repository is default."""
if not repository_id:
return False
return repository_id in self._default_repositories
def is_registered(
self,
repository_id: str | None = None,
repository_full_name: str | None = None,
) -> bool:
"""Check if a repository is registered."""
if repository_id is not None:
return repository_id in self._repositories_by_id
if repository_full_name is not None:
return repository_full_name in self._repositories_by_full_name
return False
def is_downloaded(
self,
repository_id: str | None = None,
repository_full_name: str | None = None,
) -> bool:
"""Check if a repository is registered."""
if repository_id is not None:
repo = self.get_by_id(repository_id)
if repository_full_name is not None:
repo = self.get_by_full_name(repository_full_name)
if repo is None:
return False
return repo.data.installed
def get_by_id(self, repository_id: str | None) -> HacsRepository | None:
"""Get repository by id."""
if not repository_id:
return None
return self._repositories_by_id.get(str(repository_id))
def get_by_full_name(self, repository_full_name: str | None) -> HacsRepository | None:
"""Get repository by full name."""
if not repository_full_name:
return None
return self._repositories_by_full_name.get(repository_full_name.lower())
def is_removed(self, repository_full_name: str) -> bool:
"""Check if a repository is removed."""
return repository_full_name in (
repository.repository for repository in self._removed_repositories
)
def removed_repository(self, repository_full_name: str) -> RemovedRepository:
"""Get repository by full name."""
if self.is_removed(repository_full_name):
if removed := [
repository
for repository in self._removed_repositories
if repository.repository == repository_full_name
]:
return removed[0]
removed = RemovedRepository(repository=repository_full_name)
self._removed_repositories.append(removed)
return removed
class HacsBase:
"""Base HACS class."""
common = HacsCommon()
configuration = HacsConfiguration()
core = HacsCore()
data: HacsData | None = None
frontend_version: str | None = None
github: GitHub | None = None
githubapi: GitHubAPI | None = None
hass: HomeAssistant | None = None
integration: Integration | None = None
log: logging.Logger = LOGGER
queue: QueueManager | None = None
recuring_tasks = []
repositories: HacsRepositories = HacsRepositories()
repository: AIOGitHubAPIRepository | None = None
session: ClientSession | None = None
stage: HacsStage | None = None
status = HacsStatus()
system = HacsSystem()
validation: ValidationManager | None = None
version: str | None = None
@property
def integration_dir(self) -> pathlib.Path:
"""Return the HACS integration dir."""
return self.integration.file_path
def set_stage(self, stage: HacsStage | None) -> None:
"""Set HACS stage."""
if stage and self.stage == stage:
return
self.stage = stage
if stage is not None:
self.log.info("Stage changed: %s", self.stage)
self.async_dispatch(HacsDispatchEvent.STAGE, {"stage": self.stage})
def disable_hacs(self, reason: HacsDisabledReason) -> None:
"""Disable HACS."""
if self.system.disabled_reason == reason:
return
self.system.disabled_reason = reason
if reason != HacsDisabledReason.REMOVED:
self.log.error("HACS is disabled - %s", reason)
if (
reason == HacsDisabledReason.INVALID_TOKEN
and self.configuration.config_type == ConfigurationType.CONFIG_ENTRY
):
self.configuration.config_entry.state = ConfigEntryState.SETUP_ERROR
self.configuration.config_entry.reason = "Authentication failed"
self.hass.add_job(self.configuration.config_entry.async_start_reauth, self.hass)
def enable_hacs(self) -> None:
"""Enable HACS."""
if self.system.disabled_reason is not None:
self.system.disabled_reason = None
self.log.info("HACS is enabled")
def enable_hacs_category(self, category: HacsCategory) -> None:
"""Enable HACS category."""
if category not in self.common.categories:
self.log.info("Enable category: %s", category)
self.common.categories.add(category)
def disable_hacs_category(self, category: HacsCategory) -> None:
"""Disable HACS category."""
if category in self.common.categories:
self.log.info("Disabling category: %s", category)
self.common.categories.pop(category)
async def async_save_file(self, file_path: str, content: Any) -> bool:
"""Save a file."""
def _write_file():
with open(
file_path,
mode="w" if isinstance(content, str) else "wb",
encoding="utf-8" if isinstance(content, str) else None,
errors="ignore" if isinstance(content, str) else None,
) as file_handler:
file_handler.write(content)
# Create gz for .js files
if os.path.isfile(file_path):
if file_path.endswith(".js"):
with open(file_path, "rb") as f_in:
with gzip.open(file_path + ".gz", "wb") as f_out:
shutil.copyfileobj(f_in, f_out)
# LEGACY! Remove with 2.0
if "themes" in file_path and file_path.endswith(".yaml"):
filename = file_path.split("/")[-1]
base = file_path.split("/themes/")[0]
combined = f"{base}/themes/{filename}"
if os.path.exists(combined):
self.log.info("Removing old theme file %s", combined)
os.remove(combined)
try:
await self.hass.async_add_executor_job(_write_file)
except BaseException as error: # lgtm [py/catch-base-exception] pylint: disable=broad-except
self.log.error("Could not write data to %s - %s", file_path, error)
return False
return os.path.exists(file_path)
async def async_can_update(self) -> int:
"""Helper to calculate the number of repositories we can fetch data for."""
try:
response = await self.async_github_api_method(self.githubapi.rate_limit)
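            # Keep a reserve of 1000 API calls and budget roughly 10 calls per
            # repository; the result is how many repositories can be updated now.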
if ((limit := response.data.resources.core.remaining or 0) - 1000) >= 10:
return math.floor((limit - 1000) / 10)
reset = dt.as_local(dt.utc_from_timestamp(response.data.resources.core.reset))
self.log.info(
"GitHub API ratelimited - %s remaining (%s)",
response.data.resources.core.remaining,
f"{reset.hour}:{reset.minute}:{reset.second}",
)
self.disable_hacs(HacsDisabledReason.RATE_LIMIT)
except BaseException as exception: # lgtm [py/catch-base-exception] pylint: disable=broad-except
self.log.exception(exception)
return 0
async def async_github_get_hacs_default_file(self, filename: str) -> list:
"""Get the content of a default file."""
response = await self.async_github_api_method(
method=self.githubapi.repos.contents.get,
repository=HacsGitHubRepo.DEFAULT,
path=filename,
)
if response is None:
return []
return json_loads(decode_content(response.data.content))
async def async_github_api_method(
self,
method: Callable[[], Awaitable[TV]],
*args,
raise_exception: bool = True,
**kwargs,
) -> TV | None:
"""Call a GitHub API method"""
_exception = None
try:
return await method(*args, **kwargs)
except GitHubAuthenticationException as exception:
self.disable_hacs(HacsDisabledReason.INVALID_TOKEN)
_exception = exception
except GitHubRatelimitException as exception:
self.disable_hacs(HacsDisabledReason.RATE_LIMIT)
_exception = exception
except GitHubNotModifiedException as exception:
raise exception
except GitHubException as exception:
_exception = exception
except BaseException as exception: # lgtm [py/catch-base-exception] pylint: disable=broad-except
self.log.exception(exception)
_exception = exception
if raise_exception and _exception is not None:
raise HacsException(_exception)
return None
async def async_register_repository(
self,
repository_full_name: str,
category: HacsCategory,
*,
check: bool = True,
ref: str | None = None,
repository_id: str | None = None,
default: bool = False,
) -> None:
"""Register a repository."""
if repository_full_name in self.common.skip:
if repository_full_name != HacsGitHubRepo.INTEGRATION:
raise HacsExpectedException(f"Skipping {repository_full_name}")
if repository_full_name == "home-assistant/core":
raise HomeAssistantCoreRepositoryException()
if repository_full_name == "home-assistant/addons" or repository_full_name.startswith(
"hassio-addons/"
):
raise AddonRepositoryException()
if category not in RERPOSITORY_CLASSES:
raise HacsException(f"{category} is not a valid repository category.")
if (renamed := self.common.renamed_repositories.get(repository_full_name)) is not None:
repository_full_name = renamed
repository: HacsRepository = RERPOSITORY_CLASSES[category](self, repository_full_name)
if check:
try:
await repository.async_registration(ref)
if self.status.new:
repository.data.new = False
if repository.validate.errors:
self.common.skip.append(repository.data.full_name)
if not self.status.startup:
self.log.error("Validation for %s failed.", repository_full_name)
if self.system.action:
raise HacsException(
f"::error:: Validation for {repository_full_name} failed."
)
return repository.validate.errors
if self.system.action:
repository.logger.info("%s Validation completed", repository.string)
else:
repository.logger.info("%s Registration completed", repository.string)
except (HacsRepositoryExistException, HacsRepositoryArchivedException):
return
except AIOGitHubAPIException as exception:
self.common.skip.append(repository.data.full_name)
raise HacsException(
f"Validation for {repository_full_name} failed with {exception}."
) from exception
if repository_id is not None:
repository.data.id = repository_id
else:
if self.hass is not None and ((check and repository.data.new) or self.status.new):
self.async_dispatch(
HacsDispatchEvent.REPOSITORY,
{
"action": "registration",
"repository": repository.data.full_name,
"repository_id": repository.data.id,
},
)
self.repositories.register(repository, default)
async def startup_tasks(self, _=None) -> None:
"""Tasks that are started after setup."""
self.set_stage(HacsStage.STARTUP)
try:
repository = self.repositories.get_by_full_name(HacsGitHubRepo.INTEGRATION)
if repository is None:
await self.async_register_repository(
repository_full_name=HacsGitHubRepo.INTEGRATION,
category=HacsCategory.INTEGRATION,
default=True,
)
repository = self.repositories.get_by_full_name(HacsGitHubRepo.INTEGRATION)
if repository is None:
raise HacsException("Unknown error")
repository.data.installed = True
repository.data.installed_version = self.integration.version.string
repository.data.new = False
repository.data.releases = True
self.repository = repository.repository_object
self.repositories.mark_default(repository)
except HacsException as exception:
if "403" in str(exception):
self.log.critical(
"GitHub API is ratelimited, or the token is wrong.",
)
else:
self.log.critical("Could not load HACS! - %s", exception)
self.disable_hacs(HacsDisabledReason.LOAD_HACS)
if critical := await async_load_from_store(self.hass, "critical"):
for repo in critical:
if not repo["acknowledged"]:
self.log.critical("URGENT!: Check the HACS panel!")
self.hass.components.persistent_notification.create(
title="URGENT!", message="**Check the HACS panel!**"
)
break
self.recuring_tasks.append(
self.hass.helpers.event.async_track_time_interval(
self.async_get_all_category_repositories, timedelta(hours=3)
)
)
self.recuring_tasks.append(
self.hass.helpers.event.async_track_time_interval(
self.async_update_all_repositories, timedelta(hours=25)
)
)
self.recuring_tasks.append(
self.hass.helpers.event.async_track_time_interval(
self.async_check_rate_limit, timedelta(minutes=5)
)
)
self.recuring_tasks.append(
self.hass.helpers.event.async_track_time_interval(
self.async_prosess_queue, timedelta(minutes=10)
)
)
self.recuring_tasks.append(
self.hass.helpers.event.async_track_time_interval(
self.async_update_downloaded_repositories, timedelta(hours=2)
)
)
self.recuring_tasks.append(
self.hass.helpers.event.async_track_time_interval(
self.async_handle_critical_repositories, timedelta(hours=2)
)
)
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_FINAL_WRITE, self.data.async_force_write
)
self.status.startup = False
self.async_dispatch(HacsDispatchEvent.STATUS, {})
await self.async_handle_removed_repositories()
await self.async_get_all_category_repositories()
await self.async_update_downloaded_repositories()
self.set_stage(HacsStage.RUNNING)
self.async_dispatch(HacsDispatchEvent.RELOAD, {"force": True})
await self.async_handle_critical_repositories()
await self.async_prosess_queue()
self.async_dispatch(HacsDispatchEvent.STATUS, {})
async def async_download_file(self, url: str, *, headers: dict | None = None) -> bytes | None:
"""Download files, and return the content."""
if url is None:
return None
if "tags/" in url:
url = url.replace("tags/", "")
self.log.debug("Downloading %s", url)
timeouts = 0
while timeouts < 5:
try:
request = await self.session.get(
url=url,
timeout=ClientTimeout(total=60),
headers=headers,
)
# Make sure that we got a valid result
if request.status == 200:
return await request.read()
raise HacsException(
f"Got status code {request.status} when trying to download {url}"
)
except asyncio.TimeoutError:
self.log.warning(
"A timeout of 60! seconds was encountered while downloading %s, "
"using over 60 seconds to download a single file is not normal. "
"This is not a problem with HACS but how your host communicates with GitHub. "
"Retrying up to 5 times to mask/hide your host/network problems to "
"stop the flow of issues opened about it. "
"Tries left %s",
url,
(4 - timeouts),
)
timeouts += 1
await asyncio.sleep(1)
continue
except BaseException as exception: # lgtm [py/catch-base-exception] pylint: disable=broad-except
self.log.exception("Download failed - %s", exception)
return None
async def async_recreate_entities(self) -> None:
"""Recreate entities."""
        if (
            self.configuration.config_type == ConfigurationType.YAML
            or not self.configuration.experimental
        ):
return
platforms = [Platform.SENSOR, Platform.UPDATE]
await self.hass.config_entries.async_unload_platforms(
entry=self.configuration.config_entry,
platforms=platforms,
)
self.hass.config_entries.async_setup_platforms(self.configuration.config_entry, platforms)
@callback
def async_dispatch(self, signal: HacsDispatchEvent, data: dict | None = None) -> None:
"""Dispatch a signal with data."""
async_dispatcher_send(self.hass, signal, data)
def set_active_categories(self) -> None:
"""Set the active categories."""
self.common.categories = set()
for category in (HacsCategory.INTEGRATION, HacsCategory.PLUGIN):
self.enable_hacs_category(HacsCategory(category))
if HacsCategory.PYTHON_SCRIPT in self.hass.config.components:
self.enable_hacs_category(HacsCategory.PYTHON_SCRIPT)
if self.hass.services.has_service("frontend", "reload_themes"):
self.enable_hacs_category(HacsCategory.THEME)
if self.configuration.appdaemon:
self.enable_hacs_category(HacsCategory.APPDAEMON)
if self.configuration.netdaemon:
self.enable_hacs_category(HacsCategory.NETDAEMON)
async def async_get_all_category_repositories(self, _=None) -> None:
"""Get all category repositories."""
if self.system.disabled:
return
self.log.info("Loading known repositories")
await asyncio.gather(
*[
self.async_get_category_repositories(HacsCategory(category))
for category in self.common.categories or []
]
)
async def async_get_category_repositories(self, category: HacsCategory) -> None:
"""Get repositories from category."""
if self.system.disabled:
return
try:
repositories = await self.async_github_get_hacs_default_file(category)
except HacsException:
return
for repo in repositories:
if self.common.renamed_repositories.get(repo):
repo = self.common.renamed_repositories[repo]
if self.repositories.is_removed(repo):
continue
if repo in self.common.archived_repositories:
continue
repository = self.repositories.get_by_full_name(repo)
if repository is not None:
self.repositories.mark_default(repository)
if self.status.new and self.configuration.dev:
# Force update for new installations
self.queue.add(repository.common_update())
continue
self.queue.add(
self.async_register_repository(
repository_full_name=repo,
category=category,
default=True,
)
)
async def async_update_all_repositories(self, _=None) -> None:
"""Update all repositories."""
if self.system.disabled:
return
self.log.debug("Starting recurring background task for all repositories")
for repository in self.repositories.list_all:
if repository.data.category in self.common.categories:
self.queue.add(repository.common_update())
self.async_dispatch(HacsDispatchEvent.REPOSITORY, {"action": "reload"})
self.log.debug("Recurring background task for all repositories done")
async def async_check_rate_limit(self, _=None) -> None:
"""Check rate limit."""
if not self.system.disabled or self.system.disabled_reason != HacsDisabledReason.RATE_LIMIT:
return
self.log.debug("Checking if ratelimit has lifted")
can_update = await self.async_can_update()
self.log.debug("Ratelimit indicate we can update %s", can_update)
if can_update > 0:
self.enable_hacs()
await self.async_prosess_queue()
async def async_prosess_queue(self, _=None) -> None:
"""Process the queue."""
if self.system.disabled:
self.log.debug("HACS is disabled")
return
if not self.queue.has_pending_tasks:
self.log.debug("Nothing in the queue")
return
if self.queue.running:
self.log.debug("Queue is already running")
return
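        # Drain the queue recursively: each pass runs as many queued tasks as the
        # current GitHub rate limit allows, then checks whether anything is left.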
async def _handle_queue():
if not self.queue.has_pending_tasks:
await self.data.async_write()
return
can_update = await self.async_can_update()
self.log.debug(
"Can update %s repositories, " "items in queue %s",
can_update,
self.queue.pending_tasks,
)
if can_update != 0:
try:
await self.queue.execute(can_update)
except HacsExecutionStillInProgress:
return
await _handle_queue()
await _handle_queue()
async def async_handle_removed_repositories(self, _=None) -> None:
"""Handle removed repositories."""
if self.system.disabled:
return
need_to_save = False
self.log.info("Loading removed repositories")
try:
removed_repositories = await self.async_github_get_hacs_default_file(
HacsCategory.REMOVED
)
except HacsException:
return
for item in removed_repositories:
removed = self.repositories.removed_repository(item["repository"])
removed.update_data(item)
for removed in self.repositories.list_removed:
if (repository := self.repositories.get_by_full_name(removed.repository)) is None:
continue
if repository.data.full_name in self.common.ignored_repositories:
continue
if repository.data.installed:
if removed.removal_type != "critical":
if self.configuration.experimental:
async_create_issue(
hass=self.hass,
domain=DOMAIN,
issue_id=f"removed_{repository.data.id}",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="removed",
translation_placeholders={
"name": repository.data.full_name,
"reason": removed.reason,
"repositry_id": repository.data.id,
},
)
self.log.warning(
"You have '%s' installed with HACS "
"this repository has been removed from HACS, please consider removing it. "
"Removal reason (%s)",
repository.data.full_name,
removed.reason,
)
else:
need_to_save = True
repository.remove()
if need_to_save:
await self.data.async_write()
async def async_update_downloaded_repositories(self, _=None) -> None:
"""Execute the task."""
if self.system.disabled:
return
self.log.info("Starting recurring background task for downloaded repositories")
for repository in self.repositories.list_downloaded:
if repository.data.category in self.common.categories:
self.queue.add(repository.update_repository(ignore_issues=True))
self.log.debug("Recurring background task for downloaded repositories done")
async def async_handle_critical_repositories(self, _=None) -> None:
"""Handle critical repositories."""
critical_queue = QueueManager(hass=self.hass)
instored = []
critical = []
was_installed = False
try:
critical = await self.async_github_get_hacs_default_file("critical")
except GitHubNotModifiedException:
return
except HacsException:
pass
if not critical:
self.log.debug("No critical repositories")
return
stored_critical = await async_load_from_store(self.hass, "critical")
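        # Track critical repositories that were already handled on a previous run
        # so they are not uninstalled or flagged a second time.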
for stored in stored_critical or []:
instored.append(stored["repository"])
stored_critical = []
for repository in critical:
removed_repo = self.repositories.removed_repository(repository["repository"])
removed_repo.removal_type = "critical"
repo = self.repositories.get_by_full_name(repository["repository"])
stored = {
"repository": repository["repository"],
"reason": repository["reason"],
"link": repository["link"],
"acknowledged": True,
}
if repository["repository"] not in instored:
if repo is not None and repo.data.installed:
self.log.critical(
"Removing repository %s, it is marked as critical",
repository["repository"],
)
was_installed = True
stored["acknowledged"] = False
# Remove from HACS
critical_queue.add(repo.uninstall())
repo.remove()
stored_critical.append(stored)
removed_repo.update_data(stored)
# Uninstall
await critical_queue.execute()
# Save to FS
await async_save_to_store(self.hass, "critical", stored_critical)
# Restart HASS
if was_installed:
self.log.critical("Restarting Home Assistant")
self.hass.async_create_task(self.hass.async_stop(100))

View File

@@ -0,0 +1,182 @@
"""Adds config flow for HACS."""
from aiogithubapi import GitHubDeviceAPI, GitHubException
from aiogithubapi.common.const import OAUTH_USER_LOGIN
from awesomeversion import AwesomeVersion
from homeassistant import config_entries
from homeassistant.const import __version__ as HAVERSION
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.event import async_call_later
from homeassistant.loader import async_get_integration
import voluptuous as vol
from .base import HacsBase
from .const import CLIENT_ID, DOMAIN, MINIMUM_HA_VERSION
from .enums import ConfigurationType
from .utils.configuration_schema import RELEASE_LIMIT, hacs_config_option_schema
from .utils.logger import LOGGER
class HacsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Config flow for HACS."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Initialize."""
self._errors = {}
self.device = None
self.activation = None
self.log = LOGGER
self._progress_task = None
self._login_device = None
self._reauth = False
async def async_step_user(self, user_input):
"""Handle a flow initialized by the user."""
self._errors = {}
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
if self.hass.data.get(DOMAIN):
return self.async_abort(reason="single_instance_allowed")
if user_input:
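            # Every acknowledgement checkbox must be ticked before continuing.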
if [x for x in user_input if not user_input[x]]:
self._errors["base"] = "acc"
return await self._show_config_form(user_input)
return await self.async_step_device(user_input)
## Initial form
return await self._show_config_form(user_input)
async def async_step_device(self, _user_input):
"""Handle device steps"""
async def _wait_for_activation(_=None):
if self._login_device is None or self._login_device.expires_in is None:
async_call_later(self.hass, 1, _wait_for_activation)
return
response = await self.device.activation(device_code=self._login_device.device_code)
self.activation = response.data
self.hass.async_create_task(
self.hass.config_entries.flow.async_configure(flow_id=self.flow_id)
)
if not self.activation:
integration = await async_get_integration(self.hass, DOMAIN)
if not self.device:
self.device = GitHubDeviceAPI(
client_id=CLIENT_ID,
session=aiohttp_client.async_get_clientsession(self.hass),
**{"client_name": f"HACS/{integration.version}"},
)
async_call_later(self.hass, 1, _wait_for_activation)
try:
response = await self.device.register()
self._login_device = response.data
return self.async_show_progress(
step_id="device",
progress_action="wait_for_device",
description_placeholders={
"url": OAUTH_USER_LOGIN,
"code": self._login_device.user_code,
},
)
except GitHubException as exception:
self.log.error(exception)
return self.async_abort(reason="github")
return self.async_show_progress_done(next_step_id="device_done")
async def _show_config_form(self, user_input):
"""Show the configuration form to edit location data."""
if not user_input:
user_input = {}
if AwesomeVersion(HAVERSION) < MINIMUM_HA_VERSION:
return self.async_abort(
reason="min_ha_version",
description_placeholders={"version": MINIMUM_HA_VERSION},
)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required("acc_logs", default=user_input.get("acc_logs", False)): bool,
vol.Required("acc_addons", default=user_input.get("acc_addons", False)): bool,
vol.Required(
"acc_untested", default=user_input.get("acc_untested", False)
): bool,
vol.Required("acc_disable", default=user_input.get("acc_disable", False)): bool,
}
),
errors=self._errors,
)
async def async_step_device_done(self, _user_input):
"""Handle device steps"""
if self._reauth:
existing_entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
self.hass.config_entries.async_update_entry(
existing_entry, data={"token": self.activation.access_token}
)
await self.hass.config_entries.async_reload(existing_entry.entry_id)
return self.async_abort(reason="reauth_successful")
return self.async_create_entry(title="", data={"token": self.activation.access_token})
async def async_step_reauth(self, user_input=None):
"""Perform reauth upon an API authentication error."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(self, user_input=None):
"""Dialog that informs the user that reauth is required."""
if user_input is None:
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema({}),
)
self._reauth = True
return await self.async_step_device(None)
@staticmethod
@callback
def async_get_options_flow(config_entry):
return HacsOptionsFlowHandler(config_entry)
class HacsOptionsFlowHandler(config_entries.OptionsFlow):
"""HACS config flow options handler."""
def __init__(self, config_entry):
"""Initialize HACS options flow."""
self.config_entry = config_entry
async def async_step_init(self, _user_input=None):
"""Manage the options."""
return await self.async_step_user()
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
hacs: HacsBase = self.hass.data.get(DOMAIN)
if user_input is not None:
limit = int(user_input.get(RELEASE_LIMIT, 5))
if limit <= 0 or limit > 100:
return self.async_abort(reason="release_limit_value")
return self.async_create_entry(title="", data=user_input)
if hacs is None or hacs.configuration is None:
return self.async_abort(reason="not_setup")
if hacs.configuration.config_type == ConfigurationType.YAML:
schema = {vol.Optional("not_in_use", default=""): str}
else:
schema = hacs_config_option_schema(self.config_entry.options)
del schema["frontend_repo"]
del schema["frontend_repo_url"]
return self.async_show_form(step_id="user", data_schema=vol.Schema(schema))

View File

@@ -0,0 +1,289 @@
"""Constants for HACS"""
from typing import TypeVar
from aiogithubapi.common.const import ACCEPT_HEADERS
NAME_SHORT = "HACS"
DOMAIN = "hacs"
CLIENT_ID = "395a8e669c5de9f7c6e8"
MINIMUM_HA_VERSION = "2022.10.0"
TV = TypeVar("TV")
PACKAGE_NAME = "custom_components.hacs"
DEFAULT_CONCURRENT_TASKS = 15
DEFAULT_CONCURRENT_BACKOFF_TIME = 1
HACS_ACTION_GITHUB_API_HEADERS = {
"User-Agent": "HACS/action",
"Accept": ACCEPT_HEADERS["preview"],
}
VERSION_STORAGE = "6"
STORENAME = "hacs"
HACS_SYSTEM_ID = "0717a0cd-745c-48fd-9b16-c8534c9704f9-bc944b0f-fd42-4a58-a072-ade38d1444cd"
STARTUP = """
-------------------------------------------------------------------
HACS (Home Assistant Community Store)
Version: %s
This is a custom integration
If you have any issues with this you need to open an issue here:
https://github.com/hacs/integration/issues
-------------------------------------------------------------------
"""
LOCALE = [
"ALL",
"AF",
"AL",
"DZ",
"AS",
"AD",
"AO",
"AI",
"AQ",
"AG",
"AR",
"AM",
"AW",
"AU",
"AT",
"AZ",
"BS",
"BH",
"BD",
"BB",
"BY",
"BE",
"BZ",
"BJ",
"BM",
"BT",
"BO",
"BQ",
"BA",
"BW",
"BV",
"BR",
"IO",
"BN",
"BG",
"BF",
"BI",
"KH",
"CM",
"CA",
"CV",
"KY",
"CF",
"TD",
"CL",
"CN",
"CX",
"CC",
"CO",
"KM",
"CG",
"CD",
"CK",
"CR",
"HR",
"CU",
"CW",
"CY",
"CZ",
"CI",
"DK",
"DJ",
"DM",
"DO",
"EC",
"EG",
"SV",
"GQ",
"ER",
"EE",
"ET",
"FK",
"FO",
"FJ",
"FI",
"FR",
"GF",
"PF",
"TF",
"GA",
"GM",
"GE",
"DE",
"GH",
"GI",
"GR",
"GL",
"GD",
"GP",
"GU",
"GT",
"GG",
"GN",
"GW",
"GY",
"HT",
"HM",
"VA",
"HN",
"HK",
"HU",
"IS",
"IN",
"ID",
"IR",
"IQ",
"IE",
"IM",
"IL",
"IT",
"JM",
"JP",
"JE",
"JO",
"KZ",
"KE",
"KI",
"KP",
"KR",
"KW",
"KG",
"LA",
"LV",
"LB",
"LS",
"LR",
"LY",
"LI",
"LT",
"LU",
"MO",
"MK",
"MG",
"MW",
"MY",
"MV",
"ML",
"MT",
"MH",
"MQ",
"MR",
"MU",
"YT",
"MX",
"FM",
"MD",
"MC",
"MN",
"ME",
"MS",
"MA",
"MZ",
"MM",
"NA",
"NR",
"NP",
"NL",
"NC",
"NZ",
"NI",
"NE",
"NG",
"NU",
"NF",
"MP",
"NO",
"OM",
"PK",
"PW",
"PS",
"PA",
"PG",
"PY",
"PE",
"PH",
"PN",
"PL",
"PT",
"PR",
"QA",
"RO",
"RU",
"RW",
"RE",
"BL",
"SH",
"KN",
"LC",
"MF",
"PM",
"VC",
"WS",
"SM",
"ST",
"SA",
"SN",
"RS",
"SC",
"SL",
"SG",
"SX",
"SK",
"SI",
"SB",
"SO",
"ZA",
"GS",
"SS",
"ES",
"LK",
"SD",
"SR",
"SJ",
"SZ",
"SE",
"CH",
"SY",
"TW",
"TJ",
"TZ",
"TH",
"TL",
"TG",
"TK",
"TO",
"TT",
"TN",
"TR",
"TM",
"TC",
"TV",
"UG",
"UA",
"AE",
"GB",
"US",
"UM",
"UY",
"UZ",
"VU",
"VE",
"VN",
"VG",
"VI",
"WF",
"EH",
"YE",
"ZM",
"ZW",
]

View File

@@ -0,0 +1,82 @@
"""Diagnostics support for HACS."""
from __future__ import annotations
from typing import Any
from aiogithubapi import GitHubException
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from .base import HacsBase
from .const import DOMAIN
from .utils.configuration_schema import TOKEN
async def async_get_config_entry_diagnostics(
hass: HomeAssistant,
entry: ConfigEntry,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
hacs: HacsBase = hass.data[DOMAIN]
data = {
"entry": entry.as_dict(),
"hacs": {
"stage": hacs.stage,
"version": hacs.version,
"disabled_reason": hacs.system.disabled_reason,
"new": hacs.status.new,
"startup": hacs.status.startup,
"categories": hacs.common.categories,
"renamed_repositories": hacs.common.renamed_repositories,
"archived_repositories": hacs.common.archived_repositories,
"ignored_repositories": hacs.common.ignored_repositories,
"lovelace_mode": hacs.core.lovelace_mode,
"configuration": {},
},
"custom_repositories": [
repo.data.full_name
for repo in hacs.repositories.list_all
if not hacs.repositories.is_default(str(repo.data.id))
],
"repositories": [],
}
for key in (
"appdaemon",
"country",
"debug",
"dev",
"experimental",
"netdaemon",
"python_script",
"release_limit",
"theme",
):
data["hacs"]["configuration"][key] = getattr(hacs.configuration, key, None)
for repository in hacs.repositories.list_downloaded:
data["repositories"].append(
{
"data": repository.data.to_json(),
"integration_manifest": repository.integration_manifest,
"repository_manifest": repository.repository_manifest.to_dict(),
"ref": repository.ref,
"paths": {
"localpath": repository.localpath.replace(hacs.core.config_path, "/config"),
"local": repository.content.path.local.replace(
hacs.core.config_path, "/config"
),
"remote": repository.content.path.remote,
},
}
)
try:
rate_limit_response = await hacs.githubapi.rate_limit()
data["rate_limit"] = rate_limit_response.data.as_dict
except GitHubException as exception:
data["rate_limit"] = str(exception)
return async_redact_data(data, (TOKEN,))

View File

@@ -0,0 +1,119 @@
"""HACS Base entities."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from homeassistant.core import callback
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import DOMAIN, HACS_SYSTEM_ID, NAME_SHORT
from .enums import HacsDispatchEvent, HacsGitHubRepo
if TYPE_CHECKING:
from .base import HacsBase
from .repositories.base import HacsRepository
def system_info(hacs: HacsBase) -> dict:
"""Return system info."""
return {
"identifiers": {(DOMAIN, HACS_SYSTEM_ID)},
"name": NAME_SHORT,
"manufacturer": "hacs.xyz",
"model": "",
"sw_version": str(hacs.version),
"configuration_url": "homeassistant://hacs",
"entry_type": DeviceEntryType.SERVICE,
}
class HacsBaseEntity(Entity):
"""Base HACS entity."""
repository: HacsRepository | None = None
_attr_should_poll = False
def __init__(self, hacs: HacsBase) -> None:
"""Initialize."""
self.hacs = hacs
async def async_added_to_hass(self) -> None:
"""Register for status events."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
HacsDispatchEvent.REPOSITORY,
self._update_and_write_state,
)
)
@callback
def _update(self) -> None:
"""Update the sensor."""
async def async_update(self) -> None:
"""Manual updates of the sensor."""
self._update()
@callback
def _update_and_write_state(self, _: Any) -> None:
"""Update the entity and write state."""
self._update()
self.async_write_ha_state()
class HacsSystemEntity(HacsBaseEntity):
"""Base system entity."""
_attr_icon = "hacs:hacs"
_attr_unique_id = HACS_SYSTEM_ID
@property
def device_info(self) -> dict[str, any]:
"""Return device information about HACS."""
return system_info(self.hacs)
class HacsRepositoryEntity(HacsBaseEntity):
"""Base repository entity."""
def __init__(
self,
hacs: HacsBase,
repository: HacsRepository,
) -> None:
"""Initialize."""
super().__init__(hacs=hacs)
self.repository = repository
self._attr_unique_id = str(repository.data.id)
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self.hacs.repositories.is_downloaded(repository_id=str(self.repository.data.id))
@property
def device_info(self) -> dict[str, any]:
"""Return device information about HACS."""
if self.repository.data.full_name == HacsGitHubRepo.INTEGRATION:
return system_info(self.hacs)
return {
"identifiers": {(DOMAIN, str(self.repository.data.id))},
"name": self.repository.display_name,
"model": self.repository.data.category,
"manufacturer": ", ".join(
author.replace("@", "") for author in self.repository.data.authors
),
"configuration_url": "homeassistant://hacs",
"entry_type": DeviceEntryType.SERVICE,
}
@callback
def _update_and_write_state(self, data: dict) -> None:
"""Update the entity and write state."""
if data.get("repository_id") == self.repository.data.id:
self._update()
self.async_write_ha_state()
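
A hypothetical sketch of how a platform could build on HacsRepositoryEntity; the class name, entity name, and the pending_update attribute are illustrative assumptions, not part of this commit. Because HacsBaseEntity subscribes to HacsDispatchEvent.REPOSITORY, such an entity refreshes automatically whenever the matching repository changes.

from homeassistant.components.binary_sensor import BinarySensorEntity


class ExamplePendingUpdateEntity(HacsRepositoryEntity, BinarySensorEntity):
    """Illustrative entity that is on while the repository has a pending update."""

    _attr_name = "Pending update"

    @property
    def is_on(self) -> bool:
        # Assumed attribute on the repository data model; adjust to the real one.
        return bool(getattr(self.repository, "pending_update", False))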

View File

@@ -0,0 +1,89 @@
"""Helper constants."""
# pylint: disable=missing-class-docstring
import sys
if sys.version_info.minor >= 11:
# Needs Python 3.11
from enum import StrEnum ## pylint: disable=no-name-in-module
else:
try:
# https://github.com/home-assistant/core/blob/dev/homeassistant/backports/enum.py
# Considered internal to Home Assistant, can be removed whenever.
from homeassistant.backports.enum import StrEnum
except ImportError:
from enum import Enum
class StrEnum(str, Enum):
pass
class HacsGitHubRepo(StrEnum):
"""HacsGitHubRepo."""
DEFAULT = "hacs/default"
INTEGRATION = "hacs/integration"
class HacsCategory(StrEnum):
APPDAEMON = "appdaemon"
INTEGRATION = "integration"
LOVELACE = "lovelace"
PLUGIN = "plugin" # Kept for legacy purposes
NETDAEMON = "netdaemon"
PYTHON_SCRIPT = "python_script"
THEME = "theme"
REMOVED = "removed"
def __str__(self):
return str(self.value)
class HacsDispatchEvent(StrEnum):
"""HacsDispatchEvent."""
CONFIG = "hacs_dispatch_config"
ERROR = "hacs_dispatch_error"
RELOAD = "hacs_dispatch_reload"
REPOSITORY = "hacs_dispatch_repository"
REPOSITORY_DOWNLOAD_PROGRESS = "hacs_dispatch_repository_download_progress"
STAGE = "hacs_dispatch_stage"
STARTUP = "hacs_dispatch_startup"
STATUS = "hacs_dispatch_status"
class RepositoryFile(StrEnum):
"""Repository file names."""
HACS_JSON = "hacs.json"
MAINIFEST_JSON = "manifest.json"
class ConfigurationType(StrEnum):
YAML = "yaml"
CONFIG_ENTRY = "config_entry"
class LovelaceMode(StrEnum):
"""Lovelace Modes."""
STORAGE = "storage"
AUTO = "auto"
AUTO_GEN = "auto-gen"
YAML = "yaml"
class HacsStage(StrEnum):
SETUP = "setup"
STARTUP = "startup"
WAITING = "waiting"
RUNNING = "running"
BACKGROUND = "background"
class HacsDisabledReason(StrEnum):
RATE_LIMIT = "rate_limit"
REMOVED = "removed"
INVALID_TOKEN = "invalid_token"
CONSTRAINS = "constrains"
LOAD_HACS = "load_hacs"
RESTORE = "restore"

View File

@@ -0,0 +1,49 @@
"""Custom Exceptions for HACS."""
class HacsException(Exception):
"""Super basic."""
class HacsRepositoryArchivedException(HacsException):
"""For repositories that are archived."""
class HacsNotModifiedException(HacsException):
"""For responses that are not modified."""
class HacsExpectedException(HacsException):
"""For stuff that are expected."""
class HacsRepositoryExistException(HacsException):
"""For repositories that are already exist."""
class HacsExecutionStillInProgress(HacsException):
"""Exception to raise if execution is still in progress."""
class AddonRepositoryException(HacsException):
"""Exception to raise when user tries to add add-on repository."""
exception_message = (
"The repository does not seem to be a integration, "
"but an add-on repository. HACS does not manage add-ons."
)
def __init__(self) -> None:
super().__init__(self.exception_message)
class HomeAssistantCoreRepositoryException(HacsException):
"""Exception to raise when user tries to add the home-assistant/core repository."""
exception_message = (
"You can not add homeassistant/core, to use core integrations "
"check the Home Assistant documentation for how to add them."
)
def __init__(self) -> None:
super().__init__(self.exception_message)
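
A hedged usage sketch: the specialised exceptions all derive from HacsException, so call sites can raise the specific class and catch only the base; the download_repository helper below is invented for illustration and is not part of this module.

def download_repository(full_name: str) -> None:
    """Illustrative guard, not part of HACS."""
    if full_name == "home-assistant/core":
        raise HomeAssistantCoreRepositoryException()
    if full_name.endswith("-addon"):
        raise AddonRepositoryException()


try:
    download_repository("home-assistant/core")
except HacsException as err:
    print(f"HACS error: {err}")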

View File

@@ -0,0 +1,107 @@
""""Starting setup task: Frontend"."""
from __future__ import annotations
from typing import TYPE_CHECKING
from aiohttp import web
from homeassistant.components.http import HomeAssistantView
from homeassistant.core import HomeAssistant, callback
from .const import DOMAIN
from .hacs_frontend import locate_dir, VERSION as FE_VERSION
from .hacs_frontend_experimental import (
locate_dir as experimental_locate_dir,
VERSION as EXPERIMENTAL_FE_VERSION,
)
URL_BASE = "/hacsfiles"
if TYPE_CHECKING:
from .base import HacsBase
@callback
def async_register_frontend(hass: HomeAssistant, hacs: HacsBase) -> None:
"""Register the frontend."""
# Register themes
hass.http.register_static_path(f"{URL_BASE}/themes", hass.config.path("themes"))
# Register frontend
if hacs.configuration.frontend_repo_url:
hacs.log.warning(
"<HacsFrontend> Frontend development mode enabled. Do not run in production!"
)
hass.http.register_view(HacsFrontendDev())
elif hacs.configuration.experimental:
hacs.log.info("<HacsFrontend> Using experimental frontend")
hass.http.register_static_path(
f"{URL_BASE}/frontend", experimental_locate_dir(), cache_headers=False
)
else:
#
hass.http.register_static_path(f"{URL_BASE}/frontend", locate_dir(), cache_headers=False)
# Custom iconset
hass.http.register_static_path(
f"{URL_BASE}/iconset.js", str(hacs.integration_dir / "iconset.js")
)
if "frontend_extra_module_url" not in hass.data:
hass.data["frontend_extra_module_url"] = set()
hass.data["frontend_extra_module_url"].add(f"{URL_BASE}/iconset.js")
# Register www/community for all other files
use_cache = hacs.core.lovelace_mode == "storage"
hacs.log.info(
"<HacsFrontend> %s mode, cache for /hacsfiles/: %s",
hacs.core.lovelace_mode,
use_cache,
)
hass.http.register_static_path(
URL_BASE,
hass.config.path("www/community"),
cache_headers=use_cache,
)
hacs.frontend_version = (
FE_VERSION if not hacs.configuration.experimental else EXPERIMENTAL_FE_VERSION
)
# Add to sidepanel if needed
if DOMAIN not in hass.data.get("frontend_panels", {}):
hass.components.frontend.async_register_built_in_panel(
component_name="custom",
sidebar_title=hacs.configuration.sidepanel_title,
sidebar_icon=hacs.configuration.sidepanel_icon,
frontend_url_path=DOMAIN,
config={
"_panel_custom": {
"name": "hacs-frontend",
"embed_iframe": True,
"trust_external": False,
"js_url": f"/hacsfiles/frontend/entrypoint.js?hacstag={hacs.frontend_version}",
}
},
require_admin=True,
)
class HacsFrontendDev(HomeAssistantView):
"""Dev View Class for HACS."""
requires_auth = False
name = "hacs_files:frontend"
url = r"/hacsfiles/frontend/{requested_file:.+}"
async def get(self, request, requested_file): # pylint: disable=unused-argument
"""Handle HACS Web requests."""
hacs: HacsBase = request.app["hass"].data.get(DOMAIN)
requested = requested_file.split("/")[-1]
request = await hacs.session.get(f"{hacs.configuration.frontend_repo_url}/{requested}")
if request.status == 200:
result = await request.read()
response = web.Response(body=result)
response.headers["Content-Type"] = "application/javascript"
return response
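
A small self-contained sketch of the "extra module" mechanism used in async_register_frontend above: Home Assistant's frontend picks up the URLs collected under hass.data["frontend_extra_module_url"], so registering the icon set only requires adding its path to that set. The plain dict below stands in for hass.data.

def add_extra_module(data: dict, url: str) -> None:
    """Mimic the registration above against a plain dict."""
    data.setdefault("frontend_extra_module_url", set()).add(url)


fake_hass_data: dict = {}
add_extra_module(fake_hass_data, "/hacsfiles/iconset.js")
assert "/hacsfiles/iconset.js" in fake_hass_data["frontend_extra_module_url"]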

View File

@@ -0,0 +1,5 @@
"""HACS Frontend"""
from .version import VERSION
def locate_dir():
return __path__[0]

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@@ -0,0 +1,23 @@
import{a as t,r as i,n as a}from"./main-ad130be7.js";import{L as n,s}from"./c.82eccc94.js";let r=t([a("ha-list-item")],(function(t,a){return{F:class extends a{constructor(...i){super(...i),t(this)}},d:[{kind:"get",static:!0,key:"styles",value:function(){return[s,i`
:host {
padding-left: var(--mdc-list-side-padding, 20px);
padding-right: var(--mdc-list-side-padding, 20px);
}
:host([graphic="avatar"]:not([twoLine])),
:host([graphic="icon"]:not([twoLine])) {
height: 48px;
}
span.material-icons:first-of-type {
margin-inline-start: 0px !important;
margin-inline-end: var(
--mdc-list-item-graphic-margin,
16px
) !important;
direction: var(--direction);
}
span.material-icons:last-of-type {
margin-inline-start: auto !important;
margin-inline-end: 0px !important;
direction: var(--direction);
}
`]}}]}}),n);const e=t=>`https://brands.home-assistant.io/${t.useFallback?"_/":""}${t.domain}/${t.darkOptimized?"dark_":""}${t.type}.png`,o=t=>t.split("/")[4],p=t=>t.startsWith("https://brands.home-assistant.io/");export{r as H,e as b,o as e,p as i};

Binary file not shown.

View File

@@ -0,0 +1,24 @@
import{a as e,h as t,Y as i,e as n,i as o,$ as r,L as l,N as a,r as d,n as s}from"./main-ad130be7.js";import"./c.9b92f489.js";e([s("ha-button-menu")],(function(e,t){class s extends t{constructor(...t){super(...t),e(this)}}return{F:s,d:[{kind:"field",key:i,value:void 0},{kind:"field",decorators:[n()],key:"corner",value:()=>"TOP_START"},{kind:"field",decorators:[n()],key:"menuCorner",value:()=>"START"},{kind:"field",decorators:[n({type:Number})],key:"x",value:()=>null},{kind:"field",decorators:[n({type:Number})],key:"y",value:()=>null},{kind:"field",decorators:[n({type:Boolean})],key:"multi",value:()=>!1},{kind:"field",decorators:[n({type:Boolean})],key:"activatable",value:()=>!1},{kind:"field",decorators:[n({type:Boolean})],key:"disabled",value:()=>!1},{kind:"field",decorators:[n({type:Boolean})],key:"fixed",value:()=>!1},{kind:"field",decorators:[o("mwc-menu",!0)],key:"_menu",value:void 0},{kind:"get",key:"items",value:function(){var e;return null===(e=this._menu)||void 0===e?void 0:e.items}},{kind:"get",key:"selected",value:function(){var e;return null===(e=this._menu)||void 0===e?void 0:e.selected}},{kind:"method",key:"focus",value:function(){var e,t;null!==(e=this._menu)&&void 0!==e&&e.open?this._menu.focusItemAtIndex(0):null===(t=this._triggerButton)||void 0===t||t.focus()}},{kind:"method",key:"render",value:function(){return r`
<div @click=${this._handleClick}>
<slot name="trigger" @slotchange=${this._setTriggerAria}></slot>
</div>
<mwc-menu
.corner=${this.corner}
.menuCorner=${this.menuCorner}
.fixed=${this.fixed}
.multi=${this.multi}
.activatable=${this.activatable}
.y=${this.y}
.x=${this.x}
>
<slot></slot>
</mwc-menu>
`}},{kind:"method",key:"firstUpdated",value:function(e){l(a(s.prototype),"firstUpdated",this).call(this,e),"rtl"===document.dir&&this.updateComplete.then((()=>{this.querySelectorAll("mwc-list-item").forEach((e=>{const t=document.createElement("style");t.innerHTML="span.material-icons:first-of-type { margin-left: var(--mdc-list-item-graphic-margin, 32px) !important; margin-right: 0px !important;}",e.shadowRoot.appendChild(t)}))}))}},{kind:"method",key:"_handleClick",value:function(){this.disabled||(this._menu.anchor=this,this._menu.show())}},{kind:"get",key:"_triggerButton",value:function(){return this.querySelector('ha-icon-button[slot="trigger"], mwc-button[slot="trigger"]')}},{kind:"method",key:"_setTriggerAria",value:function(){this._triggerButton&&(this._triggerButton.ariaHasPopup="menu")}},{kind:"get",static:!0,key:"styles",value:function(){return d`
:host {
display: inline-block;
position: relative;
}
::slotted([disabled]) {
color: var(--disabled-text-color);
}
`}}]}}),t);

Binary file not shown.

View File

@@ -0,0 +1,390 @@
import{a as e,h as t,e as i,g as a,t as s,$ as o,j as r,R as n,w as l,r as h,n as c,m as d,L as p,N as u,o as v,b as f,aI as b,ai as m,c as k,E as g,aJ as y,aC as w,aK as x,aL as $,d as _,s as R}from"./main-ad130be7.js";import{f as z}from"./c.3243a8b0.js";import{c as j}from"./c.4a97632a.js";import"./c.f1291e50.js";import"./c.2d5ed670.js";import"./c.97b7c4b0.js";import{r as F}from"./c.4204ca09.js";import{i as P}from"./c.21c042d4.js";import{s as I}from"./c.2645c235.js";import"./c.a5f69ed4.js";import"./c.3f859915.js";import"./c.9b92f489.js";import"./c.82eccc94.js";import"./c.8e28b461.js";import"./c.4feb0cb8.js";import"./c.0ca5587f.js";import"./c.5d3ce9d6.js";import"./c.f6611997.js";import"./c.743a15a1.js";import"./c.4266acdb.js";e([c("ha-tab")],(function(e,t){return{F:class extends t{constructor(...t){super(...t),e(this)}},d:[{kind:"field",decorators:[i({type:Boolean,reflect:!0})],key:"active",value:()=>!1},{kind:"field",decorators:[i({type:Boolean,reflect:!0})],key:"narrow",value:()=>!1},{kind:"field",decorators:[i()],key:"name",value:void 0},{kind:"field",decorators:[a("mwc-ripple")],key:"_ripple",value:void 0},{kind:"field",decorators:[s()],key:"_shouldRenderRipple",value:()=>!1},{kind:"method",key:"render",value:function(){return o`
<div
tabindex="0"
role="tab"
aria-selected=${this.active}
aria-label=${r(this.name)}
@focus=${this.handleRippleFocus}
@blur=${this.handleRippleBlur}
@mousedown=${this.handleRippleActivate}
@mouseup=${this.handleRippleDeactivate}
@mouseenter=${this.handleRippleMouseEnter}
@mouseleave=${this.handleRippleMouseLeave}
@touchstart=${this.handleRippleActivate}
@touchend=${this.handleRippleDeactivate}
@touchcancel=${this.handleRippleDeactivate}
@keydown=${this._handleKeyDown}
>
${this.narrow?o`<slot name="icon"></slot>`:""}
<span class="name">${this.name}</span>
${this._shouldRenderRipple?o`<mwc-ripple></mwc-ripple>`:""}
</div>
`}},{kind:"field",key:"_rippleHandlers",value(){return new n((()=>(this._shouldRenderRipple=!0,this._ripple)))}},{kind:"method",key:"_handleKeyDown",value:function(e){13===e.keyCode&&e.target.click()}},{kind:"method",decorators:[l({passive:!0})],key:"handleRippleActivate",value:function(e){this._rippleHandlers.startPress(e)}},{kind:"method",key:"handleRippleDeactivate",value:function(){this._rippleHandlers.endPress()}},{kind:"method",key:"handleRippleMouseEnter",value:function(){this._rippleHandlers.startHover()}},{kind:"method",key:"handleRippleMouseLeave",value:function(){this._rippleHandlers.endHover()}},{kind:"method",key:"handleRippleFocus",value:function(){this._rippleHandlers.startFocus()}},{kind:"method",key:"handleRippleBlur",value:function(){this._rippleHandlers.endFocus()}},{kind:"get",static:!0,key:"styles",value:function(){return h`
div {
padding: 0 32px;
display: flex;
flex-direction: column;
text-align: center;
box-sizing: border-box;
align-items: center;
justify-content: center;
width: 100%;
height: var(--header-height);
cursor: pointer;
position: relative;
outline: none;
}
.name {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
max-width: 100%;
}
:host([active]) {
color: var(--primary-color);
}
:host(:not([narrow])[active]) div {
border-bottom: 2px solid var(--primary-color);
}
:host([narrow]) {
min-width: 0;
display: flex;
justify-content: center;
overflow: hidden;
}
:host([narrow]) div {
padding: 0 4px;
}
`}}]}}),t),e([c("hass-tabs-subpage")],(function(e,t){class a extends t{constructor(...t){super(...t),e(this)}}return{F:a,d:[{kind:"field",decorators:[i({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[i({type:Boolean})],key:"supervisor",value:()=>!1},{kind:"field",decorators:[i({attribute:!1})],key:"localizeFunc",value:void 0},{kind:"field",decorators:[i({type:String,attribute:"back-path"})],key:"backPath",value:void 0},{kind:"field",decorators:[i()],key:"backCallback",value:void 0},{kind:"field",decorators:[i({type:Boolean,attribute:"main-page"})],key:"mainPage",value:()=>!1},{kind:"field",decorators:[i({attribute:!1})],key:"route",value:void 0},{kind:"field",decorators:[i({attribute:!1})],key:"tabs",value:void 0},{kind:"field",decorators:[i({type:Boolean,reflect:!0})],key:"narrow",value:()=>!1},{kind:"field",decorators:[i({type:Boolean,reflect:!0,attribute:"is-wide"})],key:"isWide",value:()=>!1},{kind:"field",decorators:[i({type:Boolean,reflect:!0})],key:"rtl",value:()=>!1},{kind:"field",decorators:[s()],key:"_activeTab",value:void 0},{kind:"field",decorators:[F(".content")],key:"_savedScrollPos",value:void 0},{kind:"field",key:"_getTabs",value(){return d(((e,t,i,a,s,r,n)=>{const l=e.filter((e=>(!e.component||e.core||P(this.hass,e.component))&&(!e.advancedOnly||i)));if(l.length<2){if(1===l.length){const e=l[0];return[e.translationKey?n(e.translationKey):e.name]}return[""]}return l.map((e=>o`
<a href=${e.path}>
<ha-tab
.hass=${this.hass}
.active=${e.path===(null==t?void 0:t.path)}
.narrow=${this.narrow}
.name=${e.translationKey?n(e.translationKey):e.name}
>
${e.iconPath?o`<ha-svg-icon
slot="icon"
.path=${e.iconPath}
></ha-svg-icon>`:""}
</ha-tab>
</a>
`))}))}},{kind:"method",key:"willUpdate",value:function(e){if(e.has("route")&&(this._activeTab=this.tabs.find((e=>`${this.route.prefix}${this.route.path}`.includes(e.path)))),e.has("hass")){const t=e.get("hass");t&&t.language===this.hass.language||(this.rtl=j(this.hass))}p(u(a.prototype),"willUpdate",this).call(this,e)}},{kind:"method",key:"render",value:function(){var e,t;const i=this._getTabs(this.tabs,this._activeTab,null===(e=this.hass.userData)||void 0===e?void 0:e.showAdvanced,this.hass.config.components,this.hass.language,this.narrow,this.localizeFunc||this.hass.localize),a=i.length>1;return o`
<div class="toolbar">
${this.mainPage||!this.backPath&&null!==(t=history.state)&&void 0!==t&&t.root?o`
<ha-menu-button
.hassio=${this.supervisor}
.hass=${this.hass}
.narrow=${this.narrow}
></ha-menu-button>
`:this.backPath?o`
<a href=${this.backPath}>
<ha-icon-button-arrow-prev
.hass=${this.hass}
></ha-icon-button-arrow-prev>
</a>
`:o`
<ha-icon-button-arrow-prev
.hass=${this.hass}
@click=${this._backTapped}
></ha-icon-button-arrow-prev>
`}
${this.narrow||!a?o`<div class="main-title">
<slot name="header">${a?"":i[0]}</slot>
</div>`:""}
${a?o`
<div id="tabbar" class=${v({"bottom-bar":this.narrow})}>
${i}
</div>
`:""}
<div id="toolbar-icon">
<slot name="toolbar-icon"></slot>
</div>
</div>
<div
class="content ${v({tabs:a})}"
@scroll=${this._saveScrollPos}
>
<slot></slot>
</div>
<div id="fab" class=${v({tabs:a})}>
<slot name="fab"></slot>
</div>
`}},{kind:"method",decorators:[l({passive:!0})],key:"_saveScrollPos",value:function(e){this._savedScrollPos=e.target.scrollTop}},{kind:"method",key:"_backTapped",value:function(){this.backCallback?this.backCallback():history.back()}},{kind:"get",static:!0,key:"styles",value:function(){return h`
:host {
display: block;
height: 100%;
background-color: var(--primary-background-color);
}
:host([narrow]) {
width: 100%;
position: fixed;
}
ha-menu-button {
margin-right: 24px;
}
.toolbar {
display: flex;
align-items: center;
font-size: 20px;
height: var(--header-height);
background-color: var(--sidebar-background-color);
font-weight: 400;
border-bottom: 1px solid var(--divider-color);
padding: 0 16px;
box-sizing: border-box;
}
.toolbar a {
color: var(--sidebar-text-color);
text-decoration: none;
}
.bottom-bar a {
width: 25%;
}
#tabbar {
display: flex;
font-size: 14px;
overflow: hidden;
}
#tabbar > a {
overflow: hidden;
max-width: 45%;
}
#tabbar.bottom-bar {
position: absolute;
bottom: 0;
left: 0;
padding: 0 16px;
box-sizing: border-box;
background-color: var(--sidebar-background-color);
border-top: 1px solid var(--divider-color);
justify-content: space-around;
z-index: 2;
font-size: 12px;
width: 100%;
padding-bottom: env(safe-area-inset-bottom);
}
#tabbar:not(.bottom-bar) {
flex: 1;
justify-content: center;
}
:host(:not([narrow])) #toolbar-icon {
min-width: 40px;
}
ha-menu-button,
ha-icon-button-arrow-prev,
::slotted([slot="toolbar-icon"]) {
display: flex;
flex-shrink: 0;
pointer-events: auto;
color: var(--sidebar-icon-color);
}
.main-title {
flex: 1;
max-height: var(--header-height);
line-height: 20px;
color: var(--sidebar-text-color);
margin: var(--main-title-margin, 0 0 0 24px);
}
.content {
position: relative;
width: calc(
100% - env(safe-area-inset-left) - env(safe-area-inset-right)
);
margin-left: env(safe-area-inset-left);
margin-right: env(safe-area-inset-right);
height: calc(100% - 1px - var(--header-height));
height: calc(
100% - 1px - var(--header-height) - env(safe-area-inset-bottom)
);
overflow: auto;
-webkit-overflow-scrolling: touch;
}
:host([narrow]) .content.tabs {
height: calc(100% - 2 * var(--header-height));
height: calc(
100% - 2 * var(--header-height) - env(safe-area-inset-bottom)
);
}
#fab {
position: fixed;
right: calc(16px + env(safe-area-inset-right));
bottom: calc(16px + env(safe-area-inset-bottom));
z-index: 1;
}
:host([narrow]) #fab.tabs {
bottom: calc(84px + env(safe-area-inset-bottom));
}
#fab[is-wide] {
bottom: 24px;
right: 24px;
}
:host([rtl]) #fab {
right: auto;
left: calc(16px + env(safe-area-inset-left));
}
:host([rtl][is-wide]) #fab {
bottom: 24px;
left: 24px;
right: auto;
}
`}}]}}),t);let E=e([c("hacs-store-panel")],(function(e,t){return{F:class extends t{constructor(...t){super(...t),e(this)}},d:[{kind:"field",decorators:[i({attribute:!1})],key:"filters",value:()=>({})},{kind:"field",decorators:[i({attribute:!1})],key:"hacs",value:void 0},{kind:"field",decorators:[i()],key:"_searchInput",value:()=>""},{kind:"field",decorators:[i({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[i({attribute:!1})],key:"narrow",value:void 0},{kind:"field",decorators:[i({attribute:!1})],key:"isWide",value:void 0},{kind:"field",decorators:[i({attribute:!1})],key:"route",value:void 0},{kind:"field",decorators:[i({attribute:!1})],key:"sections",value:void 0},{kind:"field",decorators:[i()],key:"section",value:void 0},{kind:"field",key:"_repositoriesInActiveSection",value(){return d(((e,t)=>[(null==e?void 0:e.filter((e=>{var i,a,s;return(null===(i=this.hacs.sections)||void 0===i||null===(a=i.find((e=>e.id===t)))||void 0===a||null===(s=a.categories)||void 0===s?void 0:s.includes(e.category))&&e.installed})))||[],(null==e?void 0:e.filter((e=>{var i,a,s;return(null===(i=this.hacs.sections)||void 0===i||null===(a=i.find((e=>e.id===t)))||void 0===a||null===(s=a.categories)||void 0===s?void 0:s.includes(e.category))&&e.new&&!e.installed})))||[]]))}},{kind:"get",key:"allRepositories",value:function(){const[e,t]=this._repositoriesInActiveSection(this.hacs.repositories,this.section);return t.concat(e)}},{kind:"field",key:"_filterRepositories",value:()=>d(z)},{kind:"get",key:"visibleRepositories",value:function(){const e=this.allRepositories.filter((e=>{var t,i;return null===(t=this.filters[this.section])||void 0===t||null===(i=t.find((t=>t.id===e.category)))||void 0===i?void 0:i.checked}));return this._filterRepositories(e,this._searchInput)}},{kind:"method",key:"firstUpdated",value:async function(){this.addEventListener("filter-change",(e=>this._updateFilters(e)))}},{kind:"method",key:"_updateFilters",value:function(e){var t;const i=null===(t=this.filters[this.section])||void 0===t?void 0:t.find((t=>t.id===e.detail.id));this.filters[this.section].find((e=>e.id===i.id)).checked=!i.checked,this.requestUpdate()}},{kind:"method",key:"render",value:function(){var e;if(!this.hacs)return o``;const t=this._repositoriesInActiveSection(this.hacs.repositories,this.section)[1];if(!this.filters[this.section]&&this.hacs.info.categories){var i;const e=null===(i=f(this.hacs.language,this.route))||void 0===i?void 0:i.categories;this.filters[this.section]=[],null==e||e.filter((e=>{var t;return null===(t=this.hacs.info)||void 0===t?void 0:t.categories.includes(e)})).forEach((e=>{this.filters[this.section].push({id:e,value:e,checked:!0})}))}return o`<hass-tabs-subpage
back-path="/hacs/entry"
.hass=${this.hass}
.narrow=${this.narrow}
.route=${this.route}
.tabs=${this.hacs.sections}
hasFab
>
<ha-icon-overflow-menu
slot="toolbar-icon"
narrow
.hass=${this.hass}
.items=${[{path:b,label:this.hacs.localize("menu.documentation"),action:()=>m.open("https://hacs.xyz/","_blank","noreferrer=true")},{path:k,label:"GitHub",action:()=>m.open("https://github.com/hacs","_blank","noreferrer=true")},{path:g,label:this.hacs.localize("menu.open_issue"),action:()=>m.open("https://hacs.xyz/docs/issues","_blank","noreferrer=true")},{path:y,label:this.hacs.localize("menu.custom_repositories"),disabled:this.hacs.info.disabled_reason,action:()=>this.dispatchEvent(new CustomEvent("hacs-dialog",{detail:{type:"custom-repositories",repositories:this.hacs.repositories},bubbles:!0,composed:!0}))},{path:w,label:this.hacs.localize("menu.about"),action:()=>I(this,this.hacs)}]}
>
</ha-icon-overflow-menu>
${this.narrow?o`
<search-input
.hass=${this.hass}
class="header"
slot="header"
.label=${this.hacs.localize("search.downloaded")}
.filter=${this._searchInput||""}
@value-changed=${this._inputValueChanged}
></search-input>
`:o`<div class="search">
<search-input
.hass=${this.hass}
.label=${0===t.length?this.hacs.localize("search.downloaded"):this.hacs.localize("search.downloaded_new")}
.filter=${this._searchInput||""}
@value-changed=${this._inputValueChanged}
></search-input>
</div>`}
<div class="content ${this.narrow?"narrow-content":""}">
${(null===(e=this.filters[this.section])||void 0===e?void 0:e.length)>1?o`<div class="filters">
<hacs-filter
.hacs=${this.hacs}
.filters="${this.filters[this.section]}"
></hacs-filter>
</div>`:""}
${null!=t&&t.length?o`<ha-alert .rtl=${j(this.hass)}>
${this.hacs.localize("store.new_repositories_note")}
<mwc-button
class="max-content"
slot="action"
.label=${this.hacs.localize("menu.dismiss")}
@click=${this._clearAllNewRepositories}
>
</mwc-button>
</ha-alert> `:""}
<div class="container ${this.narrow?"narrow":""}">
${void 0===this.hacs.repositories?"":0===this.allRepositories.length?this._renderEmpty():0===this.visibleRepositories.length?this._renderNoResultsFound():this._renderRepositories()}
</div>
</div>
<ha-fab
slot="fab"
.label=${this.hacs.localize("store.explore")}
.extended=${!this.narrow}
@click=${this._addRepository}
>
<ha-svg-icon slot="icon" .path=${x}></ha-svg-icon>
</ha-fab>
</hass-tabs-subpage>`}},{kind:"method",key:"_renderRepositories",value:function(){return this.visibleRepositories.map((e=>o`<hacs-repository-card
.hass=${this.hass}
.hacs=${this.hacs}
.repository=${e}
.narrow=${this.narrow}
?narrow=${this.narrow}
></hacs-repository-card>`))}},{kind:"method",key:"_clearAllNewRepositories",value:async function(){var e;await $(this.hass,{categories:(null===(e=f(this.hacs.language,this.route))||void 0===e?void 0:e.categories)||[]})}},{kind:"method",key:"_renderNoResultsFound",value:function(){return o`<ha-alert
.rtl=${j(this.hass)}
alert-type="warning"
.title="${this.hacs.localize("store.no_repositories")} 😕"
>
${this.hacs.localize("store.no_repositories_found_desc1",{searchInput:this._searchInput})}
<br />
${this.hacs.localize("store.no_repositories_found_desc2")}
</ha-alert>`}},{kind:"method",key:"_renderEmpty",value:function(){return o`<ha-alert
.title="${this.hacs.localize("store.no_repositories")} 😕"
.rtl=${j(this.hass)}
>
${this.hacs.localize("store.no_repositories_desc1")}
<br />
${this.hacs.localize("store.no_repositories_desc2")}
</ha-alert>`}},{kind:"method",key:"_inputValueChanged",value:function(e){this._searchInput=e.detail.value,window.localStorage.setItem("hacs-search",this._searchInput)}},{kind:"method",key:"_addRepository",value:function(){this.dispatchEvent(new CustomEvent("hacs-dialog",{detail:{type:"add-repository",repositories:this.hacs.repositories,section:this.section},bubbles:!0,composed:!0}))}},{kind:"get",static:!0,key:"styles",value:function(){return[_,R,h`
.filter {
border-bottom: 1px solid var(--divider-color);
}
.content {
height: calc(100vh - 128px);
overflow: auto;
}
.narrow-content {
height: calc(100vh - 128px);
}
.container {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(480px, 1fr));
justify-items: center;
grid-gap: 8px 8px;
padding: 8px 16px 16px;
margin-bottom: 64px;
}
ha-svg-icon {
color: var(--hcv-text-color-on-background);
}
hacs-repository-card {
max-width: 500px;
display: flex;
flex-direction: column;
justify-content: space-between;
}
hacs-repository-card[narrow] {
width: 100%;
}
hacs-repository-card[narrow]:last-of-type {
margin-bottom: 64px;
}
ha-alert {
color: var(--hcv-text-color-primary);
display: block;
margin-top: -4px;
}
.narrow {
width: 100%;
display: block;
padding: 0px;
margin: 0;
}
search-input {
display: block;
}
search-input.header {
padding: 0;
}
.bottom-bar {
position: fixed !important;
}
.max-content {
width: max-content;
}
`]}}]}}),t);export{E as HacsStorePanel};

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@@ -0,0 +1,16 @@
import{a as e,e as t,i,L as a,N as d,$ as r,r as n,n as o}from"./main-ad130be7.js";import{H as s}from"./c.0a1cf8d0.js";e([o("ha-clickable-list-item")],(function(e,o){class s extends o{constructor(...t){super(...t),e(this)}}return{F:s,d:[{kind:"field",decorators:[t()],key:"href",value:void 0},{kind:"field",decorators:[t({type:Boolean})],key:"disableHref",value:()=>!1},{kind:"field",decorators:[t({type:Boolean,reflect:!0})],key:"openNewTab",value:()=>!1},{kind:"field",decorators:[i("a")],key:"_anchor",value:void 0},{kind:"method",key:"render",value:function(){const e=a(d(s.prototype),"render",this).call(this),t=this.href||"";return r`${this.disableHref?r`<a aria-role="option">${e}</a>`:r`<a
aria-role="option"
target=${this.openNewTab?"_blank":""}
href=${t}
>${e}</a
>`}`}},{kind:"method",key:"firstUpdated",value:function(){a(d(s.prototype),"firstUpdated",this).call(this),this.addEventListener("keydown",(e=>{"Enter"!==e.key&&" "!==e.key||this._anchor.click()}))}},{kind:"get",static:!0,key:"styles",value:function(){return[a(d(s),"styles",this),n`
a {
width: 100%;
height: 100%;
display: flex;
align-items: center;
padding-left: var(--mdc-list-side-padding, 20px);
padding-right: var(--mdc-list-side-padding, 20px);
overflow: hidden;
}
`]}}]}}),s);

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@@ -0,0 +1 @@
const n=(n,o)=>n&&n.config.components.includes(o);export{n as i};

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@@ -0,0 +1 @@
import{al as e,am as a,aj as s,an as r,ao as u}from"./main-ad130be7.js";async function i(i,o,n){const t=new e("updateLovelaceResources"),l=await a(i),d=`/hacsfiles/${o.full_name.split("/")[1]}`,c=s({repository:o,version:n}),p=l.find((e=>e.url.includes(d)));t.debug({namespace:d,url:c,exsisting:p}),p&&p.url!==c?(t.debug(`Updating exsusting resource for ${d}`),await r(i,{url:c,resource_id:p.id,res_type:p.type})):l.map((e=>e.url)).includes(c)||(t.debug(`Adding ${c} to Lovelace resources`),await u(i,{url:c,res_type:"module"}))}export{i as u};

Binary file not shown.

View File

@@ -0,0 +1 @@
import{m as o}from"./c.f6611997.js";import{a as t}from"./c.4266acdb.js";const n=async(n,s)=>t(n,{title:"Home Assistant Community Store",confirmText:s.localize("common.close"),text:o.html(`\n **${s.localize("dialog_about.integration_version")}:** | ${s.info.version}\n --|--\n **${s.localize("dialog_about.frontend_version")}:** | 20220906112053\n **${s.localize("common.repositories")}:** | ${s.repositories.length}\n **${s.localize("dialog_about.downloaded_repositories")}:** | ${s.repositories.filter((o=>o.installed)).length}\n\n **${s.localize("dialog_about.useful_links")}:**\n\n - [General documentation](https://hacs.xyz/)\n - [Configuration](https://hacs.xyz/docs/configuration/start)\n - [FAQ](https://hacs.xyz/docs/faq/what)\n - [GitHub](https://github.com/hacs)\n - [Discord](https://discord.gg/apgchf8)\n - [Become a GitHub sponsor? ❤️](https://github.com/sponsors/ludeeus)\n - [BuyMe~~Coffee~~Beer? 🍺🙈](https://buymeacoffee.com/ludeeus)\n\n ***\n\n _Everything you find in HACS is **not** tested by Home Assistant, that includes HACS itself.\n The HACS and Home Assistant teams do not support **anything** you find here._`)});export{n as s};

Binary file not shown.

View File

@@ -0,0 +1,61 @@
import{a as r,h as a,e as o,r as e,$ as d,n as t}from"./main-ad130be7.js";r([t("ha-card")],(function(r,a){return{F:class extends a{constructor(...a){super(...a),r(this)}},d:[{kind:"field",decorators:[o()],key:"header",value:void 0},{kind:"field",decorators:[o({type:Boolean,reflect:!0})],key:"outlined",value:()=>!1},{kind:"get",static:!0,key:"styles",value:function(){return e`
:host {
background: var(
--ha-card-background,
var(--card-background-color, white)
);
border-radius: var(--ha-card-border-radius, 4px);
box-shadow: var(
--ha-card-box-shadow,
0px 2px 1px -1px rgba(0, 0, 0, 0.2),
0px 1px 1px 0px rgba(0, 0, 0, 0.14),
0px 1px 3px 0px rgba(0, 0, 0, 0.12)
);
color: var(--primary-text-color);
display: block;
transition: all 0.3s ease-out;
position: relative;
}
:host([outlined]) {
box-shadow: none;
border-width: var(--ha-card-border-width, 1px);
border-style: solid;
border-color: var(
--ha-card-border-color,
var(--divider-color, #e0e0e0)
);
}
.card-header,
:host ::slotted(.card-header) {
color: var(--ha-card-header-color, --primary-text-color);
font-family: var(--ha-card-header-font-family, inherit);
font-size: var(--ha-card-header-font-size, 24px);
letter-spacing: -0.012em;
line-height: 48px;
padding: 12px 16px 16px;
display: block;
margin-block-start: 0px;
margin-block-end: 0px;
font-weight: normal;
}
:host ::slotted(.card-content:not(:first-child)),
slot:not(:first-child)::slotted(.card-content) {
padding-top: 0px;
margin-top: -8px;
}
:host ::slotted(.card-content) {
padding: 16px;
}
:host ::slotted(.card-actions) {
border-top: 1px solid var(--divider-color, #e8e8e8);
padding: 5px 16px;
}
`}},{kind:"method",key:"render",value:function(){return d`
${this.header?d`<h1 class="card-header">${this.header}</h1>`:d``}
<slot></slot>
`}}]}}),a);

Binary file not shown.

View File

@@ -0,0 +1,121 @@
import{a as e,h as t,e as n,t as i,i as o,$ as a,av as d,o as s,L as r,N as l,A as h,ae as c,r as p,n as u}from"./main-ad130be7.js";e([u("ha-expansion-panel")],(function(e,t){class u extends t{constructor(...t){super(...t),e(this)}}return{F:u,d:[{kind:"field",decorators:[n({type:Boolean,reflect:!0})],key:"expanded",value:()=>!1},{kind:"field",decorators:[n({type:Boolean,reflect:!0})],key:"outlined",value:()=>!1},{kind:"field",decorators:[n({type:Boolean,reflect:!0})],key:"leftChevron",value:()=>!1},{kind:"field",decorators:[n()],key:"header",value:void 0},{kind:"field",decorators:[n()],key:"secondary",value:void 0},{kind:"field",decorators:[i()],key:"_showContent",value(){return this.expanded}},{kind:"field",decorators:[o(".container")],key:"_container",value:void 0},{kind:"method",key:"render",value:function(){return a`
<div class="top">
<div
id="summary"
@click=${this._toggleContainer}
@keydown=${this._toggleContainer}
@focus=${this._focusChanged}
@blur=${this._focusChanged}
role="button"
tabindex="0"
aria-expanded=${this.expanded}
aria-controls="sect1"
>
${this.leftChevron?a`
<ha-svg-icon
.path=${d}
class="summary-icon ${s({expanded:this.expanded})}"
></ha-svg-icon>
`:""}
<slot name="header">
<div class="header">
${this.header}
<slot class="secondary" name="secondary">${this.secondary}</slot>
</div>
</slot>
${this.leftChevron?"":a`
<ha-svg-icon
.path=${d}
class="summary-icon ${s({expanded:this.expanded})}"
></ha-svg-icon>
`}
</div>
<slot name="icons"></slot>
</div>
<div
class="container ${s({expanded:this.expanded})}"
@transitionend=${this._handleTransitionEnd}
role="region"
aria-labelledby="summary"
aria-hidden=${!this.expanded}
tabindex="-1"
>
${this._showContent?a`<slot></slot>`:""}
</div>
`}},{kind:"method",key:"willUpdate",value:function(e){r(l(u.prototype),"willUpdate",this).call(this,e),e.has("expanded")&&this.expanded&&(this._showContent=this.expanded,setTimeout((()=>{this.expanded&&(this._container.style.overflow="initial")}),300))}},{kind:"method",key:"_handleTransitionEnd",value:function(){this._container.style.removeProperty("height"),this._container.style.overflow=this.expanded?"initial":"hidden",this._showContent=this.expanded}},{kind:"method",key:"_toggleContainer",value:async function(e){if(e.defaultPrevented)return;if("keydown"===e.type&&"Enter"!==e.key&&" "!==e.key)return;e.preventDefault();const t=!this.expanded;h(this,"expanded-will-change",{expanded:t}),this._container.style.overflow="hidden",t&&(this._showContent=!0,await c());const n=this._container.scrollHeight;this._container.style.height=`${n}px`,t||setTimeout((()=>{this._container.style.height="0px"}),0),this.expanded=t,h(this,"expanded-changed",{expanded:this.expanded})}},{kind:"method",key:"_focusChanged",value:function(e){this.shadowRoot.querySelector(".top").classList.toggle("focused","focus"===e.type)}},{kind:"get",static:!0,key:"styles",value:function(){return p`
:host {
display: block;
}
.top {
display: flex;
align-items: center;
}
.top.focused {
background: var(--input-fill-color);
}
:host([outlined]) {
box-shadow: none;
border-width: 1px;
border-style: solid;
border-color: var(
--ha-card-border-color,
var(--divider-color, #e0e0e0)
);
border-radius: var(--ha-card-border-radius, 4px);
}
.summary-icon {
margin-left: 8px;
}
:host([leftchevron]) .summary-icon {
margin-left: 0;
margin-right: 8px;
}
#summary {
flex: 1;
display: flex;
padding: var(--expansion-panel-summary-padding, 0 8px);
min-height: 48px;
align-items: center;
cursor: pointer;
overflow: hidden;
font-weight: 500;
outline: none;
}
.summary-icon {
transition: transform 150ms cubic-bezier(0.4, 0, 0.2, 1);
direction: var(--direction);
}
.summary-icon.expanded {
transform: rotate(180deg);
}
.header,
::slotted([slot="header"]) {
flex: 1;
}
.container {
padding: var(--expansion-panel-content-padding, 0 8px);
overflow: hidden;
transition: height 300ms cubic-bezier(0.4, 0, 0.2, 1);
height: 0px;
}
.container.expanded {
height: auto;
}
.secondary {
display: block;
color: var(--secondary-text-color);
font-size: 12px;
}
`}}]}}),t);

Binary file not shown.

View File

@@ -0,0 +1,50 @@
import{a as e,h as i,e as t,i as a,$ as n,O as l,z as o,A as s,r as c,n as r,m as d}from"./main-ad130be7.js";import"./c.3f859915.js";e([r("search-input")],(function(e,i){return{F:class extends i{constructor(...i){super(...i),e(this)}},d:[{kind:"field",decorators:[t({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[t()],key:"filter",value:void 0},{kind:"field",decorators:[t({type:Boolean})],key:"suffix",value:()=>!1},{kind:"field",decorators:[t({type:Boolean})],key:"autofocus",value:()=>!1},{kind:"field",decorators:[t({type:String})],key:"label",value:void 0},{kind:"method",key:"focus",value:function(){var e;null===(e=this._input)||void 0===e||e.focus()}},{kind:"field",decorators:[a("ha-textfield",!0)],key:"_input",value:void 0},{kind:"method",key:"render",value:function(){return n`
<ha-textfield
.autofocus=${this.autofocus}
.label=${this.label||"Search"}
.value=${this.filter||""}
icon
.iconTrailing=${this.filter||this.suffix}
@input=${this._filterInputChanged}
>
<slot name="prefix" slot="leadingIcon">
<ha-svg-icon
tabindex="-1"
class="prefix"
.path=${l}
></ha-svg-icon>
</slot>
<div class="trailing" slot="trailingIcon">
${this.filter&&n`
<ha-icon-button
@click=${this._clearSearch}
.label=${this.hass.localize("ui.common.clear")}
.path=${o}
class="clear-button"
></ha-icon-button>
`}
<slot name="suffix"></slot>
</div>
</ha-textfield>
`}},{kind:"method",key:"_filterChanged",value:async function(e){s(this,"value-changed",{value:String(e)})}},{kind:"method",key:"_filterInputChanged",value:async function(e){this._filterChanged(e.target.value)}},{kind:"method",key:"_clearSearch",value:async function(){this._filterChanged("")}},{kind:"get",static:!0,key:"styles",value:function(){return c`
:host {
display: inline-flex;
}
ha-svg-icon,
ha-icon-button {
color: var(--primary-text-color);
}
ha-svg-icon {
outline: none;
}
.clear-button {
--mdc-icon-size: 20px;
}
ha-textfield {
display: inherit;
}
.trailing {
display: flex;
align-items: center;
}
`}}]}}),i);const u=d(((e,i)=>e.filter((e=>h(e.name).includes(h(i))||h(e.description).includes(h(i))||h(e.category).includes(h(i))||h(e.full_name).includes(h(i))||h(e.authors).includes(h(i))||h(e.domain).includes(h(i)))))),h=d((e=>String(e||"").toLocaleLowerCase().replace(/-|_| /g,"")));export{u as f};

Binary file not shown.

View File

@@ -0,0 +1,94 @@
import{a6 as e,a7 as t,a as o,h as i,e as n,$ as a,r,n as l}from"./main-ad130be7.js";e({_template:t`
<style>
:host {
overflow: hidden; /* needed for text-overflow: ellipsis to work on ff */
@apply --layout-vertical;
@apply --layout-center-justified;
@apply --layout-flex;
}
:host([two-line]) {
min-height: var(--paper-item-body-two-line-min-height, 72px);
}
:host([three-line]) {
min-height: var(--paper-item-body-three-line-min-height, 88px);
}
:host > ::slotted(*) {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
:host > ::slotted([secondary]) {
@apply --paper-font-body1;
color: var(--paper-item-body-secondary-color, var(--secondary-text-color));
@apply --paper-item-body-secondary;
}
</style>
<slot></slot>
`,is:"paper-item-body"}),o([l("ha-settings-row")],(function(e,t){return{F:class extends t{constructor(...t){super(...t),e(this)}},d:[{kind:"field",decorators:[n({type:Boolean,reflect:!0})],key:"narrow",value:void 0},{kind:"field",decorators:[n({type:Boolean,attribute:"three-line"})],key:"threeLine",value:()=>!1},{kind:"method",key:"render",value:function(){return a`
<div class="prefix-wrap">
<slot name="prefix"></slot>
<paper-item-body
?two-line=${!this.threeLine}
?three-line=${this.threeLine}
>
<slot name="heading"></slot>
<div secondary><slot name="description"></slot></div>
</paper-item-body>
</div>
<div class="content"><slot></slot></div>
`}},{kind:"get",static:!0,key:"styles",value:function(){return r`
:host {
display: flex;
padding: 0 16px;
align-content: normal;
align-self: auto;
align-items: center;
}
paper-item-body {
padding: 8px 16px 8px 0;
}
paper-item-body[two-line] {
min-height: calc(
var(--paper-item-body-two-line-min-height, 72px) - 16px
);
flex: 1;
}
.content {
display: contents;
}
:host(:not([narrow])) .content {
display: var(--settings-row-content-display, flex);
justify-content: flex-end;
flex: 1;
padding: 16px 0;
}
.content ::slotted(*) {
width: var(--settings-row-content-width);
}
:host([narrow]) {
align-items: normal;
flex-direction: column;
border-top: 1px solid var(--divider-color);
padding-bottom: 8px;
}
::slotted(ha-switch) {
padding: 16px 0;
}
div[secondary] {
white-space: normal;
}
.prefix-wrap {
display: var(--settings-row-prefix-display);
}
:host([narrow]) .prefix-wrap {
display: flex;
align-items: center;
}
`}}]}}),i);

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@@ -0,0 +1,190 @@
import{a as e,h as t,e as o,$ as r,aM as i,r as a,n as s,o as n,aL as d,d as c}from"./main-ad130be7.js";import"./c.2d5ed670.js";import"./c.9b92f489.js";import"./c.82eccc94.js";import"./c.4feb0cb8.js";import"./c.0ca5587f.js";import"./c.5d3ce9d6.js";e([s("ha-icon-overflow-menu")],(function(e,t){return{F:class extends t{constructor(...t){super(...t),e(this)}},d:[{kind:"field",decorators:[o({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[o({type:Array})],key:"items",value:()=>[]},{kind:"field",decorators:[o({type:Boolean})],key:"narrow",value:()=>!1},{kind:"method",key:"render",value:function(){return r`
${this.narrow?r` <!-- Collapsed representation for small screens -->
<ha-button-menu
@click=${this._handleIconOverflowMenuOpened}
@closed=${this._handleIconOverflowMenuClosed}
class="ha-icon-overflow-menu-overflow"
corner="BOTTOM_START"
absolute
>
<ha-icon-button
.label=${this.hass.localize("ui.common.overflow_menu")}
.path=${i}
slot="trigger"
></ha-icon-button>
${this.items.map((e=>r`
<mwc-list-item
graphic="icon"
.disabled=${e.disabled}
@click=${e.action}
>
<div slot="graphic">
<ha-svg-icon .path=${e.path}></ha-svg-icon>
</div>
${e.label}
</mwc-list-item>
`))}
</ha-button-menu>`:r`
<!-- Icon representation for big screens -->
${this.items.map((e=>e.narrowOnly?"":r`<div>
${e.tooltip?r`<paper-tooltip animation-delay="0" position="left">
${e.tooltip}
</paper-tooltip>`:""}
<ha-icon-button
@click=${e.action}
.label=${e.label}
.path=${e.path}
.disabled=${e.disabled}
></ha-icon-button>
</div> `))}
`}
`}},{kind:"method",key:"_handleIconOverflowMenuOpened",value:function(){const e=this.closest(".mdc-data-table__row");e&&(e.style.zIndex="1")}},{kind:"method",key:"_handleIconOverflowMenuClosed",value:function(){const e=this.closest(".mdc-data-table__row");e&&(e.style.zIndex="")}},{kind:"get",static:!0,key:"styles",value:function(){return a`
:host {
display: flex;
justify-content: flex-end;
}
`}}]}}),t);const l=e=>t=>({kind:"method",placement:"prototype",key:t.key,descriptor:{set(e){this[`__${String(t.key)}`]=e},get(){return this[`__${String(t.key)}`]},enumerable:!0,configurable:!0},finisher(o){const r=o.prototype.connectedCallback;o.prototype.connectedCallback=function(){if(r.call(this),this[t.key]){const o=this.renderRoot.querySelector(e);if(!o)return;o.scrollTop=this[t.key]}}}});e([s("hacs-repository-card")],(function(e,t){return{F:class extends t{constructor(...t){super(...t),e(this)}},d:[{kind:"field",decorators:[o({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[o({attribute:!1})],key:"hacs",value:void 0},{kind:"field",decorators:[o({attribute:!1})],key:"repository",value:void 0},{kind:"field",decorators:[o({type:Boolean})],key:"narrow",value:void 0},{kind:"get",key:"_borderClass",value:function(){const e={};return this.hacs.addedToLovelace(this.hacs,this.repository)&&"pending-restart"!==this.repository.status?this.repository.pending_upgrade?e["status-update"]=!0:this.repository.new&&!this.repository.installed&&(e["status-new"]=!0):e["status-issue"]=!0,0!==Object.keys(e).length&&(e["status-border"]=!0),e}},{kind:"get",key:"_headerClass",value:function(){const e={};return this.hacs.addedToLovelace(this.hacs,this.repository)&&"pending-restart"!==this.repository.status?this.repository.pending_upgrade?e["update-header"]=!0:this.repository.new&&!this.repository.installed?e["new-header"]=!0:e["default-header"]=!0:e["issue-header"]=!0,e}},{kind:"get",key:"_headerTitle",value:function(){return this.hacs.addedToLovelace(this.hacs,this.repository)?"pending-restart"===this.repository.status?this.hacs.localize("repository_card.pending_restart"):this.repository.pending_upgrade?this.hacs.localize("repository_card.pending_update"):this.repository.new&&!this.repository.installed?this.hacs.localize("repository_card.new_repository"):"":this.hacs.localize("repository_card.not_loaded")}},{kind:"method",key:"render",value:function(){return r`
<a href="/hacs/repository/${this.repository.id}">
<ha-card class=${n(this._borderClass)} ?narrow=${this.narrow} outlined>
<div class="card-content">
<div class="group-header">
<div class="status-header ${n(this._headerClass)}">${this._headerTitle}</div>
<div class="title pointer">
<h1>${this.repository.name}</h1>
${"integration"!==this.repository.category?r` <ha-chip>
${this.hacs.localize(`common.${this.repository.category}`)}
</ha-chip>`:""}
</div>
</div>
<div class="description">${this.repository.description}</div>
</div>
<div class="card-actions">
${this.repository.new&&!this.repository.installed?r`<div>
<mwc-button class="status-new" @click=${this._setNotNew}>
${this.hacs.localize("repository_card.dismiss")}
</mwc-button>
</div>`:this.repository.pending_upgrade&&this.hacs.addedToLovelace(this.hacs,this.repository)?r`<div>
<mwc-button class="update-header" @click=${this._updateRepository} raised>
${this.hacs.localize("common.update")}
</mwc-button>
</div> `:""}
</div>
</ha-card>
</a>
`}},{kind:"method",key:"_updateRepository",value:function(e){e.preventDefault(),this.dispatchEvent(new CustomEvent("hacs-dialog",{detail:{type:"update",repository:this.repository.id},bubbles:!0,composed:!0}))}},{kind:"method",key:"_setNotNew",value:async function(e){e.preventDefault(),await d(this.hass,{repository:String(this.repository.id)})}},{kind:"get",static:!0,key:"styles",value:function(){return[c,a`
ha-card {
display: flex;
flex-direction: column;
height: 195px;
width: 480px;
}
.title {
display: flex;
justify-content: space-between;
}
.card-content {
padding: 0 0 3px 0;
height: 100%;
}
.card-actions {
border-top: none;
bottom: 0;
display: flex;
flex-direction: row-reverse;
justify-content: space-between;
align-items: center;
padding: 5px;
}
.group-header {
height: auto;
align-content: center;
}
.group-header h1 {
margin: 0;
padding: 8px 16px;
font-size: 22px;
}
h1 {
margin-top: 0;
min-height: 24px;
}
a {
all: unset;
cursor: pointer;
}
.description {
opacity: var(--dark-primary-opacity);
font-size: 14px;
padding: 8px 16px;
max-height: 52px;
overflow: hidden;
}
.status-new {
border-color: var(--hcv-color-new);
--mdc-theme-primary: var(--hcv-color-new);
}
.status-update {
border-color: var(--hcv-color-update);
}
.status-issue {
border-color: var(--hcv-color-error);
}
.new-header {
background-color: var(--hcv-color-new);
color: var(--hcv-text-color-on-background);
}
.issue-header {
background-color: var(--hcv-color-error);
color: var(--hcv-text-color-on-background);
}
.update-header {
background-color: var(--hcv-color-update);
color: var(--hcv-text-color-on-background);
}
.default-header {
padding: 2px 0 !important;
}
mwc-button.update-header {
--mdc-theme-primary: var(--hcv-color-update);
--mdc-theme-on-primary: var(--hcv-text-color-on-background);
}
.status-border {
border-style: solid;
border-width: min(var(--ha-card-border-width, 1px), 10px);
}
.status-header {
top: 0;
padding: 6px 1px;
margin: -1px;
width: 100%;
font-weight: 500;
text-align: center;
left: 0;
border-top-left-radius: var(--ha-card-border-radius, 4px);
border-top-right-radius: var(--ha-card-border-radius, 4px);
}
ha-card[narrow] {
width: calc(100% - 24px);
margin: 11px;
}
ha-chip {
padding: 4px;
margin-top: 3px;
}
`]}}]}}),t);export{l as r};

Binary file not shown.

View File

@@ -0,0 +1 @@
import{A as o}from"./main-ad130be7.js";const a=()=>import("./c.f12697b4.js"),i=(i,l,m)=>new Promise((n=>{const r=l.cancel,s=l.confirm;o(i,"show-dialog",{dialogTag:"dialog-box",dialogImport:a,dialogParams:{...l,...m,cancel:()=>{n(!(null==m||!m.prompt)&&null),r&&r()},confirm:o=>{n(null==m||!m.prompt||o),s&&s(o)}}})})),l=(o,a)=>i(o,a),m=(o,a)=>i(o,a,{confirmation:!0}),n=(o,a)=>i(o,a,{prompt:!0});export{l as a,n as b,m as s};

Binary file not shown.

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff.