Commit: backup
@@ -1 +1 @@
-2022.12.8
+2023.2.5
airsonos.xml (new file, 35 lines)
@@ -0,0 +1,35 @@
+<?xml version="1.0"?>
+<main_log>info</main_log>
+<upnp_log>info</upnp_log>
+<util_log>warn</util_log>
+<raop_log>info</raop_log>
+<log_limit>-1</log_limit>
+<max_players>32</max_players>
+<binding>?</binding>
+<ports>0:0</ports>
+<enabled>1</enabled>
+<max_volume>100</max_volume>
+<http_length>-1</http_length>
+<upnp_max>1</upnp_max>
+<codec>flc</codec>
+<metadata>1</metadata>
+<flush>1</flush>
+<artwork></artwork>
+<latency>1000:2000</latency>
+<drift>0</drift>
+<pcm>http-get:*:audio/L16;rate=44100;channels=2:DLNA.ORG_PN=LPCM;DLNA.ORG_OP=00;DLNA.ORG_CI=0;DLNA.ORG_FLAGS=0d500000000000000000000000000000</pcm>
+<wav>http-get:*:audio/wav:DLNA.ORG_OP=00;DLNA.ORG_CI=0;DLNA.ORG_FLAGS=0d500000000000000000000000000000</wav>
+<flac>http-get:*:audio/flac:DLNA.ORG_OP=00;DLNA.ORG_CI=0;DLNA.ORG_FLAGS=0d500000000000000000000000000000</flac>
+<mp3>http-get:*:audio/mpeg:DLNA.ORG_PN=MP3;DLNA.ORG_OP=00;DLNA.ORG_CI=0;DLNA.ORG_FLAGS=0d500000000000000000000000000000</mp3>
+<device>
+<udn>uuid:RINCON_38420B93464001400</udn>
+<name>Keuken+</name>
+<mac>bb:bb:0b:93:46:40</mac>
+<enabled>1</enabled>
+</device>
+<device>
+<udn>uuid:BO5EBO5E-F00D-F00D-FEED-506583CEC197</udn>
+<name>Bose woonkamer+</name>
+<mac>bb:bb:7f:f3:34:e1</mac>
+<enabled>1</enabled>
+</device>
automations.yaml (869 lines changed; diff suppressed because it is too large)
@@ -1,6 +1,9 @@
 script: !include scripts.yaml
 scene: !include scenes.yaml
 
+frontend:
+  themes: !include_dir_merge_named themes/
+
 homeassistant:
   #packages: !include_dir_named packages/
   packages: !include_dir_named "integrations"
Binary files not shown.
@@ -15,7 +15,7 @@ SENSOR_TYPES = {
 
 SENSOR_LOCATIONS_TO_URL = {
     "trashapi": [
-        "http://trashapi.azurewebsites.net/trash?Location={0}&ZipCode={1}&HouseNumber={2}&HouseNumberSuffix={3}&DiftarCode={4}"
+        "http://trashapi.azurewebsites.net/trash?Location={0}&ZipCode={1}&HouseNumber={2}&HouseNumberSuffix={3}&District={4}&DiftarCode={5}&ShowWholeYear={6}"
     ]
 }
 
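The template above gains two positional fields (District at {4}, ShowWholeYear at {6}), so every caller now has to pass seven arguments in order. A minimal sketch of the expansion, with made-up address values:

    url_template = (
        "http://trashapi.azurewebsites.net/trash?Location={0}&ZipCode={1}"
        "&HouseNumber={2}&HouseNumberSuffix={3}&District={4}"
        "&DiftarCode={5}&ShowWholeYear={6}"
    )
    # Argument order must match the placeholder numbers exactly; the two
    # new values slot in at positions 4 and 6.
    url = url_template.format("sliedrecht", "3361AB", "1", "", "", "", "false")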
@@ -66,6 +66,8 @@ CONF_LOCATION = "location"
 CONF_POSTCODE = "postcode"
 CONF_STREET_NUMBER = "streetnumber"
 CONF_STREET_NUMBER_SUFFIX = "streetnumbersuffix"
+CONF_DISTRICT = "district"
+CONF_GET_WHOLE_YEAR = "getwholeyear"
 CONF_DATE_FORMAT = "dateformat"
 CONF_TIMESPAN_IN_DAYS = "timespanindays"
 CONF_LOCALE = "locale"

@@ -82,6 +84,7 @@ ATTR_YEAR_MONTH_DAY_DATE = "year_month_day_date"
 ATTR_FRIENDLY_NAME = "friendly_name"
 ATTR_LAST_COLLECTION_DATE = "last_collection_date"
 ATTR_TOTAL_COLLECTIONS_THIS_YEAR = "total_collections_this_year"
+ATTR_WHOLE_YEAR_DATES = "whole_year_dates"
 
 _LOGGER = logging.getLogger(__name__)
 
Binary file not shown.
@@ -16,38 +16,48 @@ class TrashApiAfval(object):
         postcode,
         street_number,
         street_number_suffix,
+        district,
         diftar_code,
+        get_whole_year,
         resources,
     ):
         _LOGGER.debug("Updating Waste collection dates")

         try:
             API_ENDPOINT = SENSOR_LOCATIONS_TO_URL["trashapi"][0].format(
-                location, postcode, street_number, street_number_suffix, diftar_code
+                location,
+                postcode,
+                street_number,
+                street_number_suffix,
+                district,
+                diftar_code,
+                get_whole_year,
             )

             r = requests.get(url=API_ENDPOINT)
             dataList = r.json()

             # Place all possible values in the dictionary even if they are not necessary
-            waste_dict = {}
+            waste_array = []

             # _LOGGER.warning(dataList)

             for data in dataList:

-                # find gft.
-                if "gft" in resources and data["name"].lower() == "gft":
-                    waste_dict["gft"] = data["date"].split("T")[0]
-                # find kerstboom.
-                if "kerstboom" in resources and data["name"].lower() == "kerstboom":
-                    waste_dict["kerstboom"] = data["date"].split("T")[0]
-                # find papier
-                if "papier" in resources and data["name"].lower() == "papier":
-                    waste_dict["papier"] = data["date"].split("T")[0]
-                # find pbd.
-                if "pbd" in resources and data["name"].lower() == "pbd":
-                    waste_dict["pbd"] = data["date"].split("T")[0]
+                # find gft, kerstboom, papier, pbd, takken or textiel
+                if (
+                    ("gft" in resources and data["name"].lower() == "gft")
+                    or (
+                        "kerstboom" in resources and data["name"].lower() == "kerstboom"
+                    )
+                    or ("papier" in resources and data["name"].lower() == "papier")
+                    or ("pbd" in resources and data["name"].lower() == "pbd")
+                    or ("takken" in resources and data["name"].lower() == "takken")
+                    or ("textiel" in resources and data["name"].lower() == "textiel")
+                ):
+                    waste_array.append(
+                        {data["name"].lower(): data["date"].split("T")[0]}
+                    )
                 # find restafval.
                 if "restafval" in resources and data["name"].lower() == "restafval":
                     if (

@@ -56,18 +66,20 @@ class TrashApiAfval(object):
                         data["date"].split("T")[0], "%Y-%m-%d"
                     ).date()
                 ):
-                    waste_dict["restafval"] = data["date"].split("T")[0]
+                    waste_array.append(
+                        {data["name"].lower(): data["date"].split("T")[0]}
+                    )
                 else:
-                    waste_dict["restafvaldiftardate"] = data["date"].split("T")[0]
-                    waste_dict["restafvaldiftarcollections"] = data["totalThisYear"]
-            # find takken
-            if "takken" in resources and data["name"].lower() == "takken":
-                waste_dict["takken"] = data["date"].split("T")[0]
-            # find textiel
-            if "textiel" in resources and data["name"].lower() == "textiel":
-                waste_dict["textiel"] = data["date"].split("T")[0]
+                    waste_array.append(
+                        {"restafvaldiftardate": data["date"].split("T")[0]}
+                    )
+                    waste_array.append(
+                        {"restafvaldiftarcollections": data["totalThisYear"]}
+                    )
 
-        return waste_dict
+        # _LOGGER.warning(waste_array)
+
+        return waste_array
     except urllib.error.URLError as exc:
         _LOGGER.error("Error occurred while fetching data: %r", exc.reason)
         return False
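The dict-to-list change matters because a dict can hold only one date per waste type, while the whole-year feature needs every upcoming date. A short sketch of consuming the new structure (values invented for illustration):

    waste_array = [
        {"gft": "2023-02-10"},
        {"gft": "2023-02-24"},
        {"restafvaldiftardate": "2023-02-03"},
    ]

    # Each entry is a single-key dict, so one waste type can occur
    # repeatedly; collect all of its dates with a membership test.
    gft_dates = [entry["gft"] for entry in waste_array if "gft" in entry]
    print(gft_dates)  # ['2023-02-10', '2023-02-24']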
@@ -1,7 +1,7 @@
 {
     "domain": "afvalinfo",
     "name": "Afvalinfo",
-    "version": "1.0.9",
+    "version": "1.1.0",
     "documentation": "https://github.com/heyajohnny/afvalinfo",
     "issue_tracker": "https://github.com/heyajohnny/afvalinfo/issues",
     "dependencies": [],
@@ -16,10 +16,12 @@ from .const.const import (
     MIN_TIME_BETWEEN_UPDATES,
     _LOGGER,
     CONF_CITY,
+    CONF_DISTRICT,
     CONF_LOCATION,
     CONF_POSTCODE,
     CONF_STREET_NUMBER,
     CONF_STREET_NUMBER_SUFFIX,
+    CONF_GET_WHOLE_YEAR,
     CONF_DATE_FORMAT,
     CONF_TIMESPAN_IN_DAYS,
     CONF_NO_TRASH_TEXT,

@@ -36,6 +38,7 @@ from .const.const import (
     ATTR_FRIENDLY_NAME,
     ATTR_LAST_COLLECTION_DATE,
     ATTR_TOTAL_COLLECTIONS_THIS_YEAR,
+    ATTR_WHOLE_YEAR_DATES,
     SENSOR_TYPES,
 )
 
@@ -57,12 +60,14 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
         vol.Required(CONF_POSTCODE, default="3361AB"): cv.string,
         vol.Required(CONF_STREET_NUMBER, default="1"): cv.string,
         vol.Optional(CONF_STREET_NUMBER_SUFFIX, default=""): cv.string,
+        vol.Optional(CONF_DISTRICT, default=""): cv.string,
         vol.Optional(CONF_DATE_FORMAT, default="%d-%m-%Y"): cv.string,
         vol.Optional(CONF_TIMESPAN_IN_DAYS, default="365"): cv.string,
         vol.Optional(CONF_LOCALE, default="en"): cv.string,
         vol.Optional(CONF_ID, default=""): cv.string,
         vol.Optional(CONF_NO_TRASH_TEXT, default="none"): cv.string,
         vol.Optional(CONF_DIFTAR_CODE, default=""): cv.string,
+        vol.Optional(CONF_GET_WHOLE_YEAR, default="false"): cv.string,
     }
 )
 
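Because both new options are vol.Optional with string defaults, existing configurations that omit them keep validating unchanged. A reduced sketch of that behaviour (schema trimmed to just the two new keys):

    import voluptuous as vol

    schema = vol.Schema(
        {
            vol.Optional("district", default=""): str,
            vol.Optional("getwholeyear", default="false"): str,
        }
    )

    # Omitted optional keys are filled in with their defaults.
    print(schema({}))  # {'district': '', 'getwholeyear': 'false'}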
@@ -76,12 +81,14 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
     postcode = config.get(CONF_POSTCODE).strip()
     street_number = config.get(CONF_STREET_NUMBER)
     street_number_suffix = config.get(CONF_STREET_NUMBER_SUFFIX)
+    district = config.get(CONF_DISTRICT)
     date_format = config.get(CONF_DATE_FORMAT).strip()
     timespan_in_days = config.get(CONF_TIMESPAN_IN_DAYS)
     locale = config.get(CONF_LOCALE)
     id_name = config.get(CONF_ID)
     no_trash_text = config.get(CONF_NO_TRASH_TEXT)
     diftar_code = config.get(CONF_DIFTAR_CODE)
+    get_whole_year = config.get(CONF_GET_WHOLE_YEAR)
 
     try:
         resources = config[CONF_RESOURCES].copy()

@@ -102,7 +109,9 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
             postcode,
             street_number,
             street_number_suffix,
+            district,
             diftar_code,
+            get_whole_year,
             resourcesMinusTodayAndTomorrow,
         )
     except urllib.error.HTTPError as error:

@@ -139,6 +148,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
                     timespan_in_days,
                     locale,
                     id_name,
+                    get_whole_year,
                 )
             )
 
@@ -175,7 +185,9 @@ class AfvalinfoData(object):
         postcode,
         street_number,
         street_number_suffix,
+        district,
         diftar_code,
+        get_whole_year,
         resources,
     ):
         self.data = None

@@ -183,7 +195,9 @@ class AfvalinfoData(object):
         self.postcode = postcode
         self.street_number = street_number
         self.street_number_suffix = street_number_suffix
+        self.district = district
         self.diftar_code = diftar_code
+        self.get_whole_year = get_whole_year
         self.resources = resources
 
     @Throttle(MIN_TIME_BETWEEN_UPDATES)

@@ -194,7 +208,9 @@ class AfvalinfoData(object):
             self.postcode,
             self.street_number,
             self.street_number_suffix,
+            self.district,
             self.diftar_code,
+            self.get_whole_year,
             self.resources,
         )
 
@@ -209,6 +225,7 @@ class AfvalinfoSensor(Entity):
         timespan_in_days,
         locale,
         id_name,
+        get_whole_year,
     ):
         self.data = data
         self.type = sensor_type

@@ -217,6 +234,7 @@ class AfvalinfoSensor(Entity):
         self.timespan_in_days = timespan_in_days
         self.locale = locale
         self._name = sensor_friendly_name
+        self._get_whole_year = get_whole_year
         self.entity_id = "sensor." + (
             (
                 SENSOR_PREFIX

@@ -241,6 +259,7 @@ class AfvalinfoSensor(Entity):
         self._year_month_day_date = None
         self._last_collection_date = None
         self._total_collections_this_year = None
+        self._whole_year_dates = None
 
     @property
     def name(self):
@@ -266,98 +285,119 @@ class AfvalinfoSensor(Entity):
             ATTR_IS_COLLECTION_DATE_TODAY: self._is_collection_date_today,
             ATTR_LAST_COLLECTION_DATE: self._last_collection_date,
             ATTR_TOTAL_COLLECTIONS_THIS_YEAR: self._total_collections_this_year,
+            ATTR_WHOLE_YEAR_DATES: self._whole_year_dates,
         }
 
     @Throttle(MIN_TIME_BETWEEN_UPDATES)
     def update(self):
         self.data.update()
-        waste_data = self.data.data
+        waste_array = self.data.data
         self._error = False
 
-        try:
-            if waste_data:
-                if self.type in waste_data:
-                    collection_date = datetime.strptime(
-                        waste_data[self.type], "%Y-%m-%d"
-                    ).date()
-
-                    # Date in date format "%Y-%m-%d"
-                    self._year_month_day_date = str(collection_date)
-
-                    if collection_date:
-                        # Set the values of the sensor
-                        self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")
-
-                        # Is the collection date today?
-                        self._is_collection_date_today = date.today() == collection_date
-
-                        if (
-                            self.type == "restafval"
-                            and "restafvaldiftardate" in waste_data
-                        ):
-                            self._last_collection_date = str(
-                                datetime.strptime(
-                                    waste_data["restafvaldiftardate"], "%Y-%m-%d"
-                                ).date()
-                            )
-                            self._total_collections_this_year = waste_data[
-                                "restafvaldiftarcollections"
-                            ]
-
-                        # Days until collection date
-                        delta = collection_date - date.today()
-                        self._days_until_collection_date = delta.days
-
-                        # Only show the value if the date is lesser than or equal to (today + timespan_in_days)
-                        if collection_date <= date.today() + relativedelta(
-                            days=int(self.timespan_in_days)
-                        ):
-                            # if the date does not contain a named day or month, return the date as normal
-                            if (
-                                self.date_format.find("a") == -1
-                                and self.date_format.find("A") == -1
-                                and self.date_format.find("b") == -1
-                                and self.date_format.find("B") == -1
-                            ):
-                                self._state = collection_date.strftime(self.date_format)
-                            # else convert the named values to the locale names
-                            else:
-                                edited_date_format = self.date_format.replace(
-                                    "%a", "EEE"
-                                )
-                                edited_date_format = edited_date_format.replace(
-                                    "%A", "EEEE"
-                                )
-                                edited_date_format = edited_date_format.replace(
-                                    "%b", "MMM"
-                                )
-                                edited_date_format = edited_date_format.replace(
-                                    "%B", "MMMM"
-                                )
-
-                                # half babel, half date string... something like EEEE 04-MMMM-2020
-                                half_babel_half_date = collection_date.strftime(
-                                    edited_date_format
-                                )
-
-                                # replace the digits with qquoted digits 01 --> '01'
-                                half_babel_half_date = re.sub(
-                                    r"(\d+)", r"'\1'", half_babel_half_date
-                                )
-                                # transform the EEE, EEEE etc... to a real locale date, with babel
-                                locale_date = format_date(
-                                    collection_date,
-                                    half_babel_half_date,
-                                    locale=self.locale,
-                                )
-
-                                self._state = locale_date
-                        else:
-                            self._hidden = True
-                    else:
-                        raise ValueError()
-                else:
-                    raise ValueError()
+        # Loop through all the dates to put the dates in the whole_year_dates attribute
+        if self._get_whole_year == "True":
+            whole_year_dates = []
+            for waste_data in waste_array:
+                if self.type in waste_data:
+                    whole_year_dates.append(
+                        datetime.strptime(waste_data[self.type], "%Y-%m-%d").date()
+                    )
+            self._whole_year_dates = whole_year_dates
+
+        try:
+            if waste_array:
+                for waste_data in waste_array:
+                    if self.type in waste_data:
+                        collection_date = datetime.strptime(
+                            waste_data[self.type], "%Y-%m-%d"
+                        ).date()
+
+                        # Date in date format "%Y-%m-%d"
+                        self._year_month_day_date = str(collection_date)
+
+                        if collection_date:
+                            # Set the values of the sensor
+                            self._last_update = datetime.today().strftime(
+                                "%d-%m-%Y %H:%M"
+                            )
+
+                            # Is the collection date today?
+                            self._is_collection_date_today = (
+                                date.today() == collection_date
+                            )
+
+                            if (
+                                self.type == "restafval"
+                                and "restafvaldiftardate" in waste_data
+                            ):
+                                self._last_collection_date = str(
+                                    datetime.strptime(
+                                        waste_data["restafvaldiftardate"], "%Y-%m-%d"
+                                    ).date()
+                                )
+                                self._total_collections_this_year = waste_data[
+                                    "restafvaldiftarcollections"
+                                ]
+
+                            # Days until collection date
+                            delta = collection_date - date.today()
+                            self._days_until_collection_date = delta.days
+
+                            # Only show the value if the date is lesser than or equal to (today + timespan_in_days)
+                            if collection_date <= date.today() + relativedelta(
+                                days=int(self.timespan_in_days)
+                            ):
+                                # if the date does not contain a named day or month, return the date as normal
+                                if (
+                                    self.date_format.find("a") == -1
+                                    and self.date_format.find("A") == -1
+                                    and self.date_format.find("b") == -1
+                                    and self.date_format.find("B") == -1
+                                ):
+                                    self._state = collection_date.strftime(
+                                        self.date_format
+                                    )
+                                # else convert the named values to the locale names
+                                else:
+                                    edited_date_format = self.date_format.replace(
+                                        "%a", "EEE"
+                                    )
+                                    edited_date_format = edited_date_format.replace(
+                                        "%A", "EEEE"
+                                    )
+                                    edited_date_format = edited_date_format.replace(
+                                        "%b", "MMM"
+                                    )
+                                    edited_date_format = edited_date_format.replace(
+                                        "%B", "MMMM"
+                                    )
+
+                                    # half babel, half date string... something like EEEE 04-MMMM-2020
+                                    half_babel_half_date = collection_date.strftime(
+                                        edited_date_format
+                                    )
+
+                                    # replace the digits with quoted digits 01 --> '01'
+                                    half_babel_half_date = re.sub(
+                                        r"(\d+)", r"'\1'", half_babel_half_date
+                                    )
+                                    # transform the EEE, EEEE etc... to a real locale date, with babel
+                                    locale_date = format_date(
+                                        collection_date,
+                                        half_babel_half_date,
+                                        locale=self.locale,
+                                    )
+
+                                    self._state = locale_date
+                                    break  # we have a result, break the loop
+                            else:
+                                self._hidden = True
+                        else:
+                            # collection_date empty
+                            raise ValueError()
+                    # else:
+                    # No matching result data for current waste type, no problem
+
             else:
                 raise ValueError()
         except ValueError:
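The locale branch above works around strftime's fixed day and month names: strftime patterns are first rewritten into Babel (LDML) letters, literal digits are quoted so Babel leaves them alone, and babel.dates.format_date then renders the names for the configured locale. A condensed sketch of the same steps:

    import re
    from datetime import date
    from babel.dates import format_date

    collection_date = date(2023, 2, 5)

    # %A/%B have no locale-aware equivalent in strftime, so map them
    # to Babel pattern letters first.
    pattern = "%A %d-%B-%Y".replace("%A", "EEEE").replace("%B", "MMMM")
    half = collection_date.strftime(pattern)   # "EEEE 05-MMMM-2023"

    # Quote the literal digits so Babel does not reinterpret them.
    half = re.sub(r"(\d+)", r"'\1'", half)     # "EEEE '05'-MMMM-'2023'"

    print(format_date(collection_date, half, locale="nl"))
    # e.g. zondag 05-februari-2023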
@@ -369,4 +409,5 @@ class AfvalinfoSensor(Entity):
             # self._is_collection_date_today = False
             # self._last_collection_date = None
             # self._total_collections_this_year = None
+            # self._whole_year_dates = None
             self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")
@@ -12,8 +12,7 @@ def get_waste_data_raw(
     postal_code,
     street_number,
     suffix,
-):  # sourcery skip: avoid-builtin-shadow
+):
 
     if provider not in SENSOR_COLLECTORS_ICALENDAR.keys():
         raise ValueError(f"Invalid provider: {provider}, please verify")
 
@@ -17,6 +17,7 @@ def get_waste_data_raw(
 
     try:
         bag_id = None
+        suffix = suffix.strip().upper()
        _verify = provider != "suez"
         url = f"{SENSOR_COLLECTORS_OPZET[provider]}/rest/adressen/{postal_code}-{street_number}"
         raw_response = requests.get(url, verify=_verify)
@@ -5,7 +5,7 @@ _LOGGER = logging.getLogger(__name__)
 
 API = "api"
 NAME = "afvalwijzer"
-VERSION = "2022.11.02"
+VERSION = "2023.01.01"
 ISSUE_URL = "https://github.com/xirixiz/homeassistant-afvalwijzer/issues"
 
 SENSOR_COLLECTOR_TO_URL = {
@@ -1,7 +1,7 @@
 {
     "domain": "afvalwijzer",
     "name": "Afvalwijzer",
-    "version": "2022.11.02",
+    "version": "2023.01.01",
     "iot_class": "cloud_polling",
     "documentation": "https://github.com/xirixiz/homeassistant-afvalwijzer/blob/master/README.md",
     "issue_tracker": "https://github.com/xirixiz/homeassistant-afvalwijzer/issues",

@@ -11,4 +11,4 @@
         "@xirixiz"
     ],
     "requirements": []
 }
@@ -40,7 +40,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
         vol.Optional(CONF_SUFFIX, default=""): cv.string,
         vol.Optional(CONF_EXCLUDE_PICKUP_TODAY, default="true"): cv.string,
         vol.Optional(CONF_EXCLUDE_LIST, default=""): cv.string,
-        vol.Optional(CONF_DEFAULT_LABEL, default="Geen"): cv.string,
+        vol.Optional(CONF_DEFAULT_LABEL, default="geen"): cv.string,
         vol.Optional(CONF_ID.strip().lower(), default=""): cv.string,
     }
 )
@@ -77,7 +77,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
     except ValueError as err:
         _LOGGER.error(f"Check afvalwijzer platform settings {err.args}")
 
-    fetch_data = AfvalwijzerData(config)
+    fetch_data = AfvalwijzerData(hass, config)
 
     waste_types_provider = collector.waste_types_provider
     _LOGGER.debug(f"Generating waste_types_provider list = {waste_types_provider}")

@@ -98,7 +98,8 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
 
 
 class AfvalwijzerData(object):
-    def __init__(self, config):
+    def __init__(self, hass, config):
+        self._hass = hass
         self.config = config
 
     @Throttle(MIN_TIME_BETWEEN_UPDATES)
@@ -24,7 +24,7 @@ from ..collector.main_collector import MainCollector
 # Common
 suffix = ""
 exclude_pickup_today = "True"
-default_label = "Geen"
+default_label = "geen"
 exclude_list = ""
 
 # DeAfvalapp

@@ -42,6 +42,11 @@ exclude_list = ""
 # postal_code = "5146eg"
 # street_number = "1"
 
+provider = "rmn"
+postal_code = "3701XK"
+street_number = "24"
+suffix = "b"
+
 # Opzet
 # provider = "prezero"
 # postal_code = "6665CN"

@@ -54,9 +59,9 @@ exclude_list = ""
 # suffix = "C"
 
 # Ximmio
-provider = "meerlanden"
-postal_code = "2121xt"
-street_number = "38"
+# provider = "meerlanden"
+# postal_code = "2121xt"
+# street_number = "38"
 
 # Ximmio
 # provider = "acv"
@@ -25,6 +25,7 @@ import voluptuous as vol
 
 from .base import HacsBase
 from .const import DOMAIN, MINIMUM_HA_VERSION, STARTUP
+from .data_client import HacsDataClient
 from .enums import ConfigurationType, HacsDisabledReason, HacsStage, LovelaceMode
 from .frontend import async_register_frontend
 from .utils.configuration_schema import hacs_config_combined

@@ -87,6 +88,10 @@ async def async_initialize_integration(
     hacs.hass = hass
     hacs.queue = QueueManager(hass=hass)
     hacs.data = HacsData(hacs=hacs)
+    hacs.data_client = HacsDataClient(
+        session=clientsession,
+        client_name=f"HACS/{integration.version}",
+    )
     hacs.system.running = True
     hacs.session = clientsession
 
@@ -153,8 +158,9 @@ async def async_initialize_integration(
         hacs.disable_hacs(HacsDisabledReason.RESTORE)
         return False
 
-    can_update = await hacs.async_can_update()
-    hacs.log.debug("Can update %s repositories", can_update)
+    if not hacs.configuration.experimental:
+        can_update = await hacs.async_can_update()
+        hacs.log.debug("Can update %s repositories", can_update)
 
     hacs.set_active_categories()
 
@@ -168,7 +174,7 @@ async def async_initialize_integration(
         hacs.log.info("Update entities are only supported when using UI configuration")
 
     else:
-        hass.config_entries.async_setup_platforms(
+        await hass.config_entries.async_forward_entry_setups(
             config_entry,
             [Platform.SENSOR, Platform.UPDATE]
             if hacs.configuration.experimental
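The replaced call tracks a Home Assistant core API change: config_entries.async_setup_platforms was deprecated in favour of the awaitable async_forward_entry_setups, which guarantees the platforms are set up before entry setup continues. The usual pattern in an integration looks like:

    from homeassistant.const import Platform

    PLATFORMS = [Platform.SENSOR, Platform.UPDATE]

    async def async_setup_entry(hass, entry):
        # Awaiting ensures every platform finishes setting up before
        # the config entry is reported as loaded.
        await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
        return True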
Binary files not shown.
@@ -28,11 +28,17 @@ from homeassistant.config_entries import ConfigEntry, ConfigEntryState
 from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE, Platform
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.dispatcher import async_dispatcher_send
-from homeassistant.helpers.issue_registry import async_create_issue, IssueSeverity
+from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
 from homeassistant.loader import Integration
 from homeassistant.util import dt
 
+from custom_components.hacs.repositories.base import (
+    HACS_MANIFEST_KEYS_TO_EXPORT,
+    REPOSITORY_KEYS_TO_EXPORT,
+)
+
 from .const import DOMAIN, TV, URL_BASE
+from .data_client import HacsDataClient
 from .enums import (
     ConfigurationType,
     HacsCategory,

@@ -47,6 +53,7 @@ from .exceptions import (
     HacsException,
     HacsExecutionStillInProgress,
     HacsExpectedException,
+    HacsNotModifiedException,
     HacsRepositoryArchivedException,
     HacsRepositoryExistException,
     HomeAssistantCoreRepositoryException,

@@ -166,6 +173,7 @@ class HacsStatus:
     new: bool = False
     active_frontend_endpoint_plugin: bool = False
     active_frontend_endpoint_theme: bool = False
+    inital_fetch_done: bool = False
 
 
 @dataclass

@@ -176,6 +184,7 @@ class HacsSystem:
     running: bool = False
     stage = HacsStage.SETUP
     action: bool = False
+    generator: bool = False
 
     @property
     def disabled(self) -> bool:
@@ -265,7 +274,7 @@ class HacsRepositories:
 
         self._default_repositories.add(repo_id)
 
-    def set_repository_id(self, repository, repo_id):
+    def set_repository_id(self, repository: HacsRepository, repo_id: str):
         """Update a repository id."""
         existing_repo_id = str(repository.data.id)
         if existing_repo_id == repo_id:

@@ -350,6 +359,7 @@ class HacsBase:
     configuration = HacsConfiguration()
     core = HacsCore()
     data: HacsData | None = None
+    data_client: HacsDataClient | None = None
     frontend_version: str | None = None
     github: GitHub | None = None
     githubapi: GitHubAPI | None = None
@@ -546,8 +556,6 @@ class HacsBase:
         if check:
             try:
                 await repository.async_registration(ref)
-                if self.status.new:
-                    repository.data.new = False
                 if repository.validate.errors:
                     self.common.skip.append(repository.data.full_name)
                     if not self.status.startup:

@@ -561,7 +569,11 @@ class HacsBase:
                     repository.logger.info("%s Validation completed", repository.string)
                 else:
                     repository.logger.info("%s Registration completed", repository.string)
-            except (HacsRepositoryExistException, HacsRepositoryArchivedException):
+            except (HacsRepositoryExistException, HacsRepositoryArchivedException) as exception:
+                if self.system.generator:
+                    repository.logger.error(
+                        "%s Registration Failed - %s", repository.string, exception
+                    )
                 return
             except AIOGitHubAPIException as exception:
                 self.common.skip.append(repository.data.full_name)

@@ -569,6 +581,9 @@ class HacsBase:
                 f"Validation for {repository_full_name} failed with {exception}."
             ) from exception
 
+        if self.status.new:
+            repository.data.new = False
+
         if repository_id is not None:
             repository.data.id = repository_id
 
@@ -588,34 +603,7 @@ class HacsBase:
     async def startup_tasks(self, _=None) -> None:
         """Tasks that are started after setup."""
         self.set_stage(HacsStage.STARTUP)
-
-        try:
-            repository = self.repositories.get_by_full_name(HacsGitHubRepo.INTEGRATION)
-            if repository is None:
-                await self.async_register_repository(
-                    repository_full_name=HacsGitHubRepo.INTEGRATION,
-                    category=HacsCategory.INTEGRATION,
-                    default=True,
-                )
-                repository = self.repositories.get_by_full_name(HacsGitHubRepo.INTEGRATION)
-            if repository is None:
-                raise HacsException("Unknown error")
-
-            repository.data.installed = True
-            repository.data.installed_version = self.integration.version.string
-            repository.data.new = False
-            repository.data.releases = True
-
-            self.repository = repository.repository_object
-            self.repositories.mark_default(repository)
-        except HacsException as exception:
-            if "403" in str(exception):
-                self.log.critical(
-                    "GitHub API is ratelimited, or the token is wrong.",
-                )
-            else:
-                self.log.critical("Could not load HACS! - %s", exception)
-            self.disable_hacs(HacsDisabledReason.LOAD_HACS)
+        await self.async_load_hacs_from_github()
 
         if critical := await async_load_from_store(self.hass, "critical"):
             for repo in critical:
@@ -626,16 +614,38 @@ class HacsBase:
                 )
                 break
 
+        if not self.configuration.experimental:
+            self.recuring_tasks.append(
+                self.hass.helpers.event.async_track_time_interval(
+                    self.async_update_downloaded_repositories, timedelta(hours=48)
+                )
+            )
+            self.recuring_tasks.append(
+                self.hass.helpers.event.async_track_time_interval(
+                    self.async_update_all_repositories,
+                    timedelta(hours=96),
+                )
+            )
+        else:
+            self.recuring_tasks.append(
+                self.hass.helpers.event.async_track_time_interval(
+                    self.async_load_hacs_from_github,
+                    timedelta(hours=48),
+                )
+            )
+
         self.recuring_tasks.append(
             self.hass.helpers.event.async_track_time_interval(
-                self.async_get_all_category_repositories, timedelta(hours=3)
+                self.async_update_downloaded_custom_repositories, timedelta(hours=48)
             )
         )
 
         self.recuring_tasks.append(
             self.hass.helpers.event.async_track_time_interval(
-                self.async_update_all_repositories, timedelta(hours=25)
+                self.async_get_all_category_repositories, timedelta(hours=6)
             )
         )
 
         self.recuring_tasks.append(
             self.hass.helpers.event.async_track_time_interval(
                 self.async_check_rate_limit, timedelta(minutes=5)

@@ -646,14 +656,10 @@ class HacsBase:
                 self.async_prosess_queue, timedelta(minutes=10)
             )
         )
 
         self.recuring_tasks.append(
             self.hass.helpers.event.async_track_time_interval(
-                self.async_update_downloaded_repositories, timedelta(hours=2)
-            )
-        )
-        self.recuring_tasks.append(
-            self.hass.helpers.event.async_track_time_interval(
-                self.async_handle_critical_repositories, timedelta(hours=2)
+                self.async_handle_critical_repositories, timedelta(hours=6)
             )
         )
 
@@ -661,6 +667,8 @@ class HacsBase:
             EVENT_HOMEASSISTANT_FINAL_WRITE, self.data.async_force_write
         )
 
+        self.log.debug("There are %s scheduled recurring tasks", len(self.recuring_tasks))
+
         self.status.startup = False
         self.async_dispatch(HacsDispatchEvent.STATUS, {})
 
@@ -758,6 +766,42 @@ class HacsBase:
         if self.configuration.netdaemon:
             self.enable_hacs_category(HacsCategory.NETDAEMON)
 
+    async def async_load_hacs_from_github(self, _=None) -> None:
+        """Load HACS from GitHub."""
+        if self.configuration.experimental and self.status.inital_fetch_done:
+            return
+
+        try:
+            repository = self.repositories.get_by_full_name(HacsGitHubRepo.INTEGRATION)
+            if repository is None:
+                await self.async_register_repository(
+                    repository_full_name=HacsGitHubRepo.INTEGRATION,
+                    category=HacsCategory.INTEGRATION,
+                    default=True,
+                )
+                repository = self.repositories.get_by_full_name(HacsGitHubRepo.INTEGRATION)
+            elif self.configuration.experimental and not self.status.startup:
+                self.log.error("Scheduling update of hacs/integration")
+                self.queue.add(repository.common_update())
+            if repository is None:
+                raise HacsException("Unknown error")
+
+            repository.data.installed = True
+            repository.data.installed_version = self.integration.version.string
+            repository.data.new = False
+            repository.data.releases = True
+
+            self.repository = repository.repository_object
+            self.repositories.mark_default(repository)
+        except HacsException as exception:
+            if "403" in str(exception):
+                self.log.critical(
+                    "GitHub API is ratelimited, or the token is wrong.",
+                )
+            else:
+                self.log.critical("Could not load HACS! - %s", exception)
+            self.disable_hacs(HacsDisabledReason.LOAD_HACS)
+
     async def async_get_all_category_repositories(self, _=None) -> None:
         """Get all category repositories."""
         if self.system.disabled:
@@ -765,11 +809,62 @@ class HacsBase:
         self.log.info("Loading known repositories")
         await asyncio.gather(
             *[
-                self.async_get_category_repositories(HacsCategory(category))
+                self.async_get_category_repositories_experimental(category)
+                if self.configuration.experimental
+                else self.async_get_category_repositories(HacsCategory(category))
                 for category in self.common.categories or []
             ]
         )
 
+    async def async_get_category_repositories_experimental(self, category: str) -> None:
+        """Update all category repositories."""
+        self.log.debug("Fetching updated content for %s", category)
+        try:
+            category_data = await self.data_client.get_data(category)
+        except HacsNotModifiedException:
+            self.log.debug("No updates for %s", category)
+            return
+        except HacsException as exception:
+            self.log.error("Could not update %s - %s", category, exception)
+            return
+
+        await self.data.register_unknown_repositories(category_data, category)
+
+        for repo_id, repo_data in category_data.items():
+            repo = repo_data["full_name"]
+            if self.common.renamed_repositories.get(repo):
+                repo = self.common.renamed_repositories[repo]
+            if self.repositories.is_removed(repo):
+                continue
+            if repo in self.common.archived_repositories:
+                continue
+            if repository := self.repositories.get_by_full_name(repo):
+                self.repositories.set_repository_id(repository, repo_id)
+                self.repositories.mark_default(repository)
+                if repository.data.last_fetched is None or (
+                    repository.data.last_fetched.timestamp() < repo_data["last_fetched"]
+                ):
+                    repository.data.update_data({**dict(REPOSITORY_KEYS_TO_EXPORT), **repo_data})
+                    if (manifest := repo_data.get("manifest")) is not None:
+                        repository.repository_manifest.update_data(
+                            {**dict(HACS_MANIFEST_KEYS_TO_EXPORT), **manifest}
+                        )
+
+        if category == "integration":
+            self.status.inital_fetch_done = True
+
+        if self.stage == HacsStage.STARTUP:
+            for repository in self.repositories.list_all:
+                if (
+                    repository.data.category == category
+                    and not repository.data.installed
+                    and not self.repositories.is_default(repository.data.id)
+                ):
+                    repository.logger.debug(
+                        "%s Unregister stale custom repository", repository.string
+                    )
+                    self.repositories.unregister(repository)
+
     async def async_get_category_repositories(self, category: HacsCategory) -> None:
         """Get repositories from category."""
         if self.system.disabled:
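REPOSITORY_KEYS_TO_EXPORT and HACS_MANIFEST_KEYS_TO_EXPORT (added further down in repositories/base.py) are tuples of (key, default) pairs, so the {**dict(...), **data} idiom above first builds a defaults mapping and then lets the fetched payload override it. In isolation:

    REPOSITORY_KEYS_TO_EXPORT = (("description", ""), ("downloads", 0))
    repo_data = {"description": "Example card", "full_name": "user/repo"}

    # dict() on the pair tuple gives the defaults; unpacking repo_data
    # second makes fetched values win.
    merged = {**dict(REPOSITORY_KEYS_TO_EXPORT), **repo_data}
    print(merged)
    # {'description': 'Example card', 'downloads': 0, 'full_name': 'user/repo'}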
@@ -845,7 +940,7 @@ class HacsBase:
             return
         can_update = await self.async_can_update()
         self.log.debug(
-            "Can update %s repositories, " "items in queue %s",
+            "Can update %s repositories, items in queue %s",
             can_update,
             self.queue.pending_tasks,
         )
@@ -867,9 +962,12 @@ class HacsBase:
         self.log.info("Loading removed repositories")
 
         try:
-            removed_repositories = await self.async_github_get_hacs_default_file(
-                HacsCategory.REMOVED
-            )
+            if self.configuration.experimental:
+                removed_repositories = await self.data_client.get_data("removed")
+            else:
+                removed_repositories = await self.async_github_get_hacs_default_file(
+                    HacsCategory.REMOVED
+                )
         except HacsException:
             return
 
@@ -915,7 +1013,7 @@ class HacsBase:
 
     async def async_update_downloaded_repositories(self, _=None) -> None:
         """Execute the task."""
-        if self.system.disabled:
+        if self.system.disabled or self.configuration.experimental:
             return
         self.log.info("Starting recurring background task for downloaded repositories")
 
@@ -925,6 +1023,21 @@ class HacsBase:
 
         self.log.debug("Recurring background task for downloaded repositories done")
 
+    async def async_update_downloaded_custom_repositories(self, _=None) -> None:
+        """Execute the task."""
+        if self.system.disabled or not self.configuration.experimental:
+            return
+        self.log.info("Starting recurring background task for downloaded custom repositories")
+
+        for repository in self.repositories.list_downloaded:
+            if (
+                repository.data.category in self.common.categories
+                and not self.repositories.is_default(repository.data.id)
+            ):
+                self.queue.add(repository.update_repository(ignore_issues=True))
+
+        self.log.debug("Recurring background task for downloaded custom repositories done")
+
     async def async_handle_critical_repositories(self, _=None) -> None:
         """Handle critical repositories."""
         critical_queue = QueueManager(hass=self.hass)

@@ -933,8 +1046,11 @@ class HacsBase:
         was_installed = False
 
         try:
-            critical = await self.async_github_get_hacs_default_file("critical")
-        except GitHubNotModifiedException:
+            if self.configuration.experimental:
+                critical = await self.data_client.get_data("critical")
+            else:
+                critical = await self.async_github_get_hacs_default_file("critical")
+        except (GitHubNotModifiedException, HacsNotModifiedException):
             return
         except HacsException:
             pass
@@ -17,6 +17,8 @@ PACKAGE_NAME = "custom_components.hacs"
 DEFAULT_CONCURRENT_TASKS = 15
 DEFAULT_CONCURRENT_BACKOFF_TIME = 1
 
+HACS_REPOSITORY_ID = "172733314"
+
 HACS_ACTION_GITHUB_API_HEADERS = {
     "User-Agent": "HACS/action",
     "Accept": ACCEPT_HEADERS["preview"],
@@ -4,7 +4,7 @@ import sys
 
 if sys.version_info.minor >= 11:
     # Needs Python 3.11
-    from enum import StrEnum  ## pylint: disable=no-name-in-module
+    from enum import StrEnum  # # pylint: disable=no-name-in-module
 else:
     try:
         # https://github.com/home-assistant/core/blob/dev/homeassistant/backports/enum.py
@@ -8,13 +8,12 @@ from homeassistant.components.http import HomeAssistantView
 from homeassistant.core import HomeAssistant, callback
 
 from .const import DOMAIN, URL_BASE
-from .hacs_frontend import locate_dir, VERSION as FE_VERSION
+from .hacs_frontend import VERSION as FE_VERSION, locate_dir
 from .hacs_frontend_experimental import (
-    locate_dir as experimental_locate_dir,
     VERSION as EXPERIMENTAL_FE_VERSION,
+    locate_dir as experimental_locate_dir,
 )
 
 
 if TYPE_CHECKING:
     from .base import HacsBase
 
Binary files not shown.
@@ -1,9 +1,9 @@
 
 try {
-  new Function("import('/hacsfiles/frontend/main-c4dd4de7.js')")();
+  new Function("import('/hacsfiles/frontend/main-aeda8d41.js')")();
 } catch (err) {
   var el = document.createElement('script');
-  el.src = '/hacsfiles/frontend/main-c4dd4de7.js';
+  el.src = '/hacsfiles/frontend/main-aeda8d41.js';
   el.type = 'module';
   document.body.appendChild(el);
 }
Binary file not shown.
@@ -1,3 +1,3 @@
 {
-    "./src/main.ts": "main-c4dd4de7.js"
+    "./src/main.ts": "main-aeda8d41.js"
 }
@@ -1 +1 @@
-VERSION="20221217163936"
+VERSION="20230127100107"
@@ -19,5 +19,5 @@
     "requirements": [
         "aiogithubapi>=22.10.1"
     ],
-    "version": "1.29.0"
+    "version": "1.30.1"
 }
@@ -1,13 +1,14 @@
 """Repairs platform for HACS."""
 
 from __future__ import annotations
-from typing import Any
 
-import voluptuous as vol
+from typing import Any
 
 from homeassistant import data_entry_flow
 from homeassistant.components.repairs import RepairsFlow
 from homeassistant.core import HomeAssistant
+import voluptuous as vol
+
 from custom_components.hacs.base import HacsBase
 
 from .const import DOMAIN
Binary files not shown.
@@ -50,16 +50,27 @@ if TYPE_CHECKING:
 
 
 TOPIC_FILTER = (
+    "add-on",
+    "addon",
+    "app",
+    "appdaemon-apps",
+    "appdaemon",
     "custom-card",
+    "custom-cards",
     "custom-component",
     "custom-components",
     "customcomponents",
     "hacktoberfest",
     "hacs-default",
     "hacs-integration",
+    "hacs-repository",
     "hacs",
     "hass",
     "hassio",
+    "home-assistant-custom",
+    "home-assistant-frontend",
+    "home-assistant-hacs",
+    "home-assistant-sensor",
     "home-assistant",
     "home-automation",
     "homeassistant-components",

@@ -68,16 +79,45 @@ TOPIC_FILTER = (
     "homeassistant",
     "homeautomation",
     "integration",
+    "lovelace-ui",
     "lovelace",
+    "media-player",
+    "mediaplayer",
+    "netdaemon",
+    "plugin",
+    "python_script",
+    "python-script",
     "python",
     "sensor",
+    "smart-home",
+    "smarthome",
     "theme",
     "themes",
-    "custom-cards",
-    "home-assistant-frontend",
-    "home-assistant-hacs",
-    "home-assistant-custom",
-    "lovelace-ui",
+)
+
+REPOSITORY_KEYS_TO_EXPORT = (
+    # Keys can not be removed from this list until v3
+    # If keys are added, the action need to be re-run with force
+    ("description", ""),
+    ("downloads", 0),
+    ("domain", None),
+    ("etag_repository", None),
+    ("full_name", ""),
+    ("last_commit", None),
+    ("last_updated", 0),
+    ("last_version", None),
+    ("manifest_name", None),
+    ("open_issues", 0),
+    ("stargazers_count", 0),
+    ("topics", []),
+)
+
+HACS_MANIFEST_KEYS_TO_EXPORT = (
+    # Keys can not be removed from this list until v3
+    # If keys are added, the action need to be re-run with force
+    ("country", []),
+    ("name", None),
 )
 
 
@@ -120,7 +160,6 @@ class RepositoryData:
     new: bool = True
     open_issues: int = 0
     published_tags: list[str] = []
-    pushed_at: str = ""
     releases: bool = False
     selected_tag: str = None
     show_beta: bool = False
@@ -147,32 +186,24 @@ class RepositoryData:
 
     def update_data(self, data: dict, action: bool = False) -> None:
         """Update data of the repository."""
-        for key in data:
+        for key, value in data.items():
             if key not in self.__dict__:
                 continue
-            if key == "pushed_at":
-                if data[key] == "":
-                    continue
-                if "Z" in data[key]:
-                    setattr(
-                        self,
-                        key,
-                        datetime.strptime(data[key], "%Y-%m-%dT%H:%M:%SZ"),
-                    )
-                else:
-                    setattr(self, key, datetime.strptime(data[key], "%Y-%m-%dT%H:%M:%S"))
+
+            if key == "last_fetched" and isinstance(value, float):
+                setattr(self, key, datetime.fromtimestamp(value))
             elif key == "id":
-                setattr(self, key, str(data[key]))
+                setattr(self, key, str(value))
             elif key == "country":
-                if isinstance(data[key], str):
-                    setattr(self, key, [data[key]])
+                if isinstance(value, str):
+                    setattr(self, key, [value])
                 else:
-                    setattr(self, key, data[key])
+                    setattr(self, key, value)
             elif key == "topics" and not action:
-                setattr(self, key, [topic for topic in data[key] if topic not in TOPIC_FILTER])
+                setattr(self, key, [topic for topic in value if topic not in TOPIC_FILTER])
 
             else:
-                setattr(self, key, data[key])
+                setattr(self, key, value)
 
 
 @attr.s(auto_attribs=True)
@@ -215,6 +246,20 @@ class HacsManifest:
             setattr(manifest_data, key, value)
         return manifest_data
 
+    def update_data(self, data: dict) -> None:
+        """Update the manifest data."""
+        for key, value in data.items():
+            if key not in self.__dict__:
+                continue
+
+            if key == "country":
+                if isinstance(value, str):
+                    setattr(self, key, [value])
+                else:
+                    setattr(self, key, value)
+            else:
+                setattr(self, key, value)
+
 
 class RepositoryReleases:
     """RepositoyReleases."""
@@ -449,6 +494,10 @@ class HacsRepository:
             self.logger.debug("%s Did not update, content was not modified", self.string)
             return
 
+        if self.repository_object:
+            self.data.last_updated = self.repository_object.attributes.get("pushed_at", 0)
+            self.data.last_fetched = datetime.utcnow()
+
         # Set topics
         self.data.topics = self.data.topics
 
@@ -497,7 +546,7 @@ class HacsRepository:
         self.additional_info = await self.async_get_info_file_contents()
 
         # Set last fetch attribute
-        self.data.last_fetched = datetime.now()
+        self.data.last_fetched = datetime.utcnow()
 
         return True
 
@@ -1011,7 +1060,11 @@ class HacsRepository:
             self.hacs.common.renamed_repositories[
                 self.data.full_name
             ] = repository_object.full_name
-            raise HacsRepositoryExistException
+            if not self.hacs.system.generator:
+                raise HacsRepositoryExistException
+            self.logger.error(
+                "%s Repository has been renamed - %s", self.string, repository_object.full_name
+            )
         self.data.update_data(
             repository_object.attributes,
             action=self.hacs.system.action,
@@ -3,7 +3,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any
 
-from homeassistant.helpers.issue_registry import async_create_issue, IssueSeverity
+from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
 from homeassistant.loader import async_get_custom_components
 
 from ..const import DOMAIN
@@ -1,5 +1,6 @@
 """Sensor platform for HACS."""
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
 
 from homeassistant.components.sensor import SensorEntity
@@ -7,6 +7,7 @@ from .base import HacsBase
 from .const import DOMAIN
 
 GITHUB_STATUS = "https://www.githubstatus.com/"
+CLOUDFLARE_STATUS = "https://www.cloudflarestatus.com/"
 
 
 @callback
@@ -39,4 +40,9 @@ async def system_health_info(hass):
     if hacs.system.disabled:
         data["Disabled"] = hacs.system.disabled_reason
 
+    if hacs.configuration.experimental:
+        data["HACS Data"] = system_health.async_check_can_reach_url(
+            hass, "https://data-v2.hacs.xyz/data.json", CLOUDFLARE_STATUS
+        )
+
     return data
@@ -90,6 +90,17 @@ class HacsRepositoryUpdateEntity(HacsRepositoryEntity, UpdateEntity):
         if self.repository.pending_restart or not self.repository.can_download:
             return None
 
+        if self.latest_version not in self.repository.data.published_tags:
+            releases = await self.repository.get_releases(
+                prerelease=self.repository.data.show_beta,
+                returnlimit=self.hacs.configuration.release_limit,
+            )
+            if releases:
+                self.repository.data.releases = True
+                self.repository.releases.objects = releases
+                self.repository.data.published_tags = [x.tag_name for x in releases]
+                self.repository.data.last_version = next(iter(self.repository.data.published_tags))
+
         release_notes = ""
         if len(self.repository.releases.objects) > 0:
             release = self.repository.releases.objects[0]
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,38 +1,45 @@
 """Data handler for HACS."""
+from __future__ import annotations
+
 import asyncio
 from datetime import datetime
+from typing import Any
 
 from homeassistant.core import callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.util import json as json_util
 
 from ..base import HacsBase
-from ..enums import HacsDisabledReason, HacsDispatchEvent, HacsGitHubRepo
+from ..const import HACS_REPOSITORY_ID
+from ..enums import HacsDisabledReason, HacsDispatchEvent
 from ..repositories.base import TOPIC_FILTER, HacsManifest, HacsRepository
 from .logger import LOGGER
 from .path import is_safe
 from .store import async_load_from_store, async_save_to_store
 
-DEFAULT_BASE_REPOSITORY_DATA = (
+EXPORTED_BASE_DATA = (
+    ("new", False),
+    ("full_name", ""),
+)
+
+EXPORTED_REPOSITORY_DATA = EXPORTED_BASE_DATA + (
     ("authors", []),
     ("category", ""),
     ("description", ""),
     ("domain", None),
     ("downloads", 0),
     ("etag_repository", None),
-    ("full_name", ""),
-    ("last_updated", 0),
     ("hide", False),
+    ("last_updated", 0),
     ("new", False),
     ("stargazers_count", 0),
     ("topics", []),
 )
 
-DEFAULT_EXTENDED_REPOSITORY_DATA = (
+EXPORTED_DOWNLOADED_REPOSITORY_DATA = EXPORTED_REPOSITORY_DATA + (
     ("archived", False),
     ("config_flow", False),
     ("default_branch", None),
-    ("description", ""),
     ("first_install", False),
     ("installed_commit", None),
     ("installed", False),
@@ -41,12 +48,9 @@ DEFAULT_EXTENDED_REPOSITORY_DATA = (
     ("manifest_name", None),
     ("open_issues", 0),
     ("published_tags", []),
-    ("pushed_at", ""),
     ("releases", False),
     ("selected_tag", None),
     ("show_beta", False),
-    ("stargazers_count", 0),
-    ("topics", []),
 )
 
 
@@ -80,6 +84,8 @@ class HacsData:
                 "ignored_repositories": self.hacs.common.ignored_repositories,
             },
         )
+        if self.hacs.configuration.experimental:
+            await self._async_store_experimental_content_and_repos()
         await self._async_store_content_and_repos()
 
     async def _async_store_content_and_repos(self, _=None):  # bb: ignore
@@ -94,40 +100,94 @@ class HacsData:
         for event in (HacsDispatchEvent.REPOSITORY, HacsDispatchEvent.CONFIG):
             self.hacs.async_dispatch(event, {})
 
+    async def _async_store_experimental_content_and_repos(self, _=None):  # bb: ignore
+        """Store the main repos file and each repo that is out of date."""
+        # Repositories
+        self.content = {}
+        for repository in self.hacs.repositories.list_all:
+            if repository.data.category in self.hacs.common.categories:
+                self.async_store_experimental_repository_data(repository)
+
+        await async_save_to_store(self.hacs.hass, "data", {"repositories": self.content})
+
     @callback
     def async_store_repository_data(self, repository: HacsRepository) -> dict:
         """Store the repository data."""
         data = {"repository_manifest": repository.repository_manifest.manifest}
 
-        for key, default_value in DEFAULT_BASE_REPOSITORY_DATA:
-            if (value := repository.data.__getattribute__(key)) != default_value:
+        for key, default in (
+            EXPORTED_DOWNLOADED_REPOSITORY_DATA
+            if repository.data.installed
+            else EXPORTED_REPOSITORY_DATA
+        ):
+            if (value := getattr(repository.data, key, default)) != default:
                 data[key] = value
 
-        if repository.data.installed:
-            for key, default_value in DEFAULT_EXTENDED_REPOSITORY_DATA:
-                if (value := repository.data.__getattribute__(key)) != default_value:
-                    data[key] = value
+        if repository.data.installed_version:
             data["version_installed"] = repository.data.installed_version
 
         if repository.data.last_fetched:
             data["last_fetched"] = repository.data.last_fetched.timestamp()
 
         self.content[str(repository.data.id)] = data
 
+    @callback
+    def async_store_experimental_repository_data(self, repository: HacsRepository) -> None:
+        """Store the experimental repository data for non downloaded repositories."""
+        data = {}
+        self.content.setdefault(repository.data.category, [])
+
+        if repository.data.installed:
+            data["repository_manifest"] = repository.repository_manifest.manifest
+            for key, default in EXPORTED_DOWNLOADED_REPOSITORY_DATA:
+                if (value := getattr(repository.data, key, default)) != default:
+                    data[key] = value
+
+            if repository.data.installed_version:
+                data["version_installed"] = repository.data.installed_version
+            if repository.data.last_fetched:
+                data["last_fetched"] = repository.data.last_fetched.timestamp()
+        else:
+            for key, default in EXPORTED_BASE_DATA:
+                if (value := getattr(repository.data, key, default)) != default:
+                    data[key] = value
+
+        self.content[repository.data.category].append({"id": str(repository.data.id), **data})
+
     async def restore(self):
         """Restore saved data."""
         self.hacs.status.new = False
+        repositories = {}
+        hacs = {}
+
         try:
             hacs = await async_load_from_store(self.hacs.hass, "hacs") or {}
         except HomeAssistantError:
-            hacs = {}
+            pass
 
         try:
-            repositories = await async_load_from_store(self.hacs.hass, "repositories") or {}
+            data = (
+                await async_load_from_store(
+                    self.hacs.hass,
+                    "data" if self.hacs.configuration.experimental else "repositories",
+                )
+                or {}
+            )
+            if data and self.hacs.configuration.experimental:
+                for category, entries in data.get("repositories", {}).items():
+                    for repository in entries:
+                        repositories[repository["id"]] = {"category": category, **repository}
+            else:
+                repositories = (
+                    data or await async_load_from_store(self.hacs.hass, "repositories") or {}
+                )
         except HomeAssistantError as exception:
             self.hacs.log.error(
                 "Could not read %s, restore the file from a backup - %s",
-                self.hacs.hass.config.path(".storage/hacs.repositories"),
+                self.hacs.hass.config.path(
+                    ".storage/hacs.data"
+                    if self.hacs.configuration.experimental
+                    else ".storage/hacs.repositories"
+                ),
                 exception,
             )
             self.hacs.disable_hacs(HacsDisabledReason.RESTORE)
@@ -136,6 +196,8 @@ class HacsData:
         if not hacs and not repositories:
             # Assume new install
             self.hacs.status.new = True
+            if self.hacs.configuration.experimental:
+                return True
             self.logger.info("<HacsData restore> Loading base repository information")
             repositories = await self.hacs.hass.async_add_executor_job(
                 json_util.load_json,
@@ -186,28 +248,34 @@ class HacsData:
             return False
         return True
 
-    async def register_unknown_repositories(self, repositories):
+    async def register_unknown_repositories(self, repositories, category: str | None = None):
         """Registry any unknown repositories."""
         register_tasks = [
             self.hacs.async_register_repository(
                 repository_full_name=repo_data["full_name"],
-                category=repo_data["category"],
+                category=repo_data.get("category", category),
                 check=False,
                 repository_id=entry,
             )
             for entry, repo_data in repositories.items()
-            if entry != "0" and not self.hacs.repositories.is_registered(repository_id=entry)
+            if entry != "0"
+            and not self.hacs.repositories.is_registered(repository_id=entry)
+            and repo_data.get("category", category) is not None
         ]
         if register_tasks:
            await asyncio.gather(*register_tasks)
 
     @callback
-    def async_restore_repository(self, entry, repository_data):
+    def async_restore_repository(self, entry: str, repository_data: dict[str, Any]):
         """Restore repository."""
-        full_name = repository_data["full_name"]
-        if not (repository := self.hacs.repositories.get_by_full_name(full_name)):
-            self.logger.error("<HacsData restore> Did not find %s (%s)", full_name, entry)
+        repository: HacsRepository | None = None
+        if full_name := repository_data.get("full_name"):
+            repository = self.hacs.repositories.get_by_full_name(full_name)
+        if not repository:
+            repository = self.hacs.repositories.get_by_id(entry)
+        if not repository:
             return
 
         # Restore repository attributes
         self.hacs.repositories.set_repository_id(repository, entry)
         repository.data.authors = repository_data.get("authors", [])
@@ -238,7 +306,7 @@ class HacsData:
         repository.data.last_fetched = datetime.fromtimestamp(last_fetched)
 
         repository.repository_manifest = HacsManifest.from_dict(
-            repository_data.get("repository_manifest", {})
+            repository_data.get("manifest") or repository_data.get("repository_manifest") or {}
         )
 
         if repository.localpath is not None and is_safe(self.hacs, repository.localpath):
@@ -248,6 +316,6 @@ class HacsData:
         if repository.data.installed:
             repository.data.first_install = False
 
-        if full_name == HacsGitHubRepo.INTEGRATION:
+        if entry == HACS_REPOSITORY_ID:
             repository.data.installed_version = self.hacs.version
             repository.data.installed = True
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -66,8 +66,9 @@ async def hacs_repositories_list(
                 "topics": repo.data.topics,
             }
             for repo in hacs.repositories.list_all
-            if repo.data.category in (msg.get("categories") or hacs.common.categories)
+            if repo.data.category in msg.get("categories", hacs.common.categories)
             and not repo.ignored_by_country_configuration
+            and (not hacs.configuration.experimental or repo.data.last_fetched)
         ],
     )
 )
@@ -201,8 +202,6 @@ async def hacs_repositories_remove(
 ):
     """Remove custom repositoriy."""
     hacs: HacsBase = hass.data.get(DOMAIN)
-    hacs.log.warning(connection.context)
-    hacs.log.warning(msg)
     repository = hacs.repositories.get_by_id(msg["repository"])
 
     repository.remove()
@@ -10,7 +10,8 @@ from homeassistant.const import (
     CONF_SCAN_INTERVAL,
 )
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.exceptions import ConfigEntryNotReady, ConfigEntryAuthFailed
+from hyundai_kia_connect_api.exceptions import *
 import hashlib
 
 from .const import (
@@ -52,9 +53,10 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
         await coordinator.async_config_entry_first_refresh()
     except Exception as ex:
         raise ConfigEntryNotReady(f"Config Not Ready: {ex}")
 
     hass.data.setdefault(DOMAIN, {})
     hass.data[DOMAIN][config_entry.unique_id] = coordinator
-    hass.config_entries.async_setup_platforms(config_entry, PLATFORMS)
+    await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
     async_setup_services(hass)
     return True
+
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff