periodic push
@@ -1 +1 @@
2023.2.5
2023.6.3

.gitignore (vendored): 4 changes
@@ -29,4 +29,6 @@ home-assistant.log.fault
# Ensure these YAML files are ignored, otherwise your secret data/credentials will leak.
ip_bans.yaml
secrets.yaml
known_devices.yaml
known_devices.yaml
*.pyc
*.gz

automations.yaml: 586 changes
@@ -125,23 +125,14 @@
use_blueprint:
path: freakshock88/motion_illuminance_activated_entity.yaml
input:
motion_sensor: binary_sensor.lumi_lumi_sensor_motion_aq2_occupancy
target_entity: light.lamp_wc_beneden_light
illuminance_sensor: sensor.lumi_lumi_sensor_motion_aq2_illuminance
no_motion_wait: input_number.beweging_toilet_timer
target_off_entity: light.lamp_wc_beneden_light
illuminance_cutoff: input_number.beweging_toilet_helderheid
- id: '1660502406213'
alias: Tuin - Lights On At Sunset
description: ''
use_blueprint:
path: CyanAutomation/lights_on_at_sunset.yaml
input:
target_brightness: 40
target_light:
area_id: tuin
motion_sensor: binary_sensor.lumi_lumi_sensor_motion_aq2_occupancy
- id: '1660507765057'
alias: woonkamer - alles uit
alias: woonkamer - alles uit - sfeer schakelaar
description: ''
trigger:
- platform: device
@@ -164,44 +155,26 @@
action:
- service: scene.turn_on
target:
entity_id: scene.beneden_alles_uit
entity_id: scene.woonkamer_ochtend_scene
metadata: {}
mode: single
- id: '1660508989788'
alias: Slaapkamer - bedlamp aan lage helderheid
description: ''
trigger:
- device_id: dc42e9871d6dfc0ce76c594054038cd9
domain: zha
platform: device
type: remote_button_short_press
subtype: button_1
condition: []
action:
- choose:
- conditions:
- condition: device
type: is_off
device_id: 01b9a993ffea323f6f094e9c231f6d3c
entity_id: light.lamp_bed_willem_light
domain: light
sequence:
- type: turn_on
device_id: 01b9a993ffea323f6f094e9c231f6d3c
entity_id: light.lamp_bed_willem_light
domain: light
brightness_pct: 10
- conditions:
- condition: device
type: is_on
device_id: 01b9a993ffea323f6f094e9c231f6d3c
entity_id: light.lamp_bed_willem_light
domain: light
sequence:
- type: turn_off
device_id: 01b9a993ffea323f6f094e9c231f6d3c
entity_id: light.lamp_bed_willem_light
domain: light
- type: turn_off
device_id: 6652a530d4f2c349be36ea58904e613f
entity_id: switch.display_woonkamer_browsee_screen
domain: switch
- delay:
hours: 0
minutes: 0
seconds: 20
milliseconds: 0
- service: scene.turn_on
data:
transition: 19
target:
entity_id: scene.tuin_nacht
- type: turn_off
device_id: 1281186f8b90c2b00053decdf33a6d72
entity_id: light.lamp_aanrecht
domain: light
mode: single
- id: '1661107342293'
alias: Zolder - Wasdroger cycle
@@ -250,51 +223,33 @@
target:
entity_id: scene.tuin_uit
metadata: {}
mode: single
- id: '1661711305212'
alias: Tuin - waterklep fix
description: ''
trigger:
- type: opened
platform: device
device_id: 172891d014f4ffcaefd3e0310574ed3a
entity_id: binary_sensor.waterklep_contact_contact
domain: binary_sensor
condition:
- condition: device
type: is_on
device_id: 14791754a4e8dd8e44b075ab2b932296
entity_id: switch.waterklep
domain: switch
for:
- delay:
hours: 0
minutes: 0
seconds: 4
action:
- type: turn_on
device_id: 14791754a4e8dd8e44b075ab2b932296
entity_id: switch.waterklep
domain: switch
mode: restart
seconds: 30
milliseconds: 0
- service: light.turn_off
data: {}
target:
entity_id:
- light.tuin_verlichting
- light.tuin_achtertuin
mode: single
- id: '1661803600011'
alias: Woonkamer - Beweging
description: ''
trigger:
- type: motion
platform: device
device_id: dba81805bffdc5ab5eb183b0c2d845dc
entity_id: binary_sensor.beweging_woonkamer2_iaszone
device_id: 4cf96197bf033071d030814729b20dfb
entity_id: binary_sensor.ikea_of_sweden_tradfri_motion_sensor_motion
domain: binary_sensor
for:
hours: 0
minutes: 0
seconds: 3
condition:
- condition: or
conditions:
- condition: time
after: 06:30:00
before: 08:00:00
after: 04:30:00
before: sensor.sun_next_rising
weekday:
- sun
- mon
@@ -303,8 +258,9 @@
- thu
- sat
- fri
alias: Before sun rise
- condition: time
after: '17:30:00'
after: sensor.sun_next_setting
before: '23:00:00'
weekday:
- sun
@@ -314,6 +270,7 @@
- thu
- fri
- sat
alias: After Sunset
- condition: device
type: is_off
device_id: bc61b8f4ddf2fc04d3a0a6001ea7c7c8
@@ -323,85 +280,13 @@
hours: 0
minutes: 10
seconds: 0
enabled: true
action:
- service: scene.turn_on
target:
entity_id: scene.woonkamer_sfeer_verlichting_aan
metadata: {}
mode: single
- id: '1662235717886'
alias: Woonkamer - CO2 melding
description: ''
trigger:
- platform: numeric_state
entity_id: sensor.woonkamer_co2
for:
hours: 0
minutes: 5
seconds: 0
attribute: state_class
above: '1000'
condition: []
action:
- service: notify.mobile_app_iphone
data:
message: Co2 in de woonkamer te hoog, nu ventileren
title: Let op!
- service: notify.mobile_app_iphone_van_ilse
data:
message: Co2 in de woonkamer te hoog, nu ventileren
title: Let op!
mode: single
- id: '1662613235716'
alias: Slaapkamer - toggle rgb lamp
description: ''
trigger:
- device_id: dc42e9871d6dfc0ce76c594054038cd9
domain: zha
platform: device
type: remote_button_short_press
subtype: button_2
condition: []
action:
- if:
- condition: device
type: is_off
device_id: 486c39036f87bee0fb2ed8521eb89559
entity_id: light.lamp_rgb_slaapkamer
domain: light
then:
- service: scene.turn_on
target:
entity_id: scene.slaapkamer_dim_wit
metadata: {}
else:
- type: turn_off
device_id: 486c39036f87bee0fb2ed8521eb89559
entity_id: light.lamp_rgb_slaapkamer
domain: light
mode: single
- id: '1666338442880'
alias: Tuin - melding poort
description: ''
trigger:
- type: opened
platform: device
device_id: 692b4399bddfc992385e65ea0fcf8af6
entity_id: binary_sensor.deurcontact_poort_contact
domain: binary_sensor
for:
hours: 0
minutes: 0
seconds: 3
condition: []
action:
- service: notify.mobile_app_iphone_van_ilse
data:
message: sensor van de poort getriggerd (poort open)
- service: notify.mobile_app_iphone_van_willem
data:
message: sensor van de poort getriggerd (poort open)
mode: single
- id: '1666506600186'
alias: Slaapkamer - bed lamp feller
description: ''
@@ -450,7 +335,7 @@
brightness_pct: 20
- type: turn_on
device_id: 4c2d45d53cd5318e416fdc1cc942f65a
entity_id: light.lamp_dressoir_light
entity_id: light.lamp_dressoir
domain: light
brightness_pct: 20
- conditions:
@@ -466,7 +351,7 @@
domain: light
- type: turn_off
device_id: 4c2d45d53cd5318e416fdc1cc942f65a
entity_id: light.lamp_dressoir_light
entity_id: light.lamp_dressoir
domain: light
mode: single
- id: '1671052282402'
@@ -491,23 +376,6 @@
domain: light
brightness_pct: 100
mode: single
- id: '1671659606183'
alias: Alles uit
description: ''
trigger:
- platform: device
domain: mqtt
device_id: c976ab1909dcc67895eccdce5708b0dc
type: action
subtype: brightness_move_down_1
discovery_id: 0x60a423fffe28320f action_brightness_move_down_1
condition: []
action:
- service: scene.turn_on
data: {}
target:
entity_id: scene.beneden_alles_uit
mode: single
- id: '1672424581965'
alias: slaapkamer tim - schakelaar (Actions)
description: ''
@@ -710,38 +578,6 @@
device_id: 9f4dd21a83f9473a5350876da52296d6
entity_id: light.lamp_bank_light
domain: light
- id: '1674681637235'
alias: Slaapkamer i&w - knoppen bed willem [BP]
description: ''
use_blueprint:
path: andordavoti/zha-aqara-wireless-switch-WXKG02LM.yaml
input:
aqara_switch: dc42e9871d6dfc0ce76c594054038cd9
button_single_press_right:
- choose:
- conditions:
- condition: device
type: is_off
device_id: 01b9a993ffea323f6f094e9c231f6d3c
entity_id: light.lamp_bed_willem_light
domain: light
sequence:
- type: turn_on
device_id: 01b9a993ffea323f6f094e9c231f6d3c
entity_id: light.lamp_bed_willem_light
domain: light
brightness_pct: 10
- conditions:
- condition: device
type: is_on
device_id: 01b9a993ffea323f6f094e9c231f6d3c
entity_id: light.lamp_bed_willem_light
domain: light
sequence:
- type: turn_off
device_id: 01b9a993ffea323f6f094e9c231f6d3c
entity_id: light.lamp_bed_willem_light
domain: light
- id: '1675520145446'
alias: Zolder - knoppen bureau willem (BP)
description: ''
@@ -760,15 +596,15 @@
target:
entity_id: scene.klaar_met_werken_op_zolder
on_button_2_short:
- device_id: 9186cc61ae5a6d7206c02fc67cfdb878
- device_id: 53a4e48d13f3f94e7ca1ffe0557ac135
domain: climate
entity_id: climate.ac_zolder
entity_id: climate.airco_4
type: set_hvac_mode
hvac_mode: heat_cool
off_button_2_short:
- device_id: 9186cc61ae5a6d7206c02fc67cfdb878
- device_id: 53a4e48d13f3f94e7ca1ffe0557ac135
domain: climate
entity_id: climate.ac_zolder
entity_id: climate.airco_4
type: set_hvac_mode
hvac_mode: 'off'
- id: '1675628615548'
@@ -776,23 +612,28 @@
description: ''
trigger:
- platform: time
at: 00:01:00
at: 03:00:00
condition:
- condition: or
conditions:
- condition: template
value_template: '{{ (as_timestamp(now()) - as_timestamp(states.sensor.roborock_vacuum_s5e_last_clean_end.last_updated)
)/3600 > 8}}'
enabled: false
- condition: and
conditions:
- condition: template
value_template: " - condition: template\n value_template: >-\n {{
(as_timestamp(now()) -\n as_timestamp(states.sensor.roborock_vacuum_s5e_last_clean_end.last_updated)\n
\ )/3600 > 1}}"
- condition: numeric_state
entity_id: sensor.roborock_vacuum_s5e_last_clean_area
below: 7
enabled: false
enabled: false
action:
- service: vacuum.set_fan_speed
data:
fan_speed: Low
target:
device_id: b99930765798a4796ca3ebb87e84299e
- device_id: b99930765798a4796ca3ebb87e84299e
domain: vacuum
entity_id: vacuum.roborock_vacuum_s5e
@@ -807,6 +648,24 @@
offset: 0
condition: []
action:
- service: scene.turn_on
target:
entity_id: scene.tuin_avond
metadata: {}
- delay:
hours: 0
minutes: 0
seconds: 30
milliseconds: 0
- service: scene.turn_on
target:
entity_id: scene.tuin_avond
metadata: {}
- delay:
hours: 0
minutes: 0
seconds: 30
milliseconds: 0
- service: scene.turn_on
target:
entity_id: scene.tuin_avond
@@ -832,7 +691,7 @@
minutes: 1
seconds: 0
- condition: time
after: '22:00:00'
after: '20:30:00'
before: 05:00:00
- type: is_no_motion
condition: device
@@ -868,3 +727,302 @@
- service: notify.mobile_app_iphone_van_willem
data:
message: sensor batteries are low {{sensors}}
- id: '1677526576561'
alias: slaapkamer i&w - knoppen bed willem (knop 2)
description: ''
trigger:
- device_id: dc42e9871d6dfc0ce76c594054038cd9
domain: zha
platform: device
type: remote_button_short_press
subtype: button_2
id: remote_button2_short_press
- device_id: dc42e9871d6dfc0ce76c594054038cd9
domain: zha
platform: device
type: remote_button_double_press
subtype: button_2
id: remote_button2_double_press
- device_id: dc42e9871d6dfc0ce76c594054038cd9
domain: zha
platform: device
type: remote_button_long_press
subtype: button_2
id: remote_button2_long_press
- device_id: dc42e9871d6dfc0ce76c594054038cd9
domain: zha
platform: device
type: remote_button_short_press
subtype: button_1
id: button_1_short
condition: []
action:
- if:
- condition: device
type: is_off
device_id: 01b9a993ffea323f6f094e9c231f6d3c
entity_id: light.lamp_bed_willem_light
domain: light
then:
- choose:
- conditions:
- condition: trigger
id: remote_button2_short_press
sequence:
- type: turn_on
device_id: 01b9a993ffea323f6f094e9c231f6d3c
entity_id: light.lamp_bed_willem_light
domain: light
brightness_pct: 5
- conditions:
- condition: trigger
id: remote_button2_double_press
sequence:
- type: turn_on
device_id: 01b9a993ffea323f6f094e9c231f6d3c
entity_id: light.lamp_bed_willem_light
domain: light
brightness_pct: 100
- conditions:
- condition: trigger
id: remote_button2_long_press
sequence:
- service: light.turn_on
target:
entity_id: light.lamp_bed_willem_light
data:
brightness: '{% if state_attr("light.lamp_bed_willem_light","brightness")
is none %} 8 {% elif state_attr("light.lamp_bed_willem_light","brightness")|int
< 9 %} 32 {% elif state_attr("light.lamp_bed_willem_light","brightness")|int
< 33 %} 128 {% elif state_attr("light.lamp_bed_willem_light","brightness")|int
< 129 %} 160 {% elif state_attr("light.lamp_bed_willem_light","brightness")|int
< 161 %} 192 {% elif state_attr("light.lamp_bed_willem_light","brightness")|int
< 193 %} 224 {% elif state_attr("light.lamp_bed_willem_light","brightness")|int
< 225 %} 255 {% else %} 10 {% endif %}

'
else:
- type: turn_off
device_id: 01b9a993ffea323f6f094e9c231f6d3c
entity_id: light.lamp_bed_willem_light
domain: light
mode: single
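Note: the Jinja template in the long-press branch above steps the lamp through a fixed brightness ladder on each press. A plain-Python sketch of the same ladder logic (hypothetical helper, not part of this commit):

STEPS = [(9, 32), (33, 128), (129, 160), (161, 192), (193, 224), (225, 255)]

def next_brightness(current):
    # None means the light has never reported a brightness: start low.
    if current is None:
        return 8
    # Jump to the target paired with the first boundary the current value is under.
    for upper_bound, target in STEPS:
        if current < upper_bound:
            return target
    return 10  # already at maximum: wrap back to a low level

assert next_brightness(None) == 8
assert next_brightness(8) == 32
assert next_brightness(255) == 10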
- id: '1678901438847'
alias: Knop kamer Luuk, verwarming uit
description: ''
trigger:
- device_id: 17ebe217929cb75025e2e52c47113267
domain: zha
platform: device
type: remote_button_short_press
subtype: remote_button_short_press
condition: []
action:
- service: climate.set_temperature
data:
temperature: 16
target:
entity_id: climate.kamer_luuk
mode: single
- id: '1678902018862'
alias: knop luuk dubbel klik - verwarming aan
description: ''
trigger:
- device_id: 17ebe217929cb75025e2e52c47113267
domain: zha
platform: device
type: remote_button_double_press
subtype: remote_button_double_press
condition: []
action:
- service: climate.set_preset_mode
data:
preset_mode: None
target:
entity_id: climate.kamer_luuk
- service: climate.set_temperature
data:
temperature: 18.5
target:
entity_id: climate.kamer_luuk
mode: single
- id: '1679258969777'
alias: badkamer - thermostaat automatisch uit
description: ''
trigger:
- platform: numeric_state
entity_id: climate.badkamer
for:
hours: 2
minutes: 0
seconds: 0
above: 17.5
attribute: temperature
condition:
- type: is_not_occupied
condition: device
device_id: dd0fea1459ae5d17823e14348b73cb24
entity_id: binary_sensor.lumi_lumi_sensor_motion_aq2_occupancy_2
domain: binary_sensor
for:
hours: 0
minutes: 20
seconds: 0
action:
- service: climate.set_temperature
data:
temperature: 16
target:
entity_id: climate.badkamer
- service: climate.set_temperature
data:
temperature: 15
target:
entity_id: climate.vloerverwarming_badkamer
- service: notify.mobile_app_iphone_van_willem
data:
message: Badkamer verwarming uit
mode: single
- id: '1679849595183'
alias: woonkamer - alles uit als het licht is
description: ''
trigger:
- platform: time
at: sensor.sun_next_rising
condition: []
action:
- service: scene.turn_on
target:
entity_id: scene.beneden_alles_uit
metadata: {}
mode: single
- id: '1683562357537'
alias: awtrix_weather_app
description: ''
use_blueprint:
path: smarthomejunkie/awtrix_weather_app.yaml
input:
awtrix_display: 658104c6c966f75ddf9c747816e4a8f4
my_sensor: weather.forecast_home
switch_to_app: false
toggle_helper: input_boolean.awtrix_weather_app_toggle
show_rainbow: false
duration: '10'
push_icon: '1'
show_windspeed: false
- id: '1683562545849'
alias: awtrix_rain_app
description: ''
use_blueprint:
path: smarthomejunkie/awtrix_rain_forecast.yaml
input:
awtrix_display: 658104c6c966f75ddf9c747816e4a8f4
toggle_helper: input_boolean.awtrix_rain_app_toggle
my_sensor: weather.forecast_home
graph_type: line
switch_to_app: true
- id: '1683566553338'
alias: awtrix_sensor_power_app
description: ''
use_blueprint:
path: smarthomejunkie/awtrix_create_sensor_app.yaml
input:
awtrix_display: 658104c6c966f75ddf9c747816e4a8f4
toggle_helper: input_boolean.awtrix_power_sensor_toggle
my_sensor: sensor.dsmr_reading_electricity_currently_delivered
my_icon: '21256'
show_rainbow: false
- id: '1683882275516'
alias: awtrix_sensor_pm2.5
description: ''
use_blueprint:
path: smarthomejunkie/awtrix_create_sensor_app.yaml
input:
awtrix_display: 658104c6c966f75ddf9c747816e4a8f4
toggle_helper: input_boolean.awtrix_power_sensor_toggle
my_icon: '2718'
push_icon: '1'
my_sensor: sensor.particulate_matter_2_5um_concentration
- id: '1683882676954'
alias: awtrix_stock_batt_toggle
description: ''
use_blueprint:
path: smarthomejunkie/awtrix_toggle_stock_app.yaml
input:
stock_app: bat
awtrix_display: 658104c6c966f75ddf9c747816e4a8f4
toggle_helper: input_boolean.awtrix_stock_batt_toggle
- id: '1684882366818'
alias: zolder - airco automatisch uit
description: ''
trigger:
- type: not_occupied
platform: device
device_id: 029153653b8e5f423c3350efedb3b0d3
entity_id: binary_sensor.aqs_zolder_still_target
domain: binary_sensor
for:
hours: 0
minutes: 20
seconds: 0
condition:
- condition: device
type: is_on
device_id: 6f9cbe311638680986f710f63e99e576
entity_id: switch.contact_airco_flap_zolder_switch
domain: switch
enabled: true
action:
- device_id: 53a4e48d13f3f94e7ca1ffe0557ac135
domain: climate
entity_id: climate.airco_4
type: set_hvac_mode
hvac_mode: 'off'
mode: single
- id: '1685739271048'
alias: Woonkamer - display aan
description: ''
trigger:
- platform: time
at: sensor.sun_next_rising
condition: []
action:
- type: turn_on
device_id: 6652a530d4f2c349be36ea58904e613f
entity_id: switch.display_woonkamer_browsee_screen
domain: switch
mode: single
- id: '1686156742969'
alias: woonkamer - beweging - leds - aan
description: ''
trigger:
- type: occupied
platform: device
device_id: 5bb607cec18e50a97f334c94836fa9f0
entity_id: binary_sensor.aqs_woonkamer2_aqs_woonkamer2_still_target
domain: binary_sensor
condition: []
action:
- type: turn_on
device_id: 5bb607cec18e50a97f334c94836fa9f0
entity_id: light.aqs_woonkamer2_aqs_woonkamer2_rgb_light
domain: light
brightness_pct: 50
mode: single
- id: '1686848917898'
alias: Tuin - automatisch water op timer
description: ''
trigger:
- platform: device
type: turned_on
device_id: e1fd7471fc9fcde9a19ec7175a486dd9
entity_id: switch.valve
domain: switch
condition: []
action:
- service: notify.mobile_app_iphone_van_willem
data:
message: Water timer gestart
- service: script.tuin_water_op_timer
data: {}
mode: single

@@ -7,6 +7,13 @@ frontend:
homeassistant:
#packages: !include_dir_named packages/
packages: !include_dir_named "integrations"

allowlist_external_dirs:
- "/config/www/images"

media_dirs:
media: /media
media: /media

logger:
default: warning
logs:
custom_components.kia_uvo: debug

Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -2,7 +2,12 @@ import logging
from datetime import timedelta

SENSOR_TYPES = {
"cleanprofsgft": ["CleanprofsGft", "mdi:recycle"],
"cleanprofspbd": ["CleanprofsPbd", "mdi:recycle"],
"cleanprofsrestafval": ["CleanprofsRestafval", "mdi:recycle"],
"gft": ["GFT", "mdi:recycle"],
"grofvuil": ["Grofvuil", "mdi:recycle"],
"kca": ["KCA", "mdi:recycle"],
"kerstboom": ["Kerstboom", "mdi:recycle"],
"papier": ["Papier", "mdi:recycle"],
"pbd": ["PBD", "mdi:recycle"],
@@ -15,7 +20,7 @@ SENSOR_TYPES = {

SENSOR_LOCATIONS_TO_URL = {
"trashapi": [
"http://trashapi.azurewebsites.net/trash?Location={0}&ZipCode={1}&HouseNumber={2}&HouseNumberSuffix={3}&District={4}&DiftarCode={5}&ShowWholeYear={6}"
"http://trashapi.azurewebsites.net/trash?Location={0}&ZipCode={1}&HouseNumber={2}&HouseNumberSuffix={3}&District={4}&DiftarCode={5}&ShowWholeYear={6}&GetCleanprofsData={7}"
]
}

@@ -77,7 +82,6 @@ CONF_DIFTAR_CODE = "diftarcode"
SENSOR_PREFIX = "Afvalinfo "
ATTR_ERROR = "error"
ATTR_LAST_UPDATE = "last_update"
ATTR_HIDDEN = "hidden"
ATTR_IS_COLLECTION_DATE_TODAY = "is_collection_date_today"
ATTR_DAYS_UNTIL_COLLECTION_DATE = "days_until_collection_date"
ATTR_YEAR_MONTH_DAY_DATE = "year_month_day_date"

Binary file not shown.
Binary file not shown.
@@ -1,5 +1,4 @@
from ..const.const import (
MONTH_TO_NUMBER,
SENSOR_LOCATIONS_TO_URL,
_LOGGER,
)
@@ -7,10 +6,11 @@ from datetime import date, datetime, timedelta
import urllib.request
import urllib.error
import requests
import asyncio


class TrashApiAfval(object):
def get_data(
async def get_data(
self,
location,
postcode,
@@ -20,6 +20,7 @@ class TrashApiAfval(object):
diftar_code,
get_whole_year,
resources,
get_cleanprofs_data,
):
_LOGGER.debug("Updating Waste collection dates")

@@ -32,9 +33,14 @@ class TrashApiAfval(object):
district,
diftar_code,
get_whole_year,
get_cleanprofs_data,
)

r = requests.get(url=API_ENDPOINT)
loop = asyncio.get_event_loop()
future = loop.run_in_executor(None, requests.get, API_ENDPOINT)
r = await future

# r = await requests.get(url=API_ENDPOINT, timeout=10)
dataList = r.json()

# Place all possible values in the dictionary even if they are not necessary
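Note: the hunk above replaces the blocking requests.get call with run_in_executor, so the HTTP request runs on a worker thread instead of stalling Home Assistant's event loop. A minimal standalone sketch of that pattern (the URL is a placeholder, not the real endpoint):

import asyncio

import requests

API_ENDPOINT = "http://example.invalid/trash"  # placeholder URL for illustration

async def fetch_json(url):
    # requests is synchronous, so hand the call to the default thread-pool
    # executor and await the result; the event loop stays free meanwhile.
    loop = asyncio.get_running_loop()
    response = await loop.run_in_executor(None, requests.get, url)
    return response.json()

# asyncio.run(fetch_json(API_ENDPOINT))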
@@ -43,10 +49,22 @@ class TrashApiAfval(object):
# _LOGGER.warning(dataList)

for data in dataList:

# find gft, kerstboom, papier, pbd, takken or textiel
if (
("gft" in resources and data["name"].lower() == "gft")
(
"cleanprofsgft" in resources
and data["name"].lower() == "cleanprofsgft"
)
or (
"cleanprofspbd" in resources
and data["name"].lower() == "cleanprofspbd"
)
or (
"cleanprofsrestafval" in resources
and data["name"].lower() == "cleanprofsrestafval"
)
or ("gft" in resources and data["name"].lower() == "gft")
or ("grofvuil" in resources and data["name"].lower() == "grofvuil")
or ("kca" in resources and data["name"].lower() == "kca")
or (
"kerstboom" in resources and data["name"].lower() == "kerstboom"
)
@@ -58,7 +76,7 @@ class TrashApiAfval(object):
waste_array.append(
{data["name"].lower(): data["date"].split("T")[0]}
)
# find restafval.
# find restafval and diftar.
if "restafval" in resources and data["name"].lower() == "restafval":
if (
date.today()

@@ -1,7 +1,7 @@
{
"domain": "afvalinfo",
"name": "Afvalinfo",
"version": "1.1.0",
"version": "1.3.1",
"documentation": "https://github.com/heyajohnny/afvalinfo",
"issue_tracker": "https://github.com/heyajohnny/afvalinfo/issues",
"dependencies": [],

@@ -31,7 +31,6 @@ from .const.const import (
SENSOR_PREFIX,
ATTR_ERROR,
ATTR_LAST_UPDATE,
ATTR_HIDDEN,
ATTR_DAYS_UNTIL_COLLECTION_DATE,
ATTR_IS_COLLECTION_DATE_TODAY,
ATTR_YEAR_MONTH_DAY_DATE,
@@ -62,9 +61,11 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
vol.Optional(CONF_STREET_NUMBER_SUFFIX, default=""): cv.string,
vol.Optional(CONF_DISTRICT, default=""): cv.string,
vol.Optional(CONF_DATE_FORMAT, default="%d-%m-%Y"): cv.string,
vol.Optional(CONF_TIMESPAN_IN_DAYS, default="365"): cv.string,
vol.Optional(CONF_LOCALE, default="en"): cv.string,
vol.Optional(CONF_ID, default=""): cv.string,
vol.Optional(
CONF_TIMESPAN_IN_DAYS, default="365"
): cv.string, # Not used anymore 20230507, but gives errors in configs that still have the timespanindays set
vol.Optional(CONF_NO_TRASH_TEXT, default="none"): cv.string,
vol.Optional(CONF_DIFTAR_CODE, default=""): cv.string,
vol.Optional(CONF_GET_WHOLE_YEAR, default="false"): cv.string,
@@ -72,7 +73,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
)


def setup_platform(hass, config, add_entities, discovery_info=None):
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
_LOGGER.debug("Setup Afvalinfo sensor")

location = config.get(CONF_CITY).lower().strip()
@@ -83,7 +84,6 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
street_number_suffix = config.get(CONF_STREET_NUMBER_SUFFIX)
district = config.get(CONF_DISTRICT)
date_format = config.get(CONF_DATE_FORMAT).strip()
timespan_in_days = config.get(CONF_TIMESPAN_IN_DAYS)
locale = config.get(CONF_LOCALE)
id_name = config.get(CONF_ID)
no_trash_text = config.get(CONF_NO_TRASH_TEXT)
@@ -104,6 +104,15 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
if "trash_type_tomorrow" in resourcesMinusTodayAndTomorrow:
resourcesMinusTodayAndTomorrow.remove("trash_type_tomorrow")

# Check if resources contain cleanprofsgft or cleanprofsrestafval
if (
"cleanprofsgft" in resourcesMinusTodayAndTomorrow
or "cleanprofsrestafval" in resourcesMinusTodayAndTomorrow
):
get_cleanprofs_data = True
else:
get_cleanprofs_data = False

data = AfvalinfoData(
location,
postcode,
@@ -113,7 +122,12 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
diftar_code,
get_whole_year,
resourcesMinusTodayAndTomorrow,
get_cleanprofs_data,
)

# Initial trigger for updating data
await data.async_update()

except urllib.error.HTTPError as error:
_LOGGER.error(error.reason)
return False
@@ -145,7 +159,6 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
sensor_type,
sensor_friendly_name,
date_format,
timespan_in_days,
locale,
id_name,
get_whole_year,
@@ -175,7 +188,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
)
entities.append(tomorrow)

add_entities(entities)
async_add_entities(entities)


class AfvalinfoData(object):
@@ -189,6 +202,7 @@ class AfvalinfoData(object):
diftar_code,
get_whole_year,
resources,
get_cleanprofs_data,
):
self.data = None
self.location = location
@@ -199,11 +213,13 @@ class AfvalinfoData(object):
self.diftar_code = diftar_code
self.get_whole_year = get_whole_year
self.resources = resources
self.get_cleanprofs_data = get_cleanprofs_data

# This will make sure that we can't execute it more often
# than the MIN_TIME_BETWEEN_UPDATES
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
_LOGGER.debug("Updating Waste collection dates")
self.data = TrashApiAfval().get_data(
async def async_update(self):
self.data = await TrashApiAfval().get_data(
self.location,
self.postcode,
self.street_number,
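Note: the comment above describes the throttling scheme: the sensor entities may poll often, but @Throttle drops calls that arrive before MIN_TIME_BETWEEN_UPDATES has elapsed. A simplified stand-in, assuming homeassistant.util.Throttle behaves roughly like this:

import asyncio
from datetime import datetime, timedelta

def throttle(min_interval):
    # Skip calls that arrive sooner than min_interval after the last run.
    def decorator(func):
        last_run = None

        async def wrapper(*args, **kwargs):
            nonlocal last_run
            now = datetime.now()
            if last_run is not None and now - last_run < min_interval:
                return None  # throttled: the caller keeps its previous data
            last_run = now
            return await func(*args, **kwargs)

        return wrapper
    return decorator

@throttle(timedelta(minutes=5))
async def update():
    print("expensive API call")

# Two calls in a row: only the first one actually runs.
asyncio.run(update())
asyncio.run(update())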
@@ -212,6 +228,7 @@ class AfvalinfoData(object):
self.diftar_code,
self.get_whole_year,
self.resources,
self.get_cleanprofs_data,
)


@@ -222,7 +239,6 @@ class AfvalinfoSensor(Entity):
sensor_type,
sensor_friendly_name,
date_format,
timespan_in_days,
locale,
id_name,
get_whole_year,
@@ -231,7 +247,6 @@ class AfvalinfoSensor(Entity):
self.type = sensor_type
self.friendly_name = sensor_friendly_name
self.date_format = date_format
self.timespan_in_days = timespan_in_days
self.locale = locale
self._name = sensor_friendly_name
self._get_whole_year = get_whole_year
@@ -250,7 +265,6 @@ class AfvalinfoSensor(Entity):
+ sensor_friendly_name
)
self._icon = SENSOR_TYPES[sensor_type][1]
self._hidden = False
self._error = False
self._state = None
self._last_update = None
@@ -280,7 +294,6 @@ class AfvalinfoSensor(Entity):
ATTR_FRIENDLY_NAME: self.friendly_name,
ATTR_YEAR_MONTH_DAY_DATE: self._year_month_day_date,
ATTR_LAST_UPDATE: self._last_update,
ATTR_HIDDEN: self._hidden,
ATTR_DAYS_UNTIL_COLLECTION_DATE: self._days_until_collection_date,
ATTR_IS_COLLECTION_DATE_TODAY: self._is_collection_date_today,
ATTR_LAST_COLLECTION_DATE: self._last_collection_date,
@@ -288,22 +301,27 @@ class AfvalinfoSensor(Entity):
ATTR_WHOLE_YEAR_DATES: self._whole_year_dates,
}

@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
self.data.update()
# Run this every minute
@Throttle(timedelta(minutes=1))
async def async_update(self):
"""We are calling this often,
but the @Throttle on the data.async_update
will limit the times it will be executed"""
await self.data.async_update()
waste_array = self.data.data
self._error = False

# Loop through all the dates to put the dates in the whole_year_dates attribute
if self._get_whole_year == "True":
whole_year_dates = []
for waste_data in waste_array:
if self.type in waste_data:
whole_year_dates.append(
datetime.strptime(waste_data[self.type], "%Y-%m-%d").date()
)
if waste_array:
for waste_data in waste_array:
if self.type in waste_data:
whole_year_dates.append(
datetime.strptime(waste_data[self.type], "%Y-%m-%d").date()
)

self._whole_year_dates = whole_year_dates
self._whole_year_dates = whole_year_dates

try:
if waste_array:
@@ -327,72 +345,69 @@
date.today() == collection_date
)

if (
self.type == "restafval"
and "restafvaldiftardate" in waste_data
):
self._last_collection_date = str(
datetime.strptime(
waste_data["restafvaldiftardate"], "%Y-%m-%d"
).date()
)
self._total_collections_this_year = waste_data[
"restafvaldiftarcollections"
]
# Get the diftar data
if self.type == "restafval":
for obj in waste_array:
if "restafvaldiftardate" in obj:
self._last_collection_date = str(
datetime.strptime(
obj["restafvaldiftardate"], "%Y-%m-%d"
).date()
)
break
for obj in waste_array:
if "restafvaldiftarcollections" in obj:
self._total_collections_this_year = obj[
"restafvaldiftarcollections"
]
break

# Days until collection date
delta = collection_date - date.today()
self._days_until_collection_date = delta.days

# Only show the value if the date is less than or equal to (today + timespan_in_days)
if collection_date <= date.today() + relativedelta(
days=int(self.timespan_in_days)
# if the date does not contain a named day or month, return the date as normal
if (
self.date_format.find("a") == -1
and self.date_format.find("A") == -1
and self.date_format.find("b") == -1
and self.date_format.find("B") == -1
):
# if the date does not contain a named day or month, return the date as normal
if (
self.date_format.find("a") == -1
and self.date_format.find("A") == -1
and self.date_format.find("b") == -1
and self.date_format.find("B") == -1
):
self._state = collection_date.strftime(
self.date_format
)
# else convert the named values to the locale names
else:
edited_date_format = self.date_format.replace(
"%a", "EEE"
)
edited_date_format = edited_date_format.replace(
"%A", "EEEE"
)
edited_date_format = edited_date_format.replace(
"%b", "MMM"
)
edited_date_format = edited_date_format.replace(
"%B", "MMMM"
)

# half babel, half date string... something like EEEE 04-MMMM-2020
half_babel_half_date = collection_date.strftime(
edited_date_format
)

# replace the digits with quoted digits 01 --> '01'
half_babel_half_date = re.sub(
r"(\d+)", r"'\1'", half_babel_half_date
)
# transform the EEE, EEEE etc... to a real locale date, with babel
locale_date = format_date(
collection_date,
half_babel_half_date,
locale=self.locale,
)

self._state = locale_date
self._state = collection_date.strftime(self.date_format)
break # we have a result, break the loop
# else convert the named values to the locale names
else:
self._hidden = True
edited_date_format = self.date_format.replace(
"%a", "EEE"
)
edited_date_format = edited_date_format.replace(
"%A", "EEEE"
)
edited_date_format = edited_date_format.replace(
"%b", "MMM"
)
edited_date_format = edited_date_format.replace(
"%B", "MMMM"
)

# half babel, half date string... something like EEEE 04-MMMM-2020
half_babel_half_date = collection_date.strftime(
edited_date_format
)

# replace the digits with quoted digits 01 --> '01'
half_babel_half_date = re.sub(
r"(\d+)", r"'\1'", half_babel_half_date
)
# transform the EEE, EEEE etc... to a real locale date, with babel
locale_date = format_date(
collection_date,
half_babel_half_date,
locale=self.locale,
)

self._state = locale_date
break # we have a result, break the loop
else:
# collection_date empty
raise ValueError()
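Note: the branch above localizes dates by combining strftime and babel: strftime renders the numeric fields, the named day/month tokens are mapped to CLDR patterns, digit runs are quoted so babel keeps them as literals, and babel expands the remaining tokens for the configured locale. A condensed sketch of the trick (assumes babel is installed; values are examples only):

import re
from datetime import date

from babel.dates import format_date

def localize(d, date_format, locale):
    # Map strftime's named tokens to their CLDR equivalents.
    pattern = (date_format.replace("%a", "EEE").replace("%A", "EEEE")
               .replace("%b", "MMM").replace("%B", "MMMM"))
    # strftime renders the numeric parts, e.g. "EEEE 04-MMMM-2020".
    half_babel_half_date = d.strftime(pattern)
    # Quote the digits so babel treats them as literals: "EEEE '04'-MMMM-'2020'".
    half_babel_half_date = re.sub(r"(\d+)", r"'\1'", half_babel_half_date)
    # babel expands EEEE/MMMM in the requested locale.
    return format_date(d, half_babel_half_date, locale=locale)

print(localize(date(2020, 8, 4), "%A %d-%B-%Y", "nl"))  # dinsdag 04-augustus-2020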
@@ -402,12 +417,4 @@
raise ValueError()
except ValueError:
self._error = True
# self._state = None
# self._hidden = True
# self._days_until_collection_date = None
# self._year_month_day_date = None
# self._is_collection_date_today = False
# self._last_collection_date = None
# self._total_collections_this_year = None
# self._whole_year_dates = None
self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")

@@ -57,8 +57,11 @@ class AfvalInfoTodaySensor(Entity):
return {ATTR_LAST_UPDATE: self._last_update}

@Throttle(timedelta(minutes=1))
def update(self):
self.data.update()
async def async_update(self):
"""We are calling this often,
but the @Throttle on the data.async_update
will limit the times it will be executed"""
await self.data.async_update()
self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")
# use a tempState to change the real state only on a change...
tempState = self._no_trash_text

@@ -57,8 +57,11 @@ class AfvalInfoTomorrowSensor(Entity):
return {ATTR_LAST_UPDATE: self._last_update}

@Throttle(timedelta(minutes=1))
def update(self):
self.data.update()
async def async_update(self):
"""We are calling this often,
but the @Throttle on the data.async_update
will limit the times it will be executed"""
await self.data.async_update()
self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")
# use a tempState to change the real state only on a change...
tempState = self._no_trash_text

@@ -1,57 +0,0 @@
from datetime import datetime
import re

import requests

from ..common.main_functions import _waste_type_rename
from ..const.const import _LOGGER, SENSOR_COLLECTORS_DEAFVALAPP


def get_waste_data_raw(
provider,
postal_code,
street_number,
suffix,
):
if provider not in SENSOR_COLLECTORS_DEAFVALAPP.keys():
raise ValueError(f"Invalid provider: {provider}, please verify")

corrected_postal_code_parts = re.search(r"(\d\d\d\d) ?([A-z][A-z])", postal_code)
corrected_postal_code = (
corrected_postal_code_parts[1] + corrected_postal_code_parts[2].upper()
)

try:
url = SENSOR_COLLECTORS_DEAFVALAPP[provider].format(
corrected_postal_code,
street_number,
suffix,
)
raw_response = requests.get(url)
except requests.exceptions.RequestException as err:
raise ValueError(err) from err

try:
response = raw_response.text
except ValueError as e:
raise ValueError(f"Invalid and/or no data received from {url}") from e

if not response:
_LOGGER.error("No waste data found!")
return

waste_data_raw = []

for rows in response.strip().split("\n"):
for ophaaldatum in rows.split(";")[1:-1]:
temp = {"type": _waste_type_rename(rows.split(";")[0].strip().lower())}
temp["date"] = datetime.strptime(ophaaldatum, "%d-%m-%Y").strftime(
"%Y-%m-%d"
)
waste_data_raw.append(temp)

return waste_data_raw


if __name__ == "__main__":
print("Yell something at a mountain!")
@@ -1,71 +0,0 @@
from datetime import datetime
import re

import requests

from ..common.main_functions import _waste_type_rename
from ..const.const import _LOGGER, SENSOR_COLLECTORS_ICALENDAR


def get_waste_data_raw(
provider,
postal_code,
street_number,
suffix,
):
if provider not in SENSOR_COLLECTORS_ICALENDAR.keys():
raise ValueError(f"Invalid provider: {provider}, please verify")

DATE_PATTERN = re.compile(r"^\d{8}")

try:
url = SENSOR_COLLECTORS_ICALENDAR[provider].format(
provider,
postal_code,
street_number,
suffix,
datetime.now().strftime("%Y-%m-%d"),
)
raw_response = requests.get(url)
except requests.exceptions.RequestException as err:
raise ValueError(err) from err

try:
response = raw_response.text
except ValueError as exc:
raise ValueError(f"Invalid and/or no data received from {url}") from exc

if not response:
_LOGGER.error("No waste data found!")
return

waste_data_raw = []
date = None
type = None

for line in response.splitlines():
key, value = line.split(":", 2)
field = key.split(";")[0]
if field == "BEGIN" and value == "VEVENT":
date = None
type = None
elif field == "SUMMARY":
type = value.strip().lower()
elif field == "DTSTART":
if DATE_PATTERN.match(value):
date = f"{value[:4]}-{value[4:6]}-{value[6:8]}"
else:
_LOGGER.debug(f"Unsupported date format: {value}")
elif field == "END" and value == "VEVENT":
if date and type:
waste_data_raw.append({"type": type, "date": date})
else:
_LOGGER.debug(
f"No date or type extracted from event: date={date}, type={type}"
)

return waste_data_raw


if __name__ == "__main__":
print("Yell something at a mountain!")
@@ -1,122 +0,0 @@
from ..common.waste_data_transformer import WasteDataTransformer
from ..const.const import (
_LOGGER,
SENSOR_COLLECTORS_AFVALWIJZER,
SENSOR_COLLECTORS_DEAFVALAPP,
SENSOR_COLLECTORS_ICALENDAR,
SENSOR_COLLECTORS_OPZET,
SENSOR_COLLECTORS_RD4,
SENSOR_COLLECTORS_XIMMIO,
)

try:
from . import deafvalapp, icalendar, mijnafvalwijzer, opzet, rd4, ximmio
except ImportError as err:
_LOGGER.error(f"Import error {err.args}")


class MainCollector(object):
def __init__(
self,
provider,
postal_code,
street_number,
suffix,
exclude_pickup_today,
exclude_list,
default_label,
):
self.provider = provider.strip().lower()
self.postal_code = postal_code.strip().upper()
self.street_number = street_number.strip()
self.suffix = suffix.strip().lower()
self.exclude_pickup_today = exclude_pickup_today.strip()
self.exclude_list = exclude_list.strip().lower()
self.default_label = default_label.strip()

try:
if provider in SENSOR_COLLECTORS_AFVALWIJZER:
waste_data_raw = mijnafvalwijzer.get_waste_data_raw(
self.provider,
self.postal_code,
self.street_number,
self.suffix,
)
elif provider in SENSOR_COLLECTORS_DEAFVALAPP.keys():
waste_data_raw = deafvalapp.get_waste_data_raw(
self.provider,
self.postal_code,
self.street_number,
self.suffix,
)
elif provider in SENSOR_COLLECTORS_ICALENDAR.keys():
waste_data_raw = icalendar.get_waste_data_raw(
self.provider,
self.postal_code,
self.street_number,
self.suffix,
)
elif provider in SENSOR_COLLECTORS_OPZET.keys():
waste_data_raw = opzet.get_waste_data_raw(
self.provider,
self.postal_code,
self.street_number,
self.suffix,
)
elif provider in SENSOR_COLLECTORS_RD4.keys():
waste_data_raw = rd4.get_waste_data_raw(
self.provider,
self.postal_code,
self.street_number,
self.suffix,
)
elif provider in SENSOR_COLLECTORS_XIMMIO.keys():
waste_data_raw = ximmio.get_waste_data_raw(
self.provider,
self.postal_code,
self.street_number,
self.suffix,
)
else:
_LOGGER.error(f"Unknown provider: {provider}")
return False

except ValueError as err:
_LOGGER.error(f"Check afvalwijzer platform settings {err.args}")

##########################################################################
# COMMON CODE
##########################################################################
self._waste_data = WasteDataTransformer(
waste_data_raw,
self.exclude_pickup_today,
self.exclude_list,
self.default_label,
)

##########################################################################
# PROPERTIES FOR EXECUTION
##########################################################################
@property
def waste_data_with_today(self):
return self._waste_data.waste_data_with_today

@property
def waste_data_without_today(self):
return self._waste_data.waste_data_without_today

@property
def waste_data_provider(self):
return self._waste_data.waste_data_provider

@property
def waste_types_provider(self):
return self._waste_data.waste_types_provider

@property
def waste_data_custom(self):
return self._waste_data.waste_data_custom

@property
def waste_types_custom(self):
return self._waste_data.waste_types_custom
@@ -1,58 +0,0 @@
from datetime import datetime

import requests

from ..common.main_functions import _waste_type_rename
from ..const.const import (
_LOGGER,
SENSOR_COLLECTOR_TO_URL,
SENSOR_COLLECTORS_AFVALWIJZER,
)


def get_waste_data_raw(
provider,
postal_code,
street_number,
suffix,
):
if provider not in SENSOR_COLLECTORS_AFVALWIJZER:
raise ValueError(f"Invalid provider: {provider}, please verify")

if provider == "rova":
provider = "inzamelkalender.rova"

try:
url = SENSOR_COLLECTOR_TO_URL["afvalwijzer_data_default"][0].format(
provider,
postal_code,
street_number,
suffix,
datetime.now().strftime("%Y-%m-%d"),
)

raw_response = requests.get(url)
except requests.exceptions.RequestException as err:
raise ValueError(err) from err

try:
response = raw_response.json()
except ValueError as e:
raise ValueError(f"Invalid and/or no data received from {url}") from e

if not response:
_LOGGER.error("Address not found!")
return

try:
waste_data_raw = (
response["ophaaldagen"]["data"] + response["ophaaldagenNext"]["data"]
)
except KeyError as exc:
raise KeyError(f"Invalid and/or no data received from {url}") from exc

return waste_data_raw


if __name__ == "__main__":
print("Yell something at a mountain!")
@@ -1,70 +0,0 @@
from datetime import datetime

import requests

from ..common.main_functions import _waste_type_rename
from ..const.const import _LOGGER, SENSOR_COLLECTORS_OPZET


def get_waste_data_raw(
provider,
postal_code,
street_number,
suffix,
):
if provider not in SENSOR_COLLECTORS_OPZET.keys():
raise ValueError(f"Invalid provider: {provider}, please verify")

try:
bag_id = None
suffix = suffix.strip().upper()
_verify = provider != "suez"
url = f"{SENSOR_COLLECTORS_OPZET[provider]}/rest/adressen/{postal_code}-{street_number}"
raw_response = requests.get(url, verify=_verify)
except requests.exceptions.RequestException as err:
raise ValueError(err) from err

try:
response = raw_response.json()
except ValueError as e:
raise ValueError(f"Invalid and/or no data received from {url}") from e

if not response:
_LOGGER.error("No waste data found!")
return

try:
if len(response) > 1 and suffix:
for item in response:
if (
item["huisletter"] == suffix
or item["huisnummerToevoeging"] == suffix
):
bag_id = item["bagId"]
break
else:
bag_id = response[0]["bagId"]

url = f"{SENSOR_COLLECTORS_OPZET[provider]}/rest/adressen/{bag_id}/afvalstromen"
waste_data_raw_temp = requests.get(url, verify=_verify).json()
waste_data_raw = []

for item in waste_data_raw_temp:
if not item["ophaaldatum"]:
continue
waste_type = item["menu_title"]
if not waste_type:
continue
temp = {"type": _waste_type_rename(item["menu_title"].strip().lower())}
temp["date"] = datetime.strptime(item["ophaaldatum"], "%Y-%m-%d").strftime(
"%Y-%m-%d"
)
waste_data_raw.append(temp)
except ValueError as exc:
raise ValueError(f"Invalid and/or no data received from {url}") from exc

return waste_data_raw


if __name__ == "__main__":
print("Yell something at a mountain!")
@@ -1,74 +0,0 @@
from datetime import datetime
import re

import requests

from ..common.main_functions import _waste_type_rename
from ..const.const import _LOGGER, SENSOR_COLLECTORS_RD4


def get_waste_data_raw(
provider,
postal_code,
street_number,
suffix,
):
if provider not in SENSOR_COLLECTORS_RD4.keys():
raise ValueError(f"Invalid provider: {provider}, please verify")

TODAY = datetime.now()
YEAR_CURRENT = TODAY.year

corrected_postal_code_parts = re.search(r"(\d\d\d\d) ?([A-z][A-z])", postal_code)
corrected_postal_code = (
f"{corrected_postal_code_parts[1]}+{corrected_postal_code_parts[2].upper()}"
)

try:
url = SENSOR_COLLECTORS_RD4[provider].format(
corrected_postal_code,
street_number,
suffix,
YEAR_CURRENT,
)
raw_response = requests.get(url)
except requests.exceptions.RequestException as err:
raise ValueError(err) from err

try:
response = raw_response.json()
except ValueError as e:
raise ValueError(f"Invalid and/or no data received from {url}") from e

if not response:
_LOGGER.error("No waste data found!")
return

if not response["success"]:
_LOGGER.error("Address not found!")
return

try:
waste_data_raw_temp = response["data"]["items"][0]
except KeyError as exc:
raise KeyError(f"Invalid and/or no data received from {url}") from exc

waste_data_raw = []

for item in waste_data_raw_temp:
if not item["date"]:
continue

waste_type = item["type"]
if not waste_type:
continue

temp = {"type": _waste_type_rename(item["type"].strip().lower())}
temp["date"] = datetime.strptime(item["date"], "%Y-%m-%d").strftime("%Y-%m-%d")
waste_data_raw.append(temp)

return waste_data_raw


if __name__ == "__main__":
print("Yell something at a mountain!")
@@ -1,83 +0,0 @@
from datetime import datetime, timedelta

import requests

from ..common.main_functions import _waste_type_rename
from ..const.const import _LOGGER, SENSOR_COLLECTOR_TO_URL, SENSOR_COLLECTORS_XIMMIO


def get_waste_data_raw(
    provider,
    postal_code,
    street_number,
    suffix,
):
    if provider not in SENSOR_COLLECTORS_XIMMIO.keys():
        raise ValueError(f"Invalid provider: {provider}, please verify")

    collectors = ("avalex", "meerlanden", "rad", "westland")
    provider_url = "ximmio02" if provider in collectors else "ximmio01"

    TODAY = datetime.now().strftime("%d-%m-%Y")
    DATE_TODAY = datetime.strptime(TODAY, "%d-%m-%Y")
    DATE_TOMORROW = datetime.strptime(TODAY, "%d-%m-%Y") + timedelta(days=1)
    DATE_TODAY_NEXT_YEAR = (DATE_TODAY.date() + timedelta(days=365)).strftime(
        "%Y-%m-%d"
    )

    ##########################################################################
    # First request: get uniqueId and community
    ##########################################################################
    try:
        url = SENSOR_COLLECTOR_TO_URL[provider_url][0]
        companyCode = SENSOR_COLLECTORS_XIMMIO[provider]
        data = {
            "postCode": postal_code,
            "houseNumber": street_number,
            "companyCode": companyCode,
        }
        raw_response = requests.post(url=url, data=data)
        uniqueId = raw_response.json()["dataList"][0]["UniqueId"]
        community = raw_response.json()["dataList"][0]["Community"]
    except requests.exceptions.RequestException as err:
        raise ValueError(err) from err

    ##########################################################################
    # Second request: get the dates
    ##########################################################################
    try:
        url = SENSOR_COLLECTOR_TO_URL[provider_url][1]
        data = {
            "companyCode": companyCode,
            "startDate": DATE_TODAY.date(),
            "endDate": DATE_TODAY_NEXT_YEAR,
            "community": community,
            "uniqueAddressID": uniqueId,
        }
        raw_response = requests.post(url=url, data=data).json()
    except requests.exceptions.RequestException as err:
        raise ValueError(err) from err

    if not raw_response:
        _LOGGER.error("Address not found!")
        return

    try:
        response = raw_response["dataList"]
    except KeyError as e:
        raise KeyError(f"Invalid and/or no data received from {url}") from e

    waste_data_raw = []

    for item in response:
        temp = {"type": _waste_type_rename(item["_pickupTypeText"].strip().lower())}
        temp["date"] = datetime.strptime(
            sorted(item["pickupDates"])[0], "%Y-%m-%dT%H:%M:%S"
        ).strftime("%Y-%m-%d")
        waste_data_raw.append(temp)

    return waste_data_raw


if __name__ == "__main__":
    print("Yell something at a mountain!")
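
For reference, a hedged standalone sketch of the same two-step Ximmio flow: the endpoints and the twentemilieu company code come from const.py further below, while the address and date range are placeholders, and a live network connection is required for it to return anything:

    import requests

    FETCH_ADDRESS = "https://wasteapi.ximmio.com/api/FetchAdress"
    GET_CALENDAR = "https://wasteapi.ximmio.com/api/GetCalendar"
    COMPANY_CODE = "8d97bb56-5afd-4cbc-a651-b4f7314264b4"  # twentemilieu

    # Step 1: resolve the address to a uniqueId and community.
    address = requests.post(
        FETCH_ADDRESS,
        data={"postCode": "1234AB", "houseNumber": "1", "companyCode": COMPANY_CODE},
    ).json()["dataList"][0]

    # Step 2: fetch the pickup calendar for that address.
    calendar = requests.post(
        GET_CALENDAR,
        data={
            "companyCode": COMPANY_CODE,
            "startDate": "2023-06-01",  # placeholder range
            "endDate": "2024-06-01",
            "community": address["Community"],
            "uniqueAddressID": address["UniqueId"],
        },
    ).json()

    for item in calendar.get("dataList", []):
        print(item["_pickupTypeText"], sorted(item["pickupDates"])[0])
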
@@ -1,66 +0,0 @@
from datetime import datetime, timedelta

from ..const.const import _LOGGER


class DaySensorData(object):

    ##########################################################################
    # INIT
    ##########################################################################
    def __init__(
        self,
        waste_data_formatted,
        default_label,
    ):
        TODAY = datetime.now().strftime("%d-%m-%Y")

        self.waste_data_formatted = sorted(
            waste_data_formatted, key=lambda d: d["date"]
        )
        self.today_date = datetime.strptime(TODAY, "%d-%m-%Y")
        self.tomorrow_date = datetime.strptime(TODAY, "%d-%m-%Y") + timedelta(days=1)
        self.day_after_tomorrow_date = datetime.strptime(TODAY, "%d-%m-%Y") + timedelta(
            days=2
        )
        self.default_label = default_label

        self.waste_data_today = self.__gen_day_sensor(self.today_date)
        self.waste_data_tomorrow = self.__gen_day_sensor(self.tomorrow_date)
        self.waste_data_dot = self.__gen_day_sensor(self.day_after_tomorrow_date)

        self.data = self._gen_day_sensor_data()

    ##########################################################################
    # GENERATE TODAY, TOMORROW, DOT SENSOR(S)
    ##########################################################################

    # Generate sensor data per date
    def __gen_day_sensor(self, date):
        day = []
        try:
            for waste in self.waste_data_formatted:
                item_date = waste["date"]
                if item_date == date:
                    item_name = waste["type"]
                    day.append(item_name)
            if not day:
                day.append(self.default_label)
        except Exception as err:
            _LOGGER.error(f"Other error occurred __gen_day_sensor: {err}")
        return day

    # Generate sensor data for today, tomorrow, day after tomorrow
    def _gen_day_sensor_data(self):
        day_sensor = {}
        try:
            day_sensor["today"] = ", ".join(self.waste_data_today)
            day_sensor["tomorrow"] = ", ".join(self.waste_data_tomorrow)
            day_sensor["day_after_tomorrow"] = ", ".join(self.waste_data_dot)
        except Exception as err:
            _LOGGER.error(f"Other error occurred _gen_day_sensor_data: {err}")
        return day_sensor

    @property
    def day_sensor_data(self):
        return self.data
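
DaySensorData matches on exact datetime equality, which works because both the collector and __init__ normalize dates to local midnight; a small usage sketch (the constructor call is left commented since it needs the component package):

    from datetime import datetime, timedelta

    midnight = datetime.strptime(datetime.now().strftime("%d-%m-%Y"), "%d-%m-%Y")
    waste_data_formatted = [
        {"type": "gft", "date": midnight},
        {"type": "papier", "date": midnight + timedelta(days=1)},
    ]

    # DaySensorData(waste_data_formatted, "geen").day_sensor_data
    # -> {"today": "gft", "tomorrow": "papier", "day_after_tomorrow": "geen"}
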
@@ -1,80 +0,0 @@
def _waste_type_rename(item_name):
    # DEAFVALAPP
    if item_name == "gemengde plastics":
        item_name = "plastic"
    if item_name == "zak_blauw":
        item_name = "restafval"
    if item_name == "pbp":
        item_name = "pmd"
    if item_name == "rest":
        item_name = "restafval"
    if item_name == "kerstboom":
        item_name = "kerstbomen"
    # OPZET
    if item_name == "snoeiafval":
        item_name = "takken"
    if item_name == "sloop":
        item_name = "grofvuil"
    if item_name == "groente":
        item_name = "gft"
    if item_name == "groente-, fruit en tuinafval":
        item_name = "gft"
    if item_name == "groente, fruit- en tuinafval":
        item_name = "gft"
    if item_name == "kca":
        item_name = "chemisch"
    if item_name == "tariefzak restafval":
        item_name = "restafvalzakken"
    if item_name == "restafvalzakken":
        item_name = "restafvalzakken"
    if item_name == "rest":
        item_name = "restafval"
    if item_name == "plastic, blik & drinkpakken overbetuwe":
        item_name = "pmd"
    if item_name == "papier en karton":
        item_name = "papier"
    if item_name == "kerstb":
        item_name = "kerstboom"
    # RD4
    if item_name == "pruning":
        item_name = "takken"
    if item_name == "residual_waste":
        item_name = "restafval"
    if item_name == "best_bag":
        item_name = "best-tas"
    if item_name == "paper":
        item_name = "papier"
    if item_name == "christmas_trees":
        item_name = "kerstbomen"
    # XIMMIO
    if item_name == "branches":
        item_name = "takken"
    if item_name == "bulklitter":
        item_name = "grofvuil"
    if item_name == "bulkygardenwaste":
        item_name = "tuinafval"
    if item_name == "glass":
        item_name = "glas"
    if item_name == "green":
        item_name = "gft"
    if item_name == "grey":
        item_name = "restafval"
    if item_name == "kca":
        item_name = "chemisch"
    if item_name == "plastic":
        item_name = "plastic"
    if item_name == "packages":
        item_name = "pmd"
    if item_name == "paper":
        item_name = "papier"
    if item_name == "remainder":
        item_name = "restwagen"
    if item_name == "textile":
        item_name = "textiel"
    if item_name == "tree":
        item_name = "kerstbomen"
    return item_name


if __name__ == "__main__":
    print("Yell something at a mountain!")
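
A possible alternative for the chain above is a single dict lookup; note, though, that the sequential ifs would let a later rule re-map an earlier rule's output, while one lookup applies exactly one mapping. A partial, illustrative sketch (only a subset of the mappings is shown):

    # Partial mapping table; unknown names fall through unchanged, like the original.
    WASTE_TYPE_MAP = {
        "gemengde plastics": "plastic",
        "zak_blauw": "restafval",
        "pbp": "pmd",
        "rest": "restafval",
        "green": "gft",
        "grey": "restafval",
        "packages": "pmd",
        "paper": "papier",
        "tree": "kerstbomen",
    }


    def _waste_type_rename(item_name):
        return WASTE_TYPE_MAP.get(item_name, item_name)
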
@@ -1,76 +0,0 @@
from datetime import datetime

from ..const.const import _LOGGER


class NextSensorData(object):

    ##########################################################################
    # INIT
    ##########################################################################
    def __init__(self, waste_data_after_date_selected, default_label):
        self.waste_data_after_date_selected = sorted(
            waste_data_after_date_selected, key=lambda d: d["date"]
        )

        TODAY = datetime.now().strftime("%d-%m-%Y")
        self.today_date = datetime.strptime(TODAY, "%d-%m-%Y")
        self.default_label = default_label

        self.next_waste_date = self.__get_next_waste_date()
        self.next_waste_in_days = self.__get_next_waste_in_days()
        self.next_waste_type = self.__get_next_waste_type()

        self.data = self._gen_next_sensor_data()

    ##########################################################################
    # GENERATE NEXT SENSOR(S)
    ##########################################################################

    # Generate sensor next_waste_date
    def __get_next_waste_date(self):
        next_waste_date = self.default_label
        try:
            next_waste_date = self.waste_data_after_date_selected[0]["date"]
        except Exception as err:
            _LOGGER.error(f"Other error occurred _get_next_waste_date: {err}")
        return next_waste_date

    # Generate sensor next_waste_in_days
    def __get_next_waste_in_days(self):
        next_waste_in_days = self.default_label
        try:
            next_waste_in_days = abs(self.today_date - self.next_waste_date).days  # type: ignore
        except Exception as err:
            _LOGGER.error(f"Other error occurred _get_next_waste_in_days: {err}")
        return next_waste_in_days

    # Generate sensor next_waste_type
    def __get_next_waste_type(self):
        next_waste_type = []
        try:
            for waste in self.waste_data_after_date_selected:
                item_date = waste["date"]
                if item_date == self.next_waste_date:
                    item_name = waste["type"]
                    next_waste_type.append(item_name)
            if not next_waste_type:
                next_waste_type.append(self.default_label)
        except Exception as err:
            _LOGGER.error(f"Other error occurred _get_next_waste_type: {err}")
        return next_waste_type

    # Generate sensor data for custom sensors
    def _gen_next_sensor_data(self):
        next_sensor = {}
        try:
            next_sensor["next_date"] = self.next_waste_date
            next_sensor["next_in_days"] = self.next_waste_in_days
            next_sensor["next_type"] = ", ".join(self.next_waste_type)
        except Exception as err:
            _LOGGER.error(f"Other error occurred _gen_next_sensor_data: {err}")
        return next_sensor

    @property
    def next_sensor_data(self):
        return self.data
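
A quick check of the day-delta arithmetic used in __get_next_waste_in_days (dates are illustrative):

    from datetime import datetime

    today_date = datetime(2021, 1, 1)
    next_waste_date = datetime(2021, 1, 8)
    print(abs(today_date - next_waste_date).days)  # 7
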
@@ -1,181 +0,0 @@
from datetime import datetime, timedelta

from ..common.day_sensor_data import DaySensorData
from ..common.next_sensor_data import NextSensorData
from ..const.const import _LOGGER

# import sys
# def excepthook(type, value, traceback):
#     _LOGGER.error(value)
# sys.excepthook = excepthook


class WasteDataTransformer(object):

    ##########################################################################
    # INIT
    ##########################################################################
    def __init__(
        self,
        waste_data_raw,
        exclude_pickup_today,
        exclude_list,
        default_label,
    ):
        self.waste_data_raw = waste_data_raw
        self.exclude_pickup_today = exclude_pickup_today
        self.exclude_list = exclude_list.strip().lower()
        self.default_label = default_label

        TODAY = datetime.now().strftime("%d-%m-%Y")
        self.DATE_TODAY = datetime.strptime(TODAY, "%d-%m-%Y")
        self.DATE_TOMORROW = datetime.strptime(TODAY, "%d-%m-%Y") + timedelta(days=1)

        (
            self._waste_data_with_today,
            self._waste_data_without_today,
        ) = self.__structure_waste_data()  # type: ignore

        (
            self._waste_data_provider,
            self._waste_types_provider,
            self._waste_data_custom,
            self._waste_types_custom,
        ) = self.__gen_sensor_waste_data()

    ##########################################################################
    # STRUCTURE ALL WASTE DATA IN CUSTOM FORMAT
    ##########################################################################
    def __structure_waste_data(self):
        try:
            waste_data_with_today = {}
            waste_data_without_today = {}

            for item in self.waste_data_raw:
                item_date = datetime.strptime(item["date"], "%Y-%m-%d")
                item_name = item["type"].strip().lower()
                if (
                    item_name not in self.exclude_list
                    and item_name not in waste_data_with_today
                    and item_date >= self.DATE_TODAY
                ):
                    waste_data_with_today[item_name] = item_date

            for item in self.waste_data_raw:
                item_date = datetime.strptime(item["date"], "%Y-%m-%d")
                item_name = item["type"].strip().lower()
                if (
                    item_name not in self.exclude_list
                    and item_name not in waste_data_without_today
                    and item_date > self.DATE_TODAY
                ):
                    waste_data_without_today[item_name] = item_date

            try:
                for item in self.waste_data_raw:
                    item_name = item["type"].strip().lower()
                    if item_name not in self.exclude_list:
                        if item_name not in waste_data_with_today.keys():
                            waste_data_with_today[item_name] = self.default_label
                        if item_name not in waste_data_without_today.keys():
                            waste_data_without_today[item_name] = self.default_label
            except Exception as err:
                _LOGGER.error(f"Other error occurred: {err}")

            return waste_data_with_today, waste_data_without_today
        except Exception as err:
            _LOGGER.error(f"Other error occurred: {err}")

    ##########################################################################
    # GENERATE REQUIRED DATA FOR HASS SENSORS
    ##########################################################################
    def __gen_sensor_waste_data(self):
        if self.exclude_pickup_today.casefold() in ("false", "no"):
            date_selected = self.DATE_TODAY
            waste_data_provider = self._waste_data_with_today
        else:
            date_selected = self.DATE_TOMORROW
            waste_data_provider = self._waste_data_without_today

        try:
            waste_types_provider = sorted(
                {
                    waste["type"]
                    for waste in self.waste_data_raw
                    if waste["type"] not in self.exclude_list
                }
            )
        except Exception as err:
            _LOGGER.error(f"Other error occurred waste_types_provider: {err}")

        try:
            waste_data_formatted = [
                {
                    "type": waste["type"],
                    "date": datetime.strptime(waste["date"], "%Y-%m-%d"),
                }
                for waste in self.waste_data_raw
                if waste["type"] in waste_types_provider
            ]
        except Exception as err:
            _LOGGER.error(f"Other error occurred waste_data_formatted: {err}")

        days = DaySensorData(waste_data_formatted, self.default_label)

        try:
            waste_data_after_date_selected = list(
                filter(
                    lambda waste: waste["date"] >= date_selected, waste_data_formatted
                )
            )
        except Exception as err:
            _LOGGER.error(f"Other error occurred waste_data_after_date_selected: {err}")

        next_data = NextSensorData(waste_data_after_date_selected, self.default_label)

        try:
            waste_data_custom = {**next_data.next_sensor_data, **days.day_sensor_data}
        except Exception as err:
            _LOGGER.error(f"Other error occurred waste_data_custom: {err}")

        try:
            waste_types_custom = list(sorted(waste_data_custom.keys()))
        except Exception as err:
            _LOGGER.error(f"Other error occurred waste_types_custom: {err}")

        return (
            waste_data_provider,
            waste_types_provider,
            waste_data_custom,
            waste_types_custom,
        )

    ##########################################################################
    # PROPERTIES FOR EXECUTION
    ##########################################################################

    @property
    def waste_data_with_today(self):
        return self._waste_data_with_today

    @property
    def waste_data_without_today(self):
        return self._waste_data_without_today

    @property
    def waste_data_provider(self):
        return self._waste_data_provider

    @property
    def waste_types_provider(self):
        return self._waste_types_provider

    @property
    def waste_data_custom(self):
        return self._waste_data_custom

    @property
    def waste_types_custom(self):
        return self._waste_types_custom
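
Putting it together, the transformer takes the raw collector output and fans it out to the provider and custom sensors; a hedged sketch of the flow (the constructor call is commented out because it needs the component package, and the argument values mirror the platform defaults):

    waste_data_raw = [
        {"type": "gft", "date": "2021-01-02"},
        {"type": "pmd", "date": "2021-01-05"},
    ]

    # transformer = WasteDataTransformer(waste_data_raw, "True", "", "geen")
    # transformer.waste_data_provider  -> {"gft": datetime(...) or "geen", "pmd": ...}
    # transformer.waste_data_custom    -> {"next_date": ..., "next_in_days": ...,
    #                                      "next_type": ..., "today": ...,
    #                                      "tomorrow": ..., "day_after_tomorrow": ...}
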
@@ -1,131 +0,0 @@
from datetime import timedelta
import logging

_LOGGER = logging.getLogger(__name__)

API = "api"
NAME = "afvalwijzer"
VERSION = "2023.01.01"
ISSUE_URL = "https://github.com/xirixiz/homeassistant-afvalwijzer/issues"

SENSOR_COLLECTOR_TO_URL = {
    "afvalwijzer_data_default": [
        "https://api.{0}.nl/webservices/appsinput/?apikey=5ef443e778f41c4f75c69459eea6e6ae0c2d92de729aa0fc61653815fbd6a8ca&method=postcodecheck&postcode={1}&street=&huisnummer={2}&toevoeging={3}&app_name=afvalwijzer&platform=web&afvaldata={4}&langs=nl&"
    ],
    "afvalstoffendienstkalender": [
        "https://{0}.afvalstoffendienstkalender.nl/nl/{1}/{2}/"
    ],
    "afvalstoffendienstkalender-s-hertogenbosch": [
        "https://afvalstoffendienstkalender.nl/nl/{0}/{1}/"
    ],
    "ximmio01": [
        "https://wasteapi.ximmio.com/api/FetchAdress",
        "https://wasteapi.ximmio.com/api/GetCalendar",
    ],
    "ximmio02": [
        "https://wasteprod2api.ximmio.com/api/FetchAdress",
        "https://wasteprod2api.ximmio.com/api/GetCalendar",
    ],
}

SENSOR_COLLECTORS_OPZET = {
    "alkmaar": "https://www.stadswerk072.nl",
    "alphenaandenrijn": "https://afvalkalender.alphenaandenrijn.nl",
    "berkelland": "https://afvalkalender.gemeenteberkelland.nl",
    "blink": "https://mijnblink.nl",
    "cranendonck": "https://afvalkalender.cranendonck.nl",
    "cyclus": "https://afvalkalender.cyclusnv.nl",
    "dar": "https://afvalkalender.dar.nl",
    "denhaag": "https://huisvuilkalender.denhaag.nl",
    "gad": "https://inzamelkalender.gad.nl",
    "hvc": "https://inzamelkalender.hvcgroep.nl",
    "lingewaard": "https://afvalwijzer.lingewaard.nl",
    "middelburg-vlissingen": "https://afvalwijzer.middelburgvlissingen.nl",
    "montfoort": "https://afvalkalender.cyclusnv.nl",
    "peelenmaas": "https://afvalkalender.peelenmaas.nl",
    "prezero": "https://inzamelwijzer.prezero.nl",
    "purmerend": "https://afvalkalender.purmerend.nl",
    "rmn": "https://inzamelschema.rmn.nl",
    "schouwen-duiveland": "https://afvalkalender.schouwen-duiveland.nl",
    "spaarnelanden": "https://afvalwijzer.spaarnelanden.nl",
    "sudwestfryslan": "https://afvalkalender.sudwestfryslan.nl",
    "suez": "https://inzamelwijzer.prezero.nl",
    "venray": "https://afvalkalender.venray.nl",
    "voorschoten": "https://afvalkalender.voorschoten.nl",
    "waalre": "https://afvalkalender.waalre.nl",
    "zrd": "https://afvalkalender.zrd.nl",
}

SENSOR_COLLECTORS_ICALENDAR = {
    "eemsdelta": "https://www.eemsdelta.nl/trash-calendar/download/{1}/{2}",
}

SENSOR_COLLECTORS_AFVALWIJZER = [
    "mijnafvalwijzer",
    "afvalstoffendienstkalender",
    "afvalstoffendienstkalender-s-hertogenbosch",
    "rova",
]

SENSOR_COLLECTORS_XIMMIO = {
    "acv": "f8e2844a-095e-48f9-9f98-71fceb51d2c3",
    "almere": "53d8db94-7945-42fd-9742-9bbc71dbe4c1",
    "areareiniging": "adc418da-d19b-11e5-ab30-625662870761",
    "avalex": "f7a74ad1-fdbf-4a43-9f91-44644f4d4222",
    "avri": "78cd4156-394b-413d-8936-d407e334559a",
    "bar": "bb58e633-de14-4b2a-9941-5bc419f1c4b0",
    "hellendoorn": "24434f5b-7244-412b-9306-3a2bd1e22bc1",
    "meerlanden": "800bf8d7-6dd1-4490-ba9d-b419d6dc8a45",
    "meppel": "b7a594c7-2490-4413-88f9-94749a3ec62a",
    "rad": "13a2cad9-36d0-4b01-b877-efcb421a864d",
    "twentemilieu": "8d97bb56-5afd-4cbc-a651-b4f7314264b4",
    "waardlanden": "942abcf6-3775-400d-ae5d-7380d728b23c",
    "westland": "6fc75608-126a-4a50-9241-a002ce8c8a6c",
    "ximmio": "800bf8d7-6dd1-4490-ba9d-b419d6dc8a45",
    "reinis": "9dc25c8a-175a-4a41-b7a1-83f237a80b77",
}

SENSOR_COLLECTORS_RD4 = {
    "rd4": "https://data.rd4.nl/api/v1/waste-calendar?postal_code={0}&house_number={1}&house_number_extension={2}&year={3}",
}

SENSOR_COLLECTORS_DEAFVALAPP = {
    "deafvalapp": "https://dataservice.deafvalapp.nl/dataservice/DataServiceServlet?service=OPHAALSCHEMA&land=NL&postcode={0}&straatId=0&huisnr={1}&huisnrtoev={2}",
}

CONF_COLLECTOR = "provider"
CONF_API_TOKEN = "api_token"
CONF_POSTAL_CODE = "postal_code"
CONF_STREET_NUMBER = "street_number"
CONF_SUFFIX = "suffix"
CONF_DATE_FORMAT = "date_format"
CONF_EXCLUDE_PICKUP_TODAY = "exclude_pickup_today"
CONF_DEFAULT_LABEL = "default_label"
CONF_ID = "id"
CONF_EXCLUDE_LIST = "exclude_list"

SENSOR_PREFIX = "afvalwijzer "
SENSOR_ICON = "mdi:recycle"

ATTR_LAST_UPDATE = "last_update"
ATTR_IS_COLLECTION_DATE_TODAY = "is_collection_date_today"
ATTR_IS_COLLECTION_DATE_TOMORROW = "is_collection_date_tomorrow"
ATTR_IS_COLLECTION_DATE_DAY_AFTER_TOMORROW = "is_collection_date_day_after_tomorrow"
ATTR_DAYS_UNTIL_COLLECTION_DATE = "days_until_collection_date"
ATTR_YEAR_MONTH_DAY_DATE = "year_month_day_date"

MIN_TIME_BETWEEN_UPDATES = timedelta(hours=1)
PARALLEL_UPDATES = 1
SCAN_INTERVAL = timedelta(seconds=30)

DOMAIN = "afvalwijzer"
DOMAIN_DATA = "afvalwijzer_data"

STARTUP_MESSAGE = f"""
-------------------------------------------------------------------,
Afvalwijzer - {VERSION},
This is a custom integration!,
If you have any issues with this you need to open an issue here:,
https://github.com/xirixiz/homeassistant-afvalwijzer/issues,
-------------------------------------------------------------------,
"""
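
The templated entries in SENSOR_COLLECTOR_TO_URL are filled in with str.format; for example (all arguments are placeholders):

    template = "https://{0}.afvalstoffendienstkalender.nl/nl/{1}/{2}/"
    print(template.format("region", "1234AB", "1"))
    # https://region.afvalstoffendienstkalender.nl/nl/1234AB/1/
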
@@ -1,14 +0,0 @@
{
    "domain": "afvalwijzer",
    "name": "Afvalwijzer",
    "version": "2023.01.01",
    "iot_class": "cloud_polling",
    "documentation": "https://github.com/xirixiz/homeassistant-afvalwijzer/blob/master/README.md",
    "issue_tracker": "https://github.com/xirixiz/homeassistant-afvalwijzer/issues",
    "config_flow": false,
    "dependencies": [],
    "codeowners": [
        "@xirixiz"
    ],
    "requirements": []
}
@@ -1,147 +0,0 @@
#!/usr/bin/env python3
"""
Sensor component Afvalwijzer
Author: Bram van Dartel - xirixiz
"""

from functools import partial

from homeassistant.components.sensor import PLATFORM_SCHEMA
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
import voluptuous as vol

from .collector.main_collector import MainCollector
from .const.const import (
    _LOGGER,
    CONF_COLLECTOR,
    CONF_DEFAULT_LABEL,
    CONF_EXCLUDE_LIST,
    CONF_EXCLUDE_PICKUP_TODAY,
    CONF_ID,
    CONF_POSTAL_CODE,
    CONF_STREET_NUMBER,
    CONF_SUFFIX,
    MIN_TIME_BETWEEN_UPDATES,
    PARALLEL_UPDATES,
    SCAN_INTERVAL,
    STARTUP_MESSAGE,
)
from .sensor_custom import CustomSensor
from .sensor_provider import ProviderSensor

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Optional(CONF_COLLECTOR, default="mijnafvalwijzer"): cv.string,
        vol.Required(CONF_POSTAL_CODE, default="1234AB"): cv.string,
        vol.Required(CONF_STREET_NUMBER, default="5"): cv.string,
        vol.Optional(CONF_SUFFIX, default=""): cv.string,
        vol.Optional(CONF_EXCLUDE_PICKUP_TODAY, default="true"): cv.string,
        vol.Optional(CONF_EXCLUDE_LIST, default=""): cv.string,
        vol.Optional(CONF_DEFAULT_LABEL, default="geen"): cv.string,
        vol.Optional(CONF_ID.strip().lower(), default=""): cv.string,
    }
)

_LOGGER.info(STARTUP_MESSAGE)


async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    provider = config.get(CONF_COLLECTOR)
    postal_code = config.get(CONF_POSTAL_CODE)
    street_number = config.get(CONF_STREET_NUMBER)
    suffix = config.get(CONF_SUFFIX)
    exclude_pickup_today = config.get(CONF_EXCLUDE_PICKUP_TODAY)
    exclude_list = config.get(CONF_EXCLUDE_LIST)
    default_label = config.get(CONF_DEFAULT_LABEL)

    _LOGGER.debug(f"Afvalwijzer provider = {provider}")
    _LOGGER.debug(f"Afvalwijzer zipcode = {postal_code}")
    _LOGGER.debug(f"Afvalwijzer street_number = {street_number}")

    try:
        collector = await hass.async_add_executor_job(
            partial(
                MainCollector,
                provider,
                postal_code,
                street_number,
                suffix,
                exclude_pickup_today,
                exclude_list,
                default_label,
            )
        )
    except ValueError as err:
        _LOGGER.error(f"Check afvalwijzer platform settings {err.args}")

    fetch_data = AfvalwijzerData(hass, config)

    waste_types_provider = collector.waste_types_provider
    _LOGGER.debug(f"Generating waste_types_provider list = {waste_types_provider}")
    waste_types_custom = collector.waste_types_custom
    _LOGGER.debug(f"Generating waste_types_custom list = {waste_types_custom}")

    entities = []

    for waste_type in waste_types_provider:
        _LOGGER.debug(f"Adding sensor provider: {waste_type}")
        entities.append(ProviderSensor(hass, waste_type, fetch_data, config))
    for waste_type in waste_types_custom:
        _LOGGER.debug(f"Adding sensor custom: {waste_type}")
        entities.append(CustomSensor(hass, waste_type, fetch_data, config))

    _LOGGER.debug(f"Entities appended = {entities}")
    async_add_entities(entities)


class AfvalwijzerData(object):
    def __init__(self, hass, config):
        self._hass = hass
        self.config = config

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        provider = self.config.get(CONF_COLLECTOR)
        postal_code = self.config.get(CONF_POSTAL_CODE)
        street_number = self.config.get(CONF_STREET_NUMBER)
        suffix = self.config.get(CONF_SUFFIX)
        exclude_pickup_today = self.config.get(CONF_EXCLUDE_PICKUP_TODAY)
        default_label = self.config.get(CONF_DEFAULT_LABEL)
        exclude_list = self.config.get(CONF_EXCLUDE_LIST)

        try:
            collector = MainCollector(
                provider,
                postal_code,
                street_number,
                suffix,
                exclude_pickup_today,
                exclude_list,
                default_label,
            )
        except ValueError as err:
            _LOGGER.error(f"Check afvalwijzer platform settings {err.args}")

        # waste data provider update - with today
        try:
            self.waste_data_with_today = collector.waste_data_with_today
        except ValueError as err:
            _LOGGER.error(f"Check waste_data_provider {err.args}")
            self.waste_data_with_today = default_label

        # waste data provider update - without today
        try:
            self.waste_data_without_today = collector.waste_data_without_today
        except ValueError as err:
            _LOGGER.error(f"Check waste_data_provider {err.args}")
            self.waste_data_without_today = default_label

        # waste data custom update
        try:
            self.waste_data_custom = collector.waste_data_custom
        except ValueError as err:
            _LOGGER.error(f"Check waste_data_custom {err.args}")
            self.waste_data_custom = default_label
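
AfvalwijzerData.update is wrapped in Home Assistant's Throttle with MIN_TIME_BETWEEN_UPDATES (one hour), so however many sensors call it, the backend is fetched at most once per hour. A minimal standalone sketch of that throttling idea (not HA's actual implementation):

    from datetime import datetime, timedelta


    def throttle(interval):
        """Turn calls arriving within `interval` of the last real call into no-ops."""

        def decorator(func):
            last_call = None

            def wrapper(*args, **kwargs):
                nonlocal last_call
                now = datetime.now()
                if last_call is not None and now - last_call < interval:
                    return None  # throttled
                last_call = now
                return func(*args, **kwargs)

            return wrapper

        return decorator


    @throttle(timedelta(hours=1))
    def update():
        return "fetched"


    print(update())  # fetched
    print(update())  # None (throttled)
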
@@ -1,106 +0,0 @@
#!/usr/bin/env python3
from datetime import datetime
import hashlib

from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle

from .const.const import (
    _LOGGER,
    ATTR_LAST_UPDATE,
    ATTR_YEAR_MONTH_DAY_DATE,
    CONF_DEFAULT_LABEL,
    CONF_ID,
    CONF_POSTAL_CODE,
    CONF_STREET_NUMBER,
    CONF_SUFFIX,
    MIN_TIME_BETWEEN_UPDATES,
    PARALLEL_UPDATES,
    SENSOR_ICON,
    SENSOR_PREFIX,
)


class CustomSensor(Entity):
    def __init__(self, hass, waste_type, fetch_data, config):
        self.hass = hass
        self.waste_type = waste_type
        self.fetch_data = fetch_data
        self.config = config
        self._id_name = self.config.get(CONF_ID)
        self._default_label = self.config.get(CONF_DEFAULT_LABEL)
        self._last_update = None
        self._name = (
            SENSOR_PREFIX + (f"{self._id_name} " if len(self._id_name) > 0 else "")
        ) + self.waste_type

        self._state = self.config.get(CONF_DEFAULT_LABEL)
        self._icon = SENSOR_ICON
        self._year_month_day_date = None
        self._unique_id = hashlib.sha1(
            f"{self.waste_type}{self.config.get(CONF_ID)}{self.config.get(CONF_POSTAL_CODE)}{self.config.get(CONF_STREET_NUMBER)}{self.config.get(CONF_SUFFIX,'')}".encode(
                "utf-8"
            )
        ).hexdigest()

    @property
    def name(self):
        return self._name

    @property
    def unique_id(self):
        return self._unique_id

    @property
    def icon(self):
        return self._icon

    @property
    def state(self):
        return self._state

    @property
    def extra_state_attributes(self):
        if self._year_month_day_date is not None:
            return {
                ATTR_LAST_UPDATE: self._last_update,
                ATTR_YEAR_MONTH_DAY_DATE: self._year_month_day_date,
            }
        else:
            return {
                ATTR_LAST_UPDATE: self._last_update,
            }

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    async def async_update(self):
        await self.hass.async_add_executor_job(self.fetch_data.update)

        waste_data_custom = self.fetch_data.waste_data_custom

        try:
            # Add attribute, set the last updated status of the sensor
            self._last_update = datetime.now().strftime("%d-%m-%Y %H:%M")

            if isinstance(waste_data_custom[self.waste_type], datetime):
                _LOGGER.debug(
                    f"Generating state via AfvalwijzerCustomSensor for = {self.waste_type} with value {waste_data_custom[self.waste_type].date()}"
                )
                # Add the US date format
                collection_date_us = waste_data_custom[self.waste_type].date()
                self._year_month_day_date = str(collection_date_us)

                # Add the NL date format as default state
                self._state = datetime.strftime(
                    waste_data_custom[self.waste_type].date(), "%d-%m-%Y"
                )
            else:
                _LOGGER.debug(
                    f"Generating state via AfvalwijzerCustomSensor for = {self.waste_type} with value {waste_data_custom[self.waste_type]}"
                )
                # Add non-date as default state
                self._state = str(waste_data_custom[self.waste_type])
        except ValueError:
            _LOGGER.debug("ValueError AfvalwijzerCustomSensor - unable to set value!")
            self._state = self._default_label
            self._year_month_day_date = None
            self._last_update = datetime.now().strftime("%d-%m-%Y %H:%M")
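
The sensor's unique_id is a SHA-1 over the waste type plus the address settings, so it stays stable across restarts for the same configuration; a standalone check (values are placeholders):

    import hashlib

    waste_type, conf_id, postal_code, street_number, suffix = "gft", "", "1234AB", "5", ""
    unique_id = hashlib.sha1(
        f"{waste_type}{conf_id}{postal_code}{street_number}{suffix}".encode("utf-8")
    ).hexdigest()
    print(unique_id)
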
@@ -1,138 +0,0 @@
#!/usr/bin/env python3
from datetime import date, datetime, timedelta
import hashlib

from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle

from .const.const import (
    _LOGGER,
    ATTR_DAYS_UNTIL_COLLECTION_DATE,
    ATTR_IS_COLLECTION_DATE_DAY_AFTER_TOMORROW,
    ATTR_IS_COLLECTION_DATE_TODAY,
    ATTR_IS_COLLECTION_DATE_TOMORROW,
    ATTR_LAST_UPDATE,
    ATTR_YEAR_MONTH_DAY_DATE,
    CONF_DEFAULT_LABEL,
    CONF_EXCLUDE_PICKUP_TODAY,
    CONF_ID,
    CONF_POSTAL_CODE,
    CONF_STREET_NUMBER,
    CONF_SUFFIX,
    MIN_TIME_BETWEEN_UPDATES,
    PARALLEL_UPDATES,
    SENSOR_ICON,
    SENSOR_PREFIX,
)


class ProviderSensor(Entity):
    def __init__(self, hass, waste_type, fetch_data, config):
        self.hass = hass
        self.waste_type = waste_type
        self.fetch_data = fetch_data
        self.config = config
        self._id_name = self.config.get(CONF_ID)
        self._default_label = self.config.get(CONF_DEFAULT_LABEL)
        self._exclude_pickup_today = self.config.get(CONF_EXCLUDE_PICKUP_TODAY)
        self._name = (
            SENSOR_PREFIX
            + (self._id_name + " " if len(self._id_name) > 0 else "")
            + self.waste_type
        )
        self._icon = SENSOR_ICON
        self._state = self.config.get(CONF_DEFAULT_LABEL)
        self._last_update = None
        self._days_until_collection_date = None
        self._is_collection_date_today = False
        self._is_collection_date_tomorrow = False
        self._is_collection_date_day_after_tomorrow = False
        self._year_month_day_date = None
        self._unique_id = hashlib.sha1(
            f"{self.waste_type}{self.config.get(CONF_ID)}{self.config.get(CONF_POSTAL_CODE)}{self.config.get(CONF_STREET_NUMBER)}{self.config.get(CONF_SUFFIX,'')}".encode(
                "utf-8"
            )
        ).hexdigest()

    @property
    def name(self):
        return self._name

    @property
    def unique_id(self):
        return self._unique_id

    @property
    def icon(self):
        return self._icon

    @property
    def state(self):
        return self._state

    @property
    def extra_state_attributes(self):
        return {
            ATTR_LAST_UPDATE: self._last_update,
            ATTR_DAYS_UNTIL_COLLECTION_DATE: self._days_until_collection_date,
            ATTR_IS_COLLECTION_DATE_TODAY: self._is_collection_date_today,
            ATTR_IS_COLLECTION_DATE_TOMORROW: self._is_collection_date_tomorrow,
            ATTR_IS_COLLECTION_DATE_DAY_AFTER_TOMORROW: self._is_collection_date_day_after_tomorrow,
            ATTR_YEAR_MONTH_DAY_DATE: self._year_month_day_date,
        }

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    async def async_update(self):
        await self.hass.async_add_executor_job(self.fetch_data.update)

        if self._exclude_pickup_today.casefold() in ("false", "no"):
            waste_data_provider = self.fetch_data.waste_data_with_today
        else:
            waste_data_provider = self.fetch_data.waste_data_without_today

        try:
            if not waste_data_provider or self.waste_type not in waste_data_provider:
                raise ValueError
            # Add attribute, set the last updated status of the sensor
            self._last_update = datetime.now().strftime("%d-%m-%Y %H:%M")

            if isinstance(waste_data_provider[self.waste_type], datetime):
                _LOGGER.debug(
                    f"Generating state via AfvalwijzerProviderSensor for = {self.waste_type} with value {waste_data_provider[self.waste_type].date()}"
                )
                # Add the US date format
                collection_date_us = waste_data_provider[self.waste_type].date()
                self._year_month_day_date = str(collection_date_us)

                # Add the days until the collection date
                delta = collection_date_us - date.today()
                self._days_until_collection_date = delta.days

                # Check if the collection days are in today, tomorrow and/or the day after tomorrow
                self._is_collection_date_today = date.today() == collection_date_us
                self._is_collection_date_tomorrow = (
                    date.today() + timedelta(days=1) == collection_date_us
                )
                self._is_collection_date_day_after_tomorrow = (
                    date.today() + timedelta(days=2) == collection_date_us
                )

                # Add the NL date format as default state
                self._state = datetime.strftime(
                    waste_data_provider[self.waste_type].date(), "%d-%m-%Y"
                )
            else:
                _LOGGER.debug(
                    f"Generating state via AfvalwijzerProviderSensor for = {self.waste_type} with value {waste_data_provider[self.waste_type]}"
                )
                # Add non-date as default state
                self._state = str(waste_data_provider[self.waste_type])
        except ValueError:
            _LOGGER.debug("ValueError AfvalwijzerProviderSensor - unable to set value!")
            self._state = self._default_label
            self._days_until_collection_date = None
            self._year_month_day_date = None
            self._is_collection_date_today = False
            self._is_collection_date_tomorrow = False
            self._is_collection_date_day_after_tomorrow = False
            self._last_update = datetime.now().strftime("%d-%m-%Y %H:%M")
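
The attributes above reduce to plain date arithmetic; a quick check with a collection date set to tomorrow (illustrative):

    from datetime import date, timedelta

    collection_date_us = date.today() + timedelta(days=1)
    print((collection_date_us - date.today()).days)                # days_until_collection_date: 1
    print(date.today() == collection_date_us)                      # is_collection_date_today: False
    print(date.today() + timedelta(days=1) == collection_date_us)  # is_collection_date_tomorrow: True
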
@@ -1,397 +0,0 @@
[
  {"nameType": "gft", "type": "gft", "date": "2021-01-02"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-01-05"},
  {"nameType": "restafval", "type": "restafval", "date": "2021-01-08"},
  {"nameType": "kerstbomen", "type": "kerstbomen", "date": "2021-01-09"},
  {"nameType": "gft", "type": "gft", "date": "2021-01-15"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-01-19"},
  {"nameType": "papier", "type": "papier", "date": "2021-01-20"},
  {"nameType": "gft", "type": "gft", "date": "2021-01-29"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-02-02"},
  {"nameType": "restafval", "type": "restafval", "date": "2021-02-05"},
  {"nameType": "gft", "type": "gft", "date": "2021-02-12"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-02-16"},
  {"nameType": "papier", "type": "papier", "date": "2021-02-17"},
  {"nameType": "gft", "type": "gft", "date": "2021-02-26"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-03-02"},
  {"nameType": "restafval", "type": "restafval", "date": "2021-03-05"},
  {"nameType": "gft", "type": "gft", "date": "2021-03-12"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-03-16"},
  {"nameType": "papier", "type": "papier", "date": "2021-03-17"},
  {"nameType": "gft", "type": "gft", "date": "2021-03-26"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-03-30"},
  {"nameType": "restafval", "type": "restafval", "date": "2021-04-02"},
  {"nameType": "gft", "type": "gft", "date": "2021-04-09"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-04-13"},
  {"nameType": "papier", "type": "papier", "date": "2021-04-21"},
  {"nameType": "gft", "type": "gft", "date": "2021-04-23"},
  {"nameType": "restafval", "type": "restafval", "date": "2021-04-30"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-04-30"},
  {"nameType": "gft", "type": "gft", "date": "2021-05-07"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-05-11"},
  {"nameType": "papier", "type": "papier", "date": "2021-05-19"},
  {"nameType": "gft", "type": "gft", "date": "2021-05-21"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-05-25"},
  {"nameType": "restafval", "type": "restafval", "date": "2021-05-28"},
  {"nameType": "gft", "type": "gft", "date": "2021-06-04"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-06-08"},
  {"nameType": "papier", "type": "papier", "date": "2021-06-16"},
  {"nameType": "gft", "type": "gft", "date": "2021-06-18"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-06-22"},
  {"nameType": "restafval", "type": "restafval", "date": "2021-06-25"},
  {"nameType": "gft", "type": "gft", "date": "2021-07-02"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-07-06"},
  {"nameType": "gft", "type": "gft", "date": "2021-07-16"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-07-20"},
  {"nameType": "papier", "type": "papier", "date": "2021-07-21"},
  {"nameType": "restafval", "type": "restafval", "date": "2021-07-23"},
  {"nameType": "gft", "type": "gft", "date": "2021-07-30"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-08-03"},
  {"nameType": "gft", "type": "gft", "date": "2021-08-13"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-08-17"},
  {"nameType": "papier", "type": "papier", "date": "2021-08-18"},
  {"nameType": "restafval", "type": "restafval", "date": "2021-08-20"},
  {"nameType": "gft", "type": "gft", "date": "2021-08-27"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-08-31"},
  {"nameType": "gft", "type": "gft", "date": "2021-09-10"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-09-14"},
  {"nameType": "papier", "type": "papier", "date": "2021-09-15"},
  {"nameType": "restafval", "type": "restafval", "date": "2021-09-17"},
  {"nameType": "gft", "type": "gft", "date": "2021-09-24"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-09-28"},
  {"nameType": "gft", "type": "gft", "date": "2021-10-08"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-10-12"},
  {"nameType": "restafval", "type": "restafval", "date": "2021-10-15"},
  {"nameType": "papier", "type": "papier", "date": "2021-10-20"},
  {"nameType": "gft", "type": "gft", "date": "2021-10-22"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-10-26"},
  {"nameType": "gft", "type": "gft", "date": "2021-11-05"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-11-09"},
  {"nameType": "restafval", "type": "restafval", "date": "2021-11-12"},
  {"nameType": "papier", "type": "papier", "date": "2021-11-17"},
  {"nameType": "gft", "type": "gft", "date": "2021-11-19"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-11-19"},
  {"nameType": "gft", "type": "gft", "date": "2021-12-03"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-12-07"},
  {"nameType": "restafval", "type": "restafval", "date": "2021-12-10"},
  {"nameType": "papier", "type": "papier", "date": "2021-12-15"},
  {"nameType": "gft", "type": "gft", "date": "2021-12-17"},
  {"nameType": "pmd", "type": "pmd", "date": "2021-12-21"},
  {"nameType": "gft", "type": "gft", "date": "2021-12-31"}
]
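
A fixture like this can be replayed through the date logic directly; a sketch that picks the next upcoming pickup (the fixture path is hypothetical, and for this 2021 data the fallback label is what would actually print today):

    import json
    from datetime import date, datetime

    with open("waste_data_raw.json") as f:  # hypothetical fixture path
        items = json.load(f)

    today = date.today()
    upcoming = sorted(
        (datetime.strptime(item["date"], "%Y-%m-%d").date(), item["type"])
        for item in items
        if datetime.strptime(item["date"], "%Y-%m-%d").date() >= today
    )
    print(upcoming[0] if upcoming else "geen")
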
@@ -1,115 +0,0 @@
#!/usr/bin/env python3
"""
Sensor component for AfvalDienst
Author: Bram van Dartel - xirixiz

import afvalwijzer
from afvalwijzer.collector.mijnafvalwijzer import AfvalWijzer
AfvalWijzer().get_data('','','')

python3 -m afvalwijzer.tests.test_module

"""


from ..collector.main_collector import MainCollector

# provider = "afvalstoffendienstkalender"
# api_token = "5ef443e778f41c4f75c69459eea6e6ae0c2d92de729aa0fc61653815fbd6a8ca"

# Afvalstoffendienstkalender
# postal_code = "5391KE"
# street_number = "1"

# Common
suffix = ""
exclude_pickup_today = "True"
default_label = "geen"
exclude_list = ""

# DeAfvalapp
# provider = "deafvalapp"
# postal_code = "6105CN"
# street_number = "1"

# Icalendar
# provider = "eemsdelta"
# postal_code = "9991AB"
# street_number = "2"

# Afvalwijzer
# provider = "mijnafvalwijzer"
# postal_code = "5146eg"
# street_number = "1"

provider = "rmn"
postal_code = "3701XK"
street_number = "24"
suffix = "b"

# Opzet
# provider = "prezero"
# postal_code = "6665CN"
# street_number = "1"

# RD4
# provider = "rd4"
# postal_code = "6301ET"
# street_number = "24"
# suffix = "C"

# Ximmio
# provider = "meerlanden"
# postal_code = "2121xt"
# street_number = "38"

# Ximmio
# provider = "acv"
# postal_code = "6713CG"
# street_number = "11"

# postal_code = postal_code.strip().upper()

collector = MainCollector(
    provider,
    postal_code,
    street_number,
    suffix,
    exclude_pickup_today,
    exclude_list,
    default_label,
)


# MainCollector(
#     provider,
#     postal_code,
#     street_number,
#     suffix,
#     exclude_pickup_today,
#     exclude_list,
#     default_label,
# )

# data = XimmioCollector().get_waste_data_provider("meerlanden", postal_code2, street_number2, suffix, default_label, exclude_list)
# data2 = MijnAfvalWijzerCollector().get_waste_data_provider("mijnafvalwijzer", postal_code, street_number, suffix, default_label, exclude_list)


#########################################################################################################
print("\n")

print(collector.waste_data_with_today)
print(collector.waste_data_without_today)
print(collector.waste_data_custom)
print(collector.waste_types_provider)
print(collector.waste_types_custom)

print("\n")

# for key, value in afval1.items():
#     print(key, value)

# print("\n")

# for key, value in afval2.items():
#     print(key, value)
@@ -160,9 +160,9 @@ class HacsCommon:

    categories: set[str] = field(default_factory=set)
    renamed_repositories: dict[str, str] = field(default_factory=dict)
    archived_repositories: list[str] = field(default_factory=list)
    ignored_repositories: list[str] = field(default_factory=list)
    skip: list[str] = field(default_factory=list)
    archived_repositories: set[str] = field(default_factory=set)
    ignored_repositories: set[str] = field(default_factory=set)
    skip: set[str] = field(default_factory=set)


@dataclass
@@ -197,20 +197,20 @@ class HacsRepositories:
    """HACS Repositories."""

    _default_repositories: set[str] = field(default_factory=set)
    _repositories: list[HacsRepository] = field(default_factory=list)
    _repositories: set[HacsRepository] = field(default_factory=set)
    _repositories_by_full_name: dict[str, HacsRepository] = field(default_factory=dict)
    _repositories_by_id: dict[str, HacsRepository] = field(default_factory=dict)
    _removed_repositories: list[RemovedRepository] = field(default_factory=list)
    _removed_repositories_by_full_name: dict[str, RemovedRepository] = field(default_factory=dict)

    @property
    def list_all(self) -> list[HacsRepository]:
        """Return a list of repositories."""
        return self._repositories
        return list(self._repositories)

    @property
    def list_removed(self) -> list[RemovedRepository]:
        """Return a list of removed repositories."""
        return self._removed_repositories
        return list(self._removed_repositories_by_full_name.values())

    @property
    def list_downloaded(self) -> list[HacsRepository]:
@@ -235,7 +235,7 @@ class HacsRepositories:
            repository = registered_repo

        if repository not in self._repositories:
            self._repositories.append(repository)
            self._repositories.add(repository)

        self._repositories_by_id[repo_id] = repository
        self._repositories_by_full_name[repository.data.full_name_lower] = repository
@@ -333,22 +333,15 @@ class HacsRepositories:

    def is_removed(self, repository_full_name: str) -> bool:
        """Check if a repository is removed."""
        return repository_full_name in (
            repository.repository for repository in self._removed_repositories
        )
        return repository_full_name in self._removed_repositories_by_full_name

    def removed_repository(self, repository_full_name: str) -> RemovedRepository:
        """Get repository by full name."""
        if self.is_removed(repository_full_name):
            if removed := [
                repository
                for repository in self._removed_repositories
                if repository.repository == repository_full_name
            ]:
                return removed[0]
        if removed := self._removed_repositories_by_full_name.get(repository_full_name):
            return removed

        removed = RemovedRepository(repository=repository_full_name)
        self._removed_repositories.append(removed)
        self._removed_repositories_by_full_name[repository_full_name] = removed
        return removed
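
The list-to-set change above only works if HacsRepository instances are hashable and comparable; a minimal sketch of the dedup behavior a set buys (not HACS's actual classes):

    class Repo:
        """Stand-in for a repository object identified by its full name."""

        def __init__(self, full_name):
            self.full_name = full_name

        def __hash__(self):
            return hash(self.full_name)

        def __eq__(self, other):
            return isinstance(other, Repo) and self.full_name == other.full_name


    repos = set()
    repos.add(Repo("hacs/integration"))
    repos.add(Repo("hacs/integration"))  # duplicate is ignored
    print(len(repos))  # 1
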
@@ -457,7 +450,9 @@ class HacsBase:

        try:
            await self.hass.async_add_executor_job(_write_file)
        except BaseException as error:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        except (
            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        ) as error:
            self.log.error("Could not write data to %s - %s", file_path, error)
            return False

@@ -476,7 +471,9 @@ class HacsBase:
                f"{reset.hour}:{reset.minute}:{reset.second}",
            )
            self.disable_hacs(HacsDisabledReason.RATE_LIMIT)
        except BaseException as exception:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        except (
            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        ) as exception:
            self.log.exception(exception)

        return 0
@@ -515,7 +512,9 @@ class HacsBase:
            raise exception
        except GitHubException as exception:
            _exception = exception
        except BaseException as exception:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        except (
            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        ) as exception:
            self.log.exception(exception)
            _exception = exception
@@ -547,7 +546,12 @@ class HacsBase:
            raise AddonRepositoryException()

        if category not in RERPOSITORY_CLASSES:
            raise HacsException(f"{category} is not a valid repository category.")
            self.log.warning(
                "%s is not a valid repository category, %s will not be registered.",
                category,
                repository_full_name,
            )
            return

        if (renamed := self.common.renamed_repositories.get(repository_full_name)) is not None:
            repository_full_name = renamed
@@ -557,7 +561,7 @@ class HacsBase:
        try:
            await repository.async_registration(ref)
            if repository.validate.errors:
                self.common.skip.append(repository.data.full_name)
                self.common.skip.add(repository.data.full_name)
                if not self.status.startup:
                    self.log.error("Validation for %s failed.", repository_full_name)
                if self.system.action:
@@ -576,7 +580,7 @@ class HacsBase:
            )
            return
        except AIOGitHubAPIException as exception:
            self.common.skip.append(repository.data.full_name)
            self.common.skip.add(repository.data.full_name)
            raise HacsException(
                f"Validation for {repository_full_name} failed with {exception}."
            ) from exception
@@ -726,7 +730,9 @@ class HacsBase:
                await asyncio.sleep(1)
                continue

            except BaseException as exception:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
            except (
                BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
            ) as exception:
                self.log.exception("Download failed - %s", exception)

        return None
@@ -742,7 +748,9 @@ class HacsBase:
            entry=self.configuration.config_entry,
            platforms=platforms,
        )
        self.hass.config_entries.async_setup_platforms(self.configuration.config_entry, platforms)
        await self.hass.config_entries.async_forward_entry_setups(
            self.configuration.config_entry, platforms
        )

    @callback
    def async_dispatch(self, signal: HacsDispatchEvent, data: dict | None = None) -> None:
@@ -755,6 +763,9 @@ class HacsBase:
        for category in (HacsCategory.INTEGRATION, HacsCategory.PLUGIN):
            self.enable_hacs_category(HacsCategory(category))

        if self.configuration.experimental and self.core.ha_version >= "2023.4.0b0":
            self.enable_hacs_category(HacsCategory.TEMPLATE)

        if HacsCategory.PYTHON_SCRIPT in self.hass.config.components:
            self.enable_hacs_category(HacsCategory.PYTHON_SCRIPT)
@@ -764,7 +775,18 @@ class HacsBase:
        if self.configuration.appdaemon:
            self.enable_hacs_category(HacsCategory.APPDAEMON)
        if self.configuration.netdaemon:
            self.enable_hacs_category(HacsCategory.NETDAEMON)
            downloaded_netdaemon = [
                x
                for x in self.repositories.list_downloaded
                if x.data.category == HacsCategory.NETDAEMON
            ]
            if len(downloaded_netdaemon) != 0:
                self.log.warning(
                    "NetDaemon in HACS is deprecated. It will stop working in the future. "
                    "Please remove all your current NetDaemon repositories from HACS "
                    "and download them manually if you want to continue using them."
                )
                self.enable_hacs_category(HacsCategory.NETDAEMON)

    async def async_load_hacs_from_github(self, _=None) -> None:
        """Load HACS from GitHub."""
@@ -849,6 +871,15 @@
                repository.repository_manifest.update_data(
                    {**dict(HACS_MANIFEST_KEYS_TO_EXPORT), **manifest}
                )
                self.async_dispatch(
                    HacsDispatchEvent.REPOSITORY,
                    {
                        "id": 1337,
                        "action": "update",
                        "repository": repository.data.full_name,
                        "repository_id": repository.data.id,
                    },
                )

        if category == "integration":
            self.status.inital_fetch_done = True

@@ -31,6 +31,7 @@ class HacsCategory(StrEnum):
    PLUGIN = "plugin"  # Kept for legacy purposes
    NETDAEMON = "netdaemon"
    PYTHON_SCRIPT = "python_script"
    TEMPLATE = "template"
    THEME = "theme"
    REMOVED = "removed"

@@ -1,10 +1,9 @@
""""Starting setup task: Frontend"."""
from __future__ import annotations

import os
from typing import TYPE_CHECKING

from aiohttp import web
from homeassistant.components.http import HomeAssistantView
from homeassistant.core import HomeAssistant, callback

from .const import DOMAIN, URL_BASE
@@ -26,11 +25,13 @@ def async_register_frontend(hass: HomeAssistant, hacs: HacsBase) -> None:
    hacs.async_setup_frontend_endpoint_themes()

    # Register frontend
    if hacs.configuration.frontend_repo_url:
    if hacs.configuration.dev and (frontend_path := os.getenv("HACS_FRONTEND_DIR")):
        hacs.log.warning(
            "<HacsFrontend> Frontend development mode enabled. Do not run in production!"
        )
        hass.http.register_view(HacsFrontendDev())
        hass.http.register_static_path(
            f"{URL_BASE}/frontend", f"{frontend_path}/hacs_frontend", cache_headers=False
        )
    elif hacs.configuration.experimental:
        hacs.log.info("<HacsFrontend> Using experimental frontend")
        hass.http.register_static_path(
@@ -72,23 +73,3 @@ def async_register_frontend(hass: HomeAssistant, hacs: HacsBase) -> None:

    # Setup plugin endpoint if needed
    hacs.async_setup_frontend_endpoint_plugin()


class HacsFrontendDev(HomeAssistantView):
    """Dev View Class for HACS."""

    requires_auth = False
    name = "hacs_files:frontend"
    url = r"/hacsfiles/frontend/{requested_file:.+}"

    async def get(self, request, requested_file):  # pylint: disable=unused-argument
        """Handle HACS Web requests."""
        hacs: HacsBase = request.app["hass"].data.get(DOMAIN)
        requested = requested_file.split("/")[-1]
        request = await hacs.session.get(f"{hacs.configuration.frontend_repo_url}/{requested}")
        if request.status == 200:
            result = await request.read()
            response = web.Response(body=result)
            response.headers["Content-Type"] = "application/javascript"

            return response
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,9 +1,9 @@

try {
  new Function("import('/hacsfiles/frontend/main-aeda8d41.js')")();
  new Function("import('/hacsfiles/frontend/main-85e087f9.js')")();
} catch (err) {
  var el = document.createElement('script');
  el.src = '/hacsfiles/frontend/main-aeda8d41.js';
  el.src = '/hacsfiles/frontend/main-85e087f9.js';
  el.type = 'module';
  document.body.appendChild(el);
}
Binary file not shown.
@@ -1,3 +1,3 @@
{
  "./src/main.ts": "main-aeda8d41.js"
  "./src/main.ts": "main-85e087f9.js"
}
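The loader and the chunk map change only in the embedded hash: the frontend build emits content-hashed filenames so browsers cannot serve a stale cached main.js after an update. A rough sketch of the idea (the real HACS frontend build uses its own tooling; the hash function here is purely illustrative):

import hashlib
from pathlib import Path

def hashed_name(path: str) -> str:
    """Derive a cache-busting filename from the file's bytes."""
    src = Path(path)
    digest = hashlib.md5(src.read_bytes()).hexdigest()[:8]
    return f"{src.stem}-{digest}{src.suffix}"  # e.g. main-85e087f9.js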
@@ -1 +1 @@
VERSION="20230127100107"
VERSION="20230406083157"
@@ -1,4 +1,6 @@
{
  "domain": "hacs",
  "name": "HACS",
  "codeowners": [
    "@ludeeus"
  ],
@@ -12,12 +14,10 @@
    "repairs"
  ],
  "documentation": "https://hacs.xyz/docs/configuration/start",
  "domain": "hacs",
  "iot_class": "cloud_polling",
  "issue_tracker": "https://github.com/hacs/integration/issues",
  "name": "HACS",
  "requirements": [
    "aiogithubapi>=22.10.1"
  ],
  "version": "1.30.1"
  "version": "1.32.1"
}
@@ -25,7 +25,7 @@ class RestartRequiredFixFlow(RepairsFlow):
    ) -> data_entry_flow.FlowResult:
        """Handle the first step of a fix flow."""

        return await (self.async_step_confirm_restart())
        return await self.async_step_confirm_restart()

    async def async_step_confirm_restart(
        self, user_input: dict[str, str] | None = None
@@ -8,6 +8,7 @@ from .integration import HacsIntegrationRepository
from .netdaemon import HacsNetdaemonRepository
from .plugin import HacsPluginRepository
from .python_script import HacsPythonScriptRepository
from .template import HacsTemplateRepository
from .theme import HacsThemeRepository

RERPOSITORY_CLASSES: dict[HacsCategory, HacsRepository] = {
@@ -17,4 +18,5 @@ RERPOSITORY_CLASSES: dict[HacsCategory, HacsRepository] = {
    HacsCategory.APPDAEMON: HacsAppdaemonRepository,
    HacsCategory.NETDAEMON: HacsNetdaemonRepository,
    HacsCategory.PLUGIN: HacsPluginRepository,
    HacsCategory.TEMPLATE: HacsTemplateRepository,
}
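The registry above (note the upstream identifier really is spelled RERPOSITORY_CLASSES) maps each category to its repository class, with templates as the newly added entry. A hedged sketch of how such a mapping is typically consumed; the factory below is assumed, not HACS code:

def repository_class_for(category: HacsCategory):
    """Resolve the repository class registered for a category."""
    if (cls := RERPOSITORY_CLASSES.get(category)) is None:
        raise ValueError(f"No repository class registered for {category}")
    return cls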
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -91,6 +91,8 @@ TOPIC_FILTER = (
    "sensor",
    "smart-home",
    "smarthome",
    "template",
    "templates",
    "theme",
    "themes",
)
@@ -102,6 +104,7 @@ REPOSITORY_KEYS_TO_EXPORT = (
    ("description", ""),
    ("downloads", 0),
    ("domain", None),
    ("etag_releases", None),
    ("etag_repository", None),
    ("full_name", ""),
    ("last_commit", None),
@@ -143,6 +146,7 @@ class RepositoryData:
    domain: str = None
    downloads: int = 0
    etag_repository: str = None
    etag_releases: str = None
    file_name: str = ""
    first_install: bool = False
    full_name: str = ""
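REPOSITORY_KEYS_TO_EXPORT pairs each attribute with its default so storage stays compact: only values that differ from the default need to be written out. A minimal sketch of that export step; the helper name is assumed:

def export_repository_data(data, keys=REPOSITORY_KEYS_TO_EXPORT) -> dict:
    """Serialize only the attributes that differ from their defaults."""
    return {
        key: getattr(data, key)
        for key, default in keys
        if getattr(data, key) != default
    }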
@@ -505,14 +509,18 @@ class HacsRepository:
            self.data.description = self.data.description

    @concurrent(concurrenttasks=10, backoff_time=5)
    async def common_update(self, ignore_issues=False, force=False) -> bool:
    async def common_update(self, ignore_issues=False, force=False, skip_releases=False) -> bool:
        """Common information update steps of the repository."""
        self.logger.debug("%s Getting repository information", self.string)

        # Attach repository
        current_etag = self.data.etag_repository
        try:
            await self.common_update_data(ignore_issues=ignore_issues, force=force)
            await self.common_update_data(
                ignore_issues=ignore_issues,
                force=force,
                skip_releases=skip_releases,
            )
        except HacsRepositoryExistException:
            self.data.full_name = self.hacs.common.renamed_repositories[self.data.full_name]
            await self.common_update_data(ignore_issues=ignore_issues, force=force)
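The new skip_releases flag threads through to common_update_data (see the hunk further down) and lets bulk refreshes avoid the per-repository GitHub releases call. A hedged usage sketch, with the call site assumed from this diff:

async def refresh(repository, with_releases: bool = True) -> None:
    # skip_releases=True saves one GitHub API request per repository,
    # which adds up when hundreds of repositories are refreshed in bulk.
    await repository.common_update(skip_releases=not with_releases)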
@@ -746,9 +754,8 @@

    def remove(self) -> None:
        """Run remove tasks."""
        self.logger.info("%s Starting removal", self.string)

        if self.hacs.repositories.is_registered(repository_id=str(self.data.id)):
            self.logger.info("%s Starting removal", self.string)
            self.hacs.repositories.unregister(self)

    async def uninstall(self) -> None:
@@ -767,6 +774,8 @@
                await self.hacs.hass.services.async_call("frontend", "reload_themes", {})
            except BaseException:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
                pass
        elif self.data.category == "template":
            await self.hacs.hass.services.async_call("homeassistant", "reload_custom_templates", {})

        await async_remove_store(self.hacs.hass, f"hacs/{self.data.id}.hacs")

@@ -791,6 +800,8 @@
        try:
            if self.data.category == "python_script":
                local_path = f"{self.content.path.local}/{self.data.name}.py"
            elif self.data.category == "template":
                local_path = f"{self.content.path.local}/{self.data.file_name}"
            elif self.data.category == "theme":
                path = (
                    f"{self.hacs.core.config_path}/"
@@ -818,7 +829,7 @@
                return False
            self.logger.debug("%s Removing %s", self.string, local_path)

            if self.data.category in ["python_script"]:
            if self.data.category in ["python_script", "template"]:
                os.remove(local_path)
            else:
                shutil.rmtree(local_path)
@@ -830,7 +841,9 @@
                    "%s Presumed local content path %s does not exist", self.string, local_path
                )

        except BaseException as exception:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        except (
            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        ) as exception:
            self.logger.debug("%s Removing %s failed with %s", self.string, local_path, exception)
            return False
        return True
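The uninstall path now treats templates like python_scripts: both download to a single file, so removal uses os.remove rather than shutil.rmtree. A compact sketch of that rule:

import os
import shutil

def remove_download(local_path: str, category: str) -> None:
    """Single-file categories are unlinked; everything else is a directory tree."""
    if category in ("python_script", "template"):
        os.remove(local_path)
    else:
        shutil.rmtree(local_path)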
@@ -1048,6 +1061,7 @@
        ignore_issues: bool = False,
        force: bool = False,
        retry=False,
        skip_releases=False,
    ) -> None:
        """Common update data."""
        releases = []
@@ -1085,7 +1099,7 @@
        if self.data.archived and not ignore_issues:
            self.validate.errors.append("Repository is archived.")
            if self.data.full_name not in self.hacs.common.archived_repositories:
                self.hacs.common.archived_repositories.append(self.data.full_name)
            self.hacs.common.archived_repositories.add(self.data.full_name)
            raise HacsRepositoryArchivedException(f"{self} Repository is archived.")

        # Make sure the repository is not in the blacklist.
@@ -1096,19 +1110,20 @@
            raise HacsException(f"{self} Repository has been requested to be removed.")

        # Get releases.
        try:
            releases = await self.get_releases(
                prerelease=self.data.show_beta,
                returnlimit=self.hacs.configuration.release_limit,
            )
            if releases:
                self.data.releases = True
                self.releases.objects = releases
                self.data.published_tags = [x.tag_name for x in self.releases.objects]
                self.data.last_version = next(iter(self.data.published_tags))
        if not skip_releases:
            try:
                releases = await self.get_releases(
                    prerelease=self.data.show_beta,
                    returnlimit=self.hacs.configuration.release_limit,
                )
                if releases:
                    self.data.releases = True
                    self.releases.objects = releases
                    self.data.published_tags = [x.tag_name for x in self.releases.objects]
                    self.data.last_version = next(iter(self.data.published_tags))

            except HacsException:
                self.data.releases = False
        except HacsException:
            self.data.releases = False

        if not self.force_branch:
            self.ref = self.version_to_download()
@@ -1118,6 +1133,9 @@
            if assets := release.assets:
                downloads = next(iter(assets)).download_count
                self.data.downloads = downloads
        elif self.hacs.system.generator and self.repository_object:
            await self.repository_object.set_last_commit()
            self.data.last_commit = self.repository_object.last_commit

        self.hacs.log.debug(
            "%s Running checks against %s", self.string, self.ref.replace("tags/", "")
@@ -1247,7 +1265,9 @@
                return
            self.validate.errors.append(f"[{content.name}] was not downloaded.")

        except BaseException as exception:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        except (
            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        ) as exception:
            self.validate.errors.append(f"Download was not completed [{exception}]")

    async def async_remove_entity_device(self) -> None:
@@ -47,7 +47,7 @@
      "release_limit": "Number of releases to show.",
      "debug": "Enable debug.",
      "appdaemon": "Enable AppDaemon apps discovery & tracking",
      "netdaemon": "Enable NetDaemon apps discovery & tracking",
      "netdaemon": "[DEPRECATED] Enable NetDaemon apps discovery & tracking",
      "sidepanel_icon": "Side panel icon",
      "sidepanel_title": "Side panel title"
    }
@@ -68,7 +68,7 @@
      },
      "removed": {
        "title": "Repository removed from HACS",
        "description": "{name} has been removed from HACS for {reason} visit the [HACS Panel](/hacs/repository/{repositry_id}) to remove it."
        "description": "Because {reason}, '{name}' has been removed from HACS. Please visit the [HACS Panel](/hacs/repository/{repositry_id}) to remove it."
      }
    }
  }
}
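The reworded removal notice keeps the same three placeholders, including the upstream key spelled repositry_id, which stays as-is because the rendering code presumably fills it by that exact name. A quick sketch of how such a string is filled, with example values:

message = (
    "Because {reason}, '{name}' has been removed from HACS. Please visit "
    "the [HACS Panel](/hacs/repository/{repositry_id}) to remove it."
).format(reason="a removal request", name="owner/repo", repositry_id="123456")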
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -74,7 +74,9 @@ class Backup:
                self.local_path,
                self.backup_path_full,
            )
        except BaseException as exception:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        except (
            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        ) as exception:
            self.hacs.log.warning("Could not create backup: %s", exception)

    def restore(self) -> None:
@@ -207,8 +207,8 @@ class HacsData:
        self.logger.info("<HacsData restore> Restore started")

        # Hacs
        self.hacs.common.archived_repositories = []
        self.hacs.common.ignored_repositories = []
        self.hacs.common.archived_repositories = set()
        self.hacs.common.ignored_repositories = set()
        self.hacs.common.renamed_repositories = {}

        # Clear out double renamed values
@@ -219,14 +219,14 @@
                self.hacs.common.renamed_repositories[entry] = value

        # Clear out double archived values
        for entry in hacs.get("archived_repositories", []):
        for entry in hacs.get("archived_repositories", set()):
            if entry not in self.hacs.common.archived_repositories:
                self.hacs.common.archived_repositories.append(entry)
            self.hacs.common.archived_repositories.add(entry)

        # Clear out double ignored values
        for entry in hacs.get("ignored_repositories", []):
        for entry in hacs.get("ignored_repositories", set()):
            if entry not in self.hacs.common.ignored_repositories:
                self.hacs.common.ignored_repositories.append(entry)
            self.hacs.common.ignored_repositories.add(entry)

        try:
            await self.register_unknown_repositories(repositories)
@@ -241,7 +241,9 @@
                self.async_restore_repository(entry, repo_data)

            self.logger.info("<HacsData restore> Restore done")
        except BaseException as exception:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        except (
            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        ) as exception:
            self.logger.critical(
                "<HacsData restore> [%s] Restore Failed!", exception, exc_info=exception
            )
@@ -282,6 +284,8 @@
        repository.data.description = repository_data.get("description", "")
        repository.data.downloads = repository_data.get("downloads", 0)
        repository.data.last_updated = repository_data.get("last_updated", 0)
        if self.hacs.system.generator:
            repository.data.etag_releases = repository_data.get("etag_releases")
            repository.data.etag_repository = repository_data.get("etag_repository")
        repository.data.topics = [
            topic for topic in repository_data.get("topics", []) if topic not in TOPIC_FILTER
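Switching archived_repositories and ignored_repositories from lists to sets gives O(1) membership checks and free deduplication; with set.add(), the "if entry not in ..." guard retained above is now redundant rather than load-bearing. A two-line illustration:

archived: set[str] = set()
for entry in ("owner/repo", "owner/repo", "other/repo"):
    archived.add(entry)  # duplicates collapse automatically
assert archived == {"owner/repo", "other/repo"}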
@@ -17,4 +17,5 @@ def is_safe(hacs: HacsBase, path: str | Path) -> bool:
        Path(f"{hacs.core.config_path}/{hacs.configuration.python_script_path}").as_posix(),
        Path(f"{hacs.core.config_path}/{hacs.configuration.theme_path}").as_posix(),
        Path(f"{hacs.core.config_path}/custom_components/").as_posix(),
        Path(f"{hacs.core.config_path}/custom_templates/").as_posix(),
    )
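custom_templates/ joins the tuple of protected roots checked by is_safe. A hedged sketch of the guard's semantics, assuming a path is deemed safe to remove only when it is not one of those roots themselves:

from pathlib import Path

def is_safe_sketch(config_path: str, target: str) -> bool:
    """Illustrative only: refuse to touch the protected root directories."""
    protected = (
        Path(f"{config_path}/custom_components/").as_posix(),
        Path(f"{config_path}/custom_templates/").as_posix(),
    )
    return Path(target).as_posix() not in protected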
@@ -17,7 +17,9 @@ class HACSStore(Store):
        """Load the data from disk if version matches."""
        try:
            data = json_util.load_json(self.path)
        except BaseException as exception:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        except (
            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        ) as exception:
            _LOGGER.critical(
                "Could not load '%s', restore it from a backup or delete the file: %s",
                self.path,
@@ -31,6 +31,8 @@ def render_template(hacs: HacsBase, content: str, context: HacsRepository) -> str:
            version_available=context.releases.last_release,
            version_installed=context.display_installed_version,
        )
    except BaseException as exception:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
    except (
        BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
    ) as exception:
        context.logger.debug(exception)
        return content
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff.