Compare commits
2 Commits
c948b95622
...
9cc6076da0
| Author | SHA1 | Date | |
|---|---|---|---|
| 9cc6076da0 | |||
| 831f676068 |
@@ -1 +1 @@
|
||||
2023.2.5
|
||||
2023.6.3
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -30,3 +30,5 @@ home-assistant.log.fault
|
||||
ip_bans.yaml
|
||||
secrets.yaml
|
||||
known_devices.yaml
|
||||
*.pyc
|
||||
*.gz
|
||||
|
||||
582
automations.yaml
582
automations.yaml
@@ -125,23 +125,14 @@
|
||||
use_blueprint:
|
||||
path: freakshock88/motion_illuminance_activated_entity.yaml
|
||||
input:
|
||||
motion_sensor: binary_sensor.lumi_lumi_sensor_motion_aq2_occupancy
|
||||
target_entity: light.lamp_wc_beneden_light
|
||||
illuminance_sensor: sensor.lumi_lumi_sensor_motion_aq2_illuminance
|
||||
no_motion_wait: input_number.beweging_toilet_timer
|
||||
target_off_entity: light.lamp_wc_beneden_light
|
||||
illuminance_cutoff: input_number.beweging_toilet_helderheid
|
||||
- id: '1660502406213'
|
||||
alias: Tuin - Lights On At Sunset
|
||||
description: ''
|
||||
use_blueprint:
|
||||
path: CyanAutomation/lights_on_at_sunset.yaml
|
||||
input:
|
||||
target_brightness: 40
|
||||
target_light:
|
||||
area_id: tuin
|
||||
motion_sensor: binary_sensor.lumi_lumi_sensor_motion_aq2_occupancy
|
||||
- id: '1660507765057'
|
||||
alias: woonkamer - alles uit
|
||||
alias: woonkamer - alles uit - sfeer schakelaar
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: device
|
||||
@@ -164,43 +155,25 @@
|
||||
action:
|
||||
- service: scene.turn_on
|
||||
target:
|
||||
entity_id: scene.beneden_alles_uit
|
||||
entity_id: scene.woonkamer_ochtend_scene
|
||||
metadata: {}
|
||||
mode: single
|
||||
- id: '1660508989788'
|
||||
alias: Slaapkamer - bedlamp aan lage helderheid
|
||||
description: ''
|
||||
trigger:
|
||||
- device_id: dc42e9871d6dfc0ce76c594054038cd9
|
||||
domain: zha
|
||||
platform: device
|
||||
type: remote_button_short_press
|
||||
subtype: button_1
|
||||
condition: []
|
||||
action:
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: device
|
||||
type: is_off
|
||||
device_id: 01b9a993ffea323f6f094e9c231f6d3c
|
||||
entity_id: light.lamp_bed_willem_light
|
||||
domain: light
|
||||
sequence:
|
||||
- type: turn_on
|
||||
device_id: 01b9a993ffea323f6f094e9c231f6d3c
|
||||
entity_id: light.lamp_bed_willem_light
|
||||
domain: light
|
||||
brightness_pct: 10
|
||||
- conditions:
|
||||
- condition: device
|
||||
type: is_on
|
||||
device_id: 01b9a993ffea323f6f094e9c231f6d3c
|
||||
entity_id: light.lamp_bed_willem_light
|
||||
domain: light
|
||||
sequence:
|
||||
- type: turn_off
|
||||
device_id: 01b9a993ffea323f6f094e9c231f6d3c
|
||||
entity_id: light.lamp_bed_willem_light
|
||||
device_id: 6652a530d4f2c349be36ea58904e613f
|
||||
entity_id: switch.display_woonkamer_browsee_screen
|
||||
domain: switch
|
||||
- delay:
|
||||
hours: 0
|
||||
minutes: 0
|
||||
seconds: 20
|
||||
milliseconds: 0
|
||||
- service: scene.turn_on
|
||||
data:
|
||||
transition: 19
|
||||
target:
|
||||
entity_id: scene.tuin_nacht
|
||||
- type: turn_off
|
||||
device_id: 1281186f8b90c2b00053decdf33a6d72
|
||||
entity_id: light.lamp_aanrecht
|
||||
domain: light
|
||||
mode: single
|
||||
- id: '1661107342293'
|
||||
@@ -250,51 +223,33 @@
|
||||
target:
|
||||
entity_id: scene.tuin_uit
|
||||
metadata: {}
|
||||
mode: single
|
||||
- id: '1661711305212'
|
||||
alias: Tuin - waterklep fix
|
||||
description: ''
|
||||
trigger:
|
||||
- type: opened
|
||||
platform: device
|
||||
device_id: 172891d014f4ffcaefd3e0310574ed3a
|
||||
entity_id: binary_sensor.waterklep_contact_contact
|
||||
domain: binary_sensor
|
||||
condition:
|
||||
- condition: device
|
||||
type: is_on
|
||||
device_id: 14791754a4e8dd8e44b075ab2b932296
|
||||
entity_id: switch.waterklep
|
||||
domain: switch
|
||||
for:
|
||||
- delay:
|
||||
hours: 0
|
||||
minutes: 0
|
||||
seconds: 4
|
||||
action:
|
||||
- type: turn_on
|
||||
device_id: 14791754a4e8dd8e44b075ab2b932296
|
||||
entity_id: switch.waterklep
|
||||
domain: switch
|
||||
mode: restart
|
||||
seconds: 30
|
||||
milliseconds: 0
|
||||
- service: light.turn_off
|
||||
data: {}
|
||||
target:
|
||||
entity_id:
|
||||
- light.tuin_verlichting
|
||||
- light.tuin_achtertuin
|
||||
mode: single
|
||||
- id: '1661803600011'
|
||||
alias: Woonkamer - Beweging
|
||||
description: ''
|
||||
trigger:
|
||||
- type: motion
|
||||
platform: device
|
||||
device_id: dba81805bffdc5ab5eb183b0c2d845dc
|
||||
entity_id: binary_sensor.beweging_woonkamer2_iaszone
|
||||
device_id: 4cf96197bf033071d030814729b20dfb
|
||||
entity_id: binary_sensor.ikea_of_sweden_tradfri_motion_sensor_motion
|
||||
domain: binary_sensor
|
||||
for:
|
||||
hours: 0
|
||||
minutes: 0
|
||||
seconds: 3
|
||||
condition:
|
||||
- condition: or
|
||||
conditions:
|
||||
- condition: time
|
||||
after: 06:30:00
|
||||
before: 08:00:00
|
||||
after: 04:30:00
|
||||
before: sensor.sun_next_rising
|
||||
weekday:
|
||||
- sun
|
||||
- mon
|
||||
@@ -303,8 +258,9 @@
|
||||
- thu
|
||||
- sat
|
||||
- fri
|
||||
alias: Before sun rise
|
||||
- condition: time
|
||||
after: '17:30:00'
|
||||
after: sensor.sun_next_setting
|
||||
before: '23:00:00'
|
||||
weekday:
|
||||
- sun
|
||||
@@ -314,6 +270,7 @@
|
||||
- thu
|
||||
- fri
|
||||
- sat
|
||||
alias: After Sunset
|
||||
- condition: device
|
||||
type: is_off
|
||||
device_id: bc61b8f4ddf2fc04d3a0a6001ea7c7c8
|
||||
@@ -323,85 +280,13 @@
|
||||
hours: 0
|
||||
minutes: 10
|
||||
seconds: 0
|
||||
enabled: true
|
||||
action:
|
||||
- service: scene.turn_on
|
||||
target:
|
||||
entity_id: scene.woonkamer_sfeer_verlichting_aan
|
||||
metadata: {}
|
||||
mode: single
|
||||
- id: '1662235717886'
|
||||
alias: Woonkamer - CO2 melding
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: numeric_state
|
||||
entity_id: sensor.woonkamer_co2
|
||||
for:
|
||||
hours: 0
|
||||
minutes: 5
|
||||
seconds: 0
|
||||
attribute: state_class
|
||||
above: '1000'
|
||||
condition: []
|
||||
action:
|
||||
- service: notify.mobile_app_iphone
|
||||
data:
|
||||
message: Co2 in de woonkamer te hoog, nu ventileren
|
||||
title: Let op!
|
||||
- service: notify.mobile_app_iphone_van_ilse
|
||||
data:
|
||||
message: Co2 in de woonkamer te hoog, nu ventileren
|
||||
title: Let op!
|
||||
mode: single
|
||||
- id: '1662613235716'
|
||||
alias: Slaapkamer - toggle rgb lamp
|
||||
description: ''
|
||||
trigger:
|
||||
- device_id: dc42e9871d6dfc0ce76c594054038cd9
|
||||
domain: zha
|
||||
platform: device
|
||||
type: remote_button_short_press
|
||||
subtype: button_2
|
||||
condition: []
|
||||
action:
|
||||
- if:
|
||||
- condition: device
|
||||
type: is_off
|
||||
device_id: 486c39036f87bee0fb2ed8521eb89559
|
||||
entity_id: light.lamp_rgb_slaapkamer
|
||||
domain: light
|
||||
then:
|
||||
- service: scene.turn_on
|
||||
target:
|
||||
entity_id: scene.slaapkamer_dim_wit
|
||||
metadata: {}
|
||||
else:
|
||||
- type: turn_off
|
||||
device_id: 486c39036f87bee0fb2ed8521eb89559
|
||||
entity_id: light.lamp_rgb_slaapkamer
|
||||
domain: light
|
||||
mode: single
|
||||
- id: '1666338442880'
|
||||
alias: Tuin - melding poort
|
||||
description: ''
|
||||
trigger:
|
||||
- type: opened
|
||||
platform: device
|
||||
device_id: 692b4399bddfc992385e65ea0fcf8af6
|
||||
entity_id: binary_sensor.deurcontact_poort_contact
|
||||
domain: binary_sensor
|
||||
for:
|
||||
hours: 0
|
||||
minutes: 0
|
||||
seconds: 3
|
||||
condition: []
|
||||
action:
|
||||
- service: notify.mobile_app_iphone_van_ilse
|
||||
data:
|
||||
message: sensor van de poort getriggerd (poort open)
|
||||
- service: notify.mobile_app_iphone_van_willem
|
||||
data:
|
||||
message: sensor van de poort getriggerd (poort open)
|
||||
mode: single
|
||||
- id: '1666506600186'
|
||||
alias: Slaapkamer - bed lamp feller
|
||||
description: ''
|
||||
@@ -450,7 +335,7 @@
|
||||
brightness_pct: 20
|
||||
- type: turn_on
|
||||
device_id: 4c2d45d53cd5318e416fdc1cc942f65a
|
||||
entity_id: light.lamp_dressoir_light
|
||||
entity_id: light.lamp_dressoir
|
||||
domain: light
|
||||
brightness_pct: 20
|
||||
- conditions:
|
||||
@@ -466,7 +351,7 @@
|
||||
domain: light
|
||||
- type: turn_off
|
||||
device_id: 4c2d45d53cd5318e416fdc1cc942f65a
|
||||
entity_id: light.lamp_dressoir_light
|
||||
entity_id: light.lamp_dressoir
|
||||
domain: light
|
||||
mode: single
|
||||
- id: '1671052282402'
|
||||
@@ -491,23 +376,6 @@
|
||||
domain: light
|
||||
brightness_pct: 100
|
||||
mode: single
|
||||
- id: '1671659606183'
|
||||
alias: Alles uit
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: device
|
||||
domain: mqtt
|
||||
device_id: c976ab1909dcc67895eccdce5708b0dc
|
||||
type: action
|
||||
subtype: brightness_move_down_1
|
||||
discovery_id: 0x60a423fffe28320f action_brightness_move_down_1
|
||||
condition: []
|
||||
action:
|
||||
- service: scene.turn_on
|
||||
data: {}
|
||||
target:
|
||||
entity_id: scene.beneden_alles_uit
|
||||
mode: single
|
||||
- id: '1672424581965'
|
||||
alias: slaapkamer tim - schakelaar (Actions)
|
||||
description: ''
|
||||
@@ -710,38 +578,6 @@
|
||||
device_id: 9f4dd21a83f9473a5350876da52296d6
|
||||
entity_id: light.lamp_bank_light
|
||||
domain: light
|
||||
- id: '1674681637235'
|
||||
alias: Slaapkamer i&w - knoppen bed willem [BP]
|
||||
description: ''
|
||||
use_blueprint:
|
||||
path: andordavoti/zha-aqara-wireless-switch-WXKG02LM.yaml
|
||||
input:
|
||||
aqara_switch: dc42e9871d6dfc0ce76c594054038cd9
|
||||
button_single_press_right:
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: device
|
||||
type: is_off
|
||||
device_id: 01b9a993ffea323f6f094e9c231f6d3c
|
||||
entity_id: light.lamp_bed_willem_light
|
||||
domain: light
|
||||
sequence:
|
||||
- type: turn_on
|
||||
device_id: 01b9a993ffea323f6f094e9c231f6d3c
|
||||
entity_id: light.lamp_bed_willem_light
|
||||
domain: light
|
||||
brightness_pct: 10
|
||||
- conditions:
|
||||
- condition: device
|
||||
type: is_on
|
||||
device_id: 01b9a993ffea323f6f094e9c231f6d3c
|
||||
entity_id: light.lamp_bed_willem_light
|
||||
domain: light
|
||||
sequence:
|
||||
- type: turn_off
|
||||
device_id: 01b9a993ffea323f6f094e9c231f6d3c
|
||||
entity_id: light.lamp_bed_willem_light
|
||||
domain: light
|
||||
- id: '1675520145446'
|
||||
alias: Zolder - knoppen bureau willem (BP)
|
||||
description: ''
|
||||
@@ -760,15 +596,15 @@
|
||||
target:
|
||||
entity_id: scene.klaar_met_werken_op_zolder
|
||||
on_button_2_short:
|
||||
- device_id: 9186cc61ae5a6d7206c02fc67cfdb878
|
||||
- device_id: 53a4e48d13f3f94e7ca1ffe0557ac135
|
||||
domain: climate
|
||||
entity_id: climate.ac_zolder
|
||||
entity_id: climate.airco_4
|
||||
type: set_hvac_mode
|
||||
hvac_mode: heat_cool
|
||||
off_button_2_short:
|
||||
- device_id: 9186cc61ae5a6d7206c02fc67cfdb878
|
||||
- device_id: 53a4e48d13f3f94e7ca1ffe0557ac135
|
||||
domain: climate
|
||||
entity_id: climate.ac_zolder
|
||||
entity_id: climate.airco_4
|
||||
type: set_hvac_mode
|
||||
hvac_mode: 'off'
|
||||
- id: '1675628615548'
|
||||
@@ -776,23 +612,28 @@
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: time
|
||||
at: 00:01:00
|
||||
at: 03:00:00
|
||||
condition:
|
||||
- condition: or
|
||||
conditions:
|
||||
- condition: template
|
||||
value_template: '{{ (as_timestamp(now()) - as_timestamp(states.sensor.roborock_vacuum_s5e_last_clean_end.last_updated)
|
||||
)/3600 > 8}}'
|
||||
enabled: false
|
||||
- condition: and
|
||||
conditions:
|
||||
- condition: template
|
||||
value_template: " - condition: template\n value_template: >-\n {{
|
||||
(as_timestamp(now()) -\n as_timestamp(states.sensor.roborock_vacuum_s5e_last_clean_end.last_updated)\n
|
||||
\ )/3600 > 1}}"
|
||||
- condition: numeric_state
|
||||
entity_id: sensor.roborock_vacuum_s5e_last_clean_area
|
||||
below: 7
|
||||
enabled: false
|
||||
enabled: false
|
||||
action:
|
||||
- service: vacuum.set_fan_speed
|
||||
data:
|
||||
fan_speed: Low
|
||||
target:
|
||||
device_id: b99930765798a4796ca3ebb87e84299e
|
||||
- device_id: b99930765798a4796ca3ebb87e84299e
|
||||
domain: vacuum
|
||||
entity_id: vacuum.roborock_vacuum_s5e
|
||||
@@ -807,6 +648,24 @@
|
||||
offset: 0
|
||||
condition: []
|
||||
action:
|
||||
- service: scene.turn_on
|
||||
target:
|
||||
entity_id: scene.tuin_avond
|
||||
metadata: {}
|
||||
- delay:
|
||||
hours: 0
|
||||
minutes: 0
|
||||
seconds: 30
|
||||
milliseconds: 0
|
||||
- service: scene.turn_on
|
||||
target:
|
||||
entity_id: scene.tuin_avond
|
||||
metadata: {}
|
||||
- delay:
|
||||
hours: 0
|
||||
minutes: 0
|
||||
seconds: 30
|
||||
milliseconds: 0
|
||||
- service: scene.turn_on
|
||||
target:
|
||||
entity_id: scene.tuin_avond
|
||||
@@ -832,7 +691,7 @@
|
||||
minutes: 1
|
||||
seconds: 0
|
||||
- condition: time
|
||||
after: '22:00:00'
|
||||
after: '20:30:00'
|
||||
before: 05:00:00
|
||||
- type: is_no_motion
|
||||
condition: device
|
||||
@@ -868,3 +727,302 @@
|
||||
- service: notify.mobile_app_iphone_van_willem
|
||||
data:
|
||||
message: sensor batteries are low {{sensors}}
|
||||
- id: '1677526576561'
|
||||
alias: slaapkamer i&w - knoppen bed willem (knop 2)
|
||||
description: ''
|
||||
trigger:
|
||||
- device_id: dc42e9871d6dfc0ce76c594054038cd9
|
||||
domain: zha
|
||||
platform: device
|
||||
type: remote_button_short_press
|
||||
subtype: button_2
|
||||
id: remote_button2_short_press
|
||||
- device_id: dc42e9871d6dfc0ce76c594054038cd9
|
||||
domain: zha
|
||||
platform: device
|
||||
type: remote_button_double_press
|
||||
subtype: button_2
|
||||
id: remote_button2_double_press
|
||||
- device_id: dc42e9871d6dfc0ce76c594054038cd9
|
||||
domain: zha
|
||||
platform: device
|
||||
type: remote_button_long_press
|
||||
subtype: button_2
|
||||
id: remote_button2_long_press
|
||||
- device_id: dc42e9871d6dfc0ce76c594054038cd9
|
||||
domain: zha
|
||||
platform: device
|
||||
type: remote_button_short_press
|
||||
subtype: button_1
|
||||
id: button_1_short
|
||||
condition: []
|
||||
action:
|
||||
- if:
|
||||
- condition: device
|
||||
type: is_off
|
||||
device_id: 01b9a993ffea323f6f094e9c231f6d3c
|
||||
entity_id: light.lamp_bed_willem_light
|
||||
domain: light
|
||||
then:
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: trigger
|
||||
id: remote_button2_short_press
|
||||
sequence:
|
||||
- type: turn_on
|
||||
device_id: 01b9a993ffea323f6f094e9c231f6d3c
|
||||
entity_id: light.lamp_bed_willem_light
|
||||
domain: light
|
||||
brightness_pct: 5
|
||||
- conditions:
|
||||
- condition: trigger
|
||||
id: remote_button2_double_press
|
||||
sequence:
|
||||
- type: turn_on
|
||||
device_id: 01b9a993ffea323f6f094e9c231f6d3c
|
||||
entity_id: light.lamp_bed_willem_light
|
||||
domain: light
|
||||
brightness_pct: 100
|
||||
- conditions:
|
||||
- condition: trigger
|
||||
id: remote_button2_long_press
|
||||
sequence:
|
||||
- service: light.turn_on
|
||||
target:
|
||||
entity_id: light.lamp_bed_willem_light
|
||||
data:
|
||||
brightness: '{% if state_attr("light.lamp_bed_willem_light","brightness")
|
||||
is none %} 8 {% elif state_attr("light.lamp_bed_willem_light","brightness")|int
|
||||
< 9 %} 32 {% elif state_attr("light.lamp_bed_willem_light","brightness")|int
|
||||
< 33 %} 128 {% elif state_attr("light.lamp_bed_willem_light","brightness")|int
|
||||
< 129 %} 160 {% elif state_attr("light.lamp_bed_willem_light","brightness")|int
|
||||
< 161 %} 192 {% elif state_attr("light.lamp_bed_willem_light","brightness")|int
|
||||
< 193 %} 224 {% elif state_attr("light.lamp_bed_willem_light","brightness")|int
|
||||
< 225 %} 255 {% else %} 10 {% endif %}
|
||||
|
||||
'
|
||||
else:
|
||||
- type: turn_off
|
||||
device_id: 01b9a993ffea323f6f094e9c231f6d3c
|
||||
entity_id: light.lamp_bed_willem_light
|
||||
domain: light
|
||||
mode: single
|
||||
- id: '1678901438847'
|
||||
alias: Knop kamer Luuk, verwarming uit
|
||||
description: ''
|
||||
trigger:
|
||||
- device_id: 17ebe217929cb75025e2e52c47113267
|
||||
domain: zha
|
||||
platform: device
|
||||
type: remote_button_short_press
|
||||
subtype: remote_button_short_press
|
||||
condition: []
|
||||
action:
|
||||
- service: climate.set_temperature
|
||||
data:
|
||||
temperature: 16
|
||||
target:
|
||||
entity_id: climate.kamer_luuk
|
||||
mode: single
|
||||
- id: '1678902018862'
|
||||
alias: knop luuk dubbel klik - verwarming aan
|
||||
description: ''
|
||||
trigger:
|
||||
- device_id: 17ebe217929cb75025e2e52c47113267
|
||||
domain: zha
|
||||
platform: device
|
||||
type: remote_button_double_press
|
||||
subtype: remote_button_double_press
|
||||
condition: []
|
||||
action:
|
||||
- service: climate.set_preset_mode
|
||||
data:
|
||||
preset_mode: None
|
||||
target:
|
||||
entity_id: climate.kamer_luuk
|
||||
- service: climate.set_temperature
|
||||
data:
|
||||
temperature: 18.5
|
||||
target:
|
||||
entity_id: climate.kamer_luuk
|
||||
mode: single
|
||||
- id: '1679258969777'
|
||||
alias: badkamer - thermostaat automatisch uit
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: numeric_state
|
||||
entity_id: climate.badkamer
|
||||
for:
|
||||
hours: 2
|
||||
minutes: 0
|
||||
seconds: 0
|
||||
above: 17.5
|
||||
attribute: temperature
|
||||
condition:
|
||||
- type: is_not_occupied
|
||||
condition: device
|
||||
device_id: dd0fea1459ae5d17823e14348b73cb24
|
||||
entity_id: binary_sensor.lumi_lumi_sensor_motion_aq2_occupancy_2
|
||||
domain: binary_sensor
|
||||
for:
|
||||
hours: 0
|
||||
minutes: 20
|
||||
seconds: 0
|
||||
action:
|
||||
- service: climate.set_temperature
|
||||
data:
|
||||
temperature: 16
|
||||
target:
|
||||
entity_id: climate.badkamer
|
||||
- service: climate.set_temperature
|
||||
data:
|
||||
temperature: 15
|
||||
target:
|
||||
entity_id: climate.vloerverwarming_badkamer
|
||||
- service: notify.mobile_app_iphone_van_willem
|
||||
data:
|
||||
message: Badkamer verwarming uit
|
||||
mode: single
|
||||
- id: '1679849595183'
|
||||
alias: woonkamer - alles uit als het licht is
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: time
|
||||
at: sensor.sun_next_rising
|
||||
condition: []
|
||||
action:
|
||||
- service: scene.turn_on
|
||||
target:
|
||||
entity_id: scene.beneden_alles_uit
|
||||
metadata: {}
|
||||
mode: single
|
||||
- id: '1683562357537'
|
||||
alias: awtrix_weather_app
|
||||
description: ''
|
||||
use_blueprint:
|
||||
path: smarthomejunkie/awtrix_weather_app.yaml
|
||||
input:
|
||||
awtrix_display: 658104c6c966f75ddf9c747816e4a8f4
|
||||
my_sensor: weather.forecast_home
|
||||
switch_to_app: false
|
||||
toggle_helper: input_boolean.awtrix_weather_app_toggle
|
||||
show_rainbow: false
|
||||
duration: '10'
|
||||
push_icon: '1'
|
||||
show_windspeed: false
|
||||
- id: '1683562545849'
|
||||
alias: awtrix_rain_app
|
||||
description: ''
|
||||
use_blueprint:
|
||||
path: smarthomejunkie/awtrix_rain_forecast.yaml
|
||||
input:
|
||||
awtrix_display: 658104c6c966f75ddf9c747816e4a8f4
|
||||
toggle_helper: input_boolean.awtrix_rain_app_toggle
|
||||
my_sensor: weather.forecast_home
|
||||
graph_type: line
|
||||
switch_to_app: true
|
||||
- id: '1683566553338'
|
||||
alias: awtrix_sensor_power_app
|
||||
description: ''
|
||||
use_blueprint:
|
||||
path: smarthomejunkie/awtrix_create_sensor_app.yaml
|
||||
input:
|
||||
awtrix_display: 658104c6c966f75ddf9c747816e4a8f4
|
||||
toggle_helper: input_boolean.awtrix_power_sensor_toggle
|
||||
my_sensor: sensor.dsmr_reading_electricity_currently_delivered
|
||||
my_icon: '21256'
|
||||
show_rainbow: false
|
||||
- id: '1683882275516'
|
||||
alias: awtrix_sensor_pm2.5
|
||||
description: ''
|
||||
use_blueprint:
|
||||
path: smarthomejunkie/awtrix_create_sensor_app.yaml
|
||||
input:
|
||||
awtrix_display: 658104c6c966f75ddf9c747816e4a8f4
|
||||
toggle_helper: input_boolean.awtrix_power_sensor_toggle
|
||||
my_icon: '2718'
|
||||
push_icon: '1'
|
||||
my_sensor: sensor.particulate_matter_2_5um_concentration
|
||||
- id: '1683882676954'
|
||||
alias: awtrix_stock_batt_toggle
|
||||
description: ''
|
||||
use_blueprint:
|
||||
path: smarthomejunkie/awtrix_toggle_stock_app.yaml
|
||||
input:
|
||||
stock_app: bat
|
||||
awtrix_display: 658104c6c966f75ddf9c747816e4a8f4
|
||||
toggle_helper: input_boolean.awtrix_stock_batt_toggle
|
||||
- id: '1684882366818'
|
||||
alias: zolder - airco automatisch uit
|
||||
description: ''
|
||||
trigger:
|
||||
- type: not_occupied
|
||||
platform: device
|
||||
device_id: 029153653b8e5f423c3350efedb3b0d3
|
||||
entity_id: binary_sensor.aqs_zolder_still_target
|
||||
domain: binary_sensor
|
||||
for:
|
||||
hours: 0
|
||||
minutes: 20
|
||||
seconds: 0
|
||||
condition:
|
||||
- condition: device
|
||||
type: is_on
|
||||
device_id: 6f9cbe311638680986f710f63e99e576
|
||||
entity_id: switch.contact_airco_flap_zolder_switch
|
||||
domain: switch
|
||||
enabled: true
|
||||
action:
|
||||
- device_id: 53a4e48d13f3f94e7ca1ffe0557ac135
|
||||
domain: climate
|
||||
entity_id: climate.airco_4
|
||||
type: set_hvac_mode
|
||||
hvac_mode: 'off'
|
||||
mode: single
|
||||
- id: '1685739271048'
|
||||
alias: Woonkamer - display aan
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: time
|
||||
at: sensor.sun_next_rising
|
||||
condition: []
|
||||
action:
|
||||
- type: turn_on
|
||||
device_id: 6652a530d4f2c349be36ea58904e613f
|
||||
entity_id: switch.display_woonkamer_browsee_screen
|
||||
domain: switch
|
||||
mode: single
|
||||
- id: '1686156742969'
|
||||
alias: woonkamer - beweging - leds - aan
|
||||
description: ''
|
||||
trigger:
|
||||
- type: occupied
|
||||
platform: device
|
||||
device_id: 5bb607cec18e50a97f334c94836fa9f0
|
||||
entity_id: binary_sensor.aqs_woonkamer2_aqs_woonkamer2_still_target
|
||||
domain: binary_sensor
|
||||
condition: []
|
||||
action:
|
||||
- type: turn_on
|
||||
device_id: 5bb607cec18e50a97f334c94836fa9f0
|
||||
entity_id: light.aqs_woonkamer2_aqs_woonkamer2_rgb_light
|
||||
domain: light
|
||||
brightness_pct: 50
|
||||
mode: single
|
||||
- id: '1686848917898'
|
||||
alias: Tuin - automatisch water op timer
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: device
|
||||
type: turned_on
|
||||
device_id: e1fd7471fc9fcde9a19ec7175a486dd9
|
||||
entity_id: switch.valve
|
||||
domain: switch
|
||||
condition: []
|
||||
action:
|
||||
- service: notify.mobile_app_iphone_van_willem
|
||||
data:
|
||||
message: Water timer gestart
|
||||
- service: script.tuin_water_op_timer
|
||||
data: {}
|
||||
mode: single
|
||||
|
||||
@@ -1,55 +0,0 @@
|
||||
blueprint:
|
||||
name: Lights On At Sunset
|
||||
description: Turn on the following lights at sunset
|
||||
domain: automation
|
||||
input:
|
||||
target_light:
|
||||
name: Lights
|
||||
description: This is the light (or lights) that will be activated at sunset
|
||||
selector:
|
||||
target:
|
||||
entity:
|
||||
domain: light
|
||||
target_brightness:
|
||||
name: Brightness
|
||||
description: Brightness of the light(s) when they're activated
|
||||
default: 50
|
||||
selector:
|
||||
number:
|
||||
min: 5.0
|
||||
max: 100.0
|
||||
mode: slider
|
||||
step: 5.0
|
||||
unit_of_measurement: '%'
|
||||
elevation_shift:
|
||||
name: Elevation Shift
|
||||
description: Using an elevation offset (height of sun relative to the horizon)
|
||||
to shift the sunset trigger, either earlier or later. Positive values bring
|
||||
the automation start time forward, whilst negative values delay the start
|
||||
time. To approximate Golden Hour - set the Elevation Offset to 1.
|
||||
default: 0.0
|
||||
selector:
|
||||
number:
|
||||
min: -3.0
|
||||
max: 3.0
|
||||
mode: slider
|
||||
step: 1.0
|
||||
source_url: https://gist.github.com/CyanAutomation/1b8bafd033f73e3c24e42e8f381ff906
|
||||
mode: single
|
||||
variables:
|
||||
target_brightness: !input target_brightness
|
||||
target_light: !input target_light
|
||||
trigger:
|
||||
platform: numeric_state
|
||||
entity_id: sun.sun
|
||||
attribute: elevation
|
||||
below: !input elevation_shift
|
||||
condition:
|
||||
condition: sun
|
||||
after: sunrise
|
||||
after_offset: 01:00:00
|
||||
action:
|
||||
- service: light.turn_on
|
||||
target: !input target_light
|
||||
data_template:
|
||||
brightness_pct: '{{ target_brightness | int }}'
|
||||
@@ -1,183 +0,0 @@
|
||||
blueprint:
|
||||
name: Turn on light, switch, scene, script or group based on motion and illuminance.
|
||||
description: "Turn on a light, switch, scene, script or group based on motion detection,\
|
||||
\ and low light level.\nThis blueprint uses helper entities you have to create\
|
||||
\ yourself for some input values, to be able to dynamically set limits. For instructions\
|
||||
\ on creating the helper entities take a look in the Home Assistant Community\
|
||||
\ forum topic: https://community.home-assistant.io/t/turn-on-light-switch-scene-or-script-based-on-motion-and-illuminance-more-conditions/257085\n\
|
||||
\nRequired entities:\n - Motion sensor (single sensor or group)\n - Target entity\
|
||||
\ (light, switch, scene or script)\n\n\nOptional features:\n- You can set a cutoff\
|
||||
\ entity of which the value determines whether the illuminance level is low and\
|
||||
\ the automation needs to trigger.\n- You can define a blocking entity, which\
|
||||
\ blocks the automation from running when this entity's state is on.\n- You van\
|
||||
\ define a turn-off blocking entity, which blocks the entity from turning off\
|
||||
\ after the set delay.\n- Time limits can also be defined to limit the time before\
|
||||
\ and after the automation should trigger.\n- If you want the entity to turn off\
|
||||
\ after a certain amount of minutes, you can use the Wait Time input.\n- If you\
|
||||
\ want another entity than the target_entity to turn off after the delay, you\
|
||||
\ can define a separate Turn-off entity.\n- If you do not enable the optional\
|
||||
\ entities the automation will skip these conditions.\n\n\nOptional entities:\n\
|
||||
- Illuminance sensor (sensor in illuminance class)\n- Illuminance cutoff value\
|
||||
\ (input_number)\n- Blocking entity (any entity with state on/off)\n- Time limit\
|
||||
\ before (input_datetime)\n- Time limit after (input_datetime)\n- Turn off wait\
|
||||
\ time (input_number defining amount in minutes)\n- Turn off entity (any entity_id\
|
||||
\ that needs to be turned off after wait)\n"
|
||||
domain: automation
|
||||
input:
|
||||
motion_sensor:
|
||||
name: Motion Sensor
|
||||
description: This sensor will trigger the turning on of the target entity.
|
||||
selector:
|
||||
entity: {}
|
||||
target_entity:
|
||||
name: Target entity.
|
||||
description: The light, switch, scene to turn on (or script to run) when the
|
||||
automation is triggered.
|
||||
selector:
|
||||
entity: {}
|
||||
illuminance_sensor:
|
||||
name: (OPTIONAL) Illuminance sensor
|
||||
description: This sensor will be used to determine the illumination.
|
||||
default:
|
||||
selector:
|
||||
entity:
|
||||
domain: sensor
|
||||
device_class: illuminance
|
||||
multiple: false
|
||||
illuminance_cutoff:
|
||||
name: (OPTIONAL) Illuminance cutoff value
|
||||
description: This input_number will be used to compare to the current illumination
|
||||
to determine if it is low.
|
||||
default:
|
||||
selector:
|
||||
entity:
|
||||
domain: input_number
|
||||
multiple: false
|
||||
blocker_entity:
|
||||
name: (OPTIONAL) Blocking entity
|
||||
description: If this entity's state is on, it will prevent the automation from
|
||||
running. E.g. sleepmode or away mode.
|
||||
default:
|
||||
selector:
|
||||
entity: {}
|
||||
time_limit_after:
|
||||
name: (OPTIONAL) Only run after time.
|
||||
description: Automation will only run when time is later than this input_datetime
|
||||
value.
|
||||
default:
|
||||
selector:
|
||||
entity:
|
||||
domain: input_datetime
|
||||
multiple: false
|
||||
time_limit_before:
|
||||
name: (OPTIONAL) Only run before time.
|
||||
description: Automation will only run when time is earlier than this input_datetime
|
||||
value.
|
||||
default:
|
||||
selector:
|
||||
entity:
|
||||
domain: input_datetime
|
||||
multiple: false
|
||||
no_motion_wait:
|
||||
name: (OPTIONAL) Turn off wait time (minutes)
|
||||
description: Time in minutes to leave the target entity on after last motion
|
||||
is detected. If not used entity will not auto turn off.
|
||||
default:
|
||||
selector:
|
||||
entity:
|
||||
domain: input_number
|
||||
multiple: false
|
||||
turn_off_blocker_entity:
|
||||
name: (OPTIONAL) Turn-off Blocking entity
|
||||
description: If this entity's state is on, it will prevent the target entity
|
||||
from turning off after the set delay.
|
||||
default:
|
||||
selector:
|
||||
entity: {}
|
||||
target_off_entity:
|
||||
name: (OPTIONAL) Turn-off entity
|
||||
description: If defined, this entity will be turned off instead of the default
|
||||
target entity. This can be helpful when using target entities of type scene
|
||||
or script.
|
||||
default:
|
||||
selector:
|
||||
entity: {}
|
||||
source_url: https://gist.github.com/freakshock88/2311759ba64f929f6affad4c0a67110b
|
||||
mode: restart
|
||||
max_exceeded: silent
|
||||
variables:
|
||||
target_entity: !input 'target_entity'
|
||||
illuminance_currently: !input 'illuminance_sensor'
|
||||
illuminance_cutoff: !input 'illuminance_cutoff'
|
||||
blocker_entity: !input 'blocker_entity'
|
||||
time_limit_before: !input 'time_limit_before'
|
||||
time_limit_after: !input 'time_limit_after'
|
||||
no_motion_wait: !input 'no_motion_wait'
|
||||
entity_domain: '{{ states[target_entity].domain }}'
|
||||
turn_off_blocker_entity: !input 'turn_off_blocker_entity'
|
||||
target_off_entity: !input 'target_off_entity'
|
||||
trigger:
|
||||
platform: state
|
||||
entity_id: !input 'motion_sensor'
|
||||
to: 'on'
|
||||
condition:
|
||||
- condition: template
|
||||
value_template: '{% set illuminance_defined = illuminance_currently != none and
|
||||
illuminance_cutoff != none %} {% set illuminance_defined_and_low = (illuminance_defined
|
||||
and (states(illuminance_currently) | int(0) < states(illuminance_cutoff) | int(0))) %}
|
||||
|
||||
{% set target_entity_domain_supports_on_state_check = entity_domain != ''scene''
|
||||
and entity_domain != ''script'' %} {{ ( target_entity_domain_supports_on_state_check
|
||||
and states(target_entity) == ''on'') or ( target_entity_domain_supports_on_state_check
|
||||
and states(target_entity) == ''off'' and not illuminance_defined) or ( target_entity_domain_supports_on_state_check
|
||||
and states(target_entity) == ''off'' and illuminance_defined_and_low) or ( not
|
||||
target_entity_domain_supports_on_state_check and illuminance_defined_and_low)
|
||||
or ( not target_entity_domain_supports_on_state_check and not illuminance_defined)
|
||||
}}
|
||||
|
||||
'
|
||||
- condition: template
|
||||
value_template: '{{ (blocker_entity == none) or (states(blocker_entity) == ''off'')
|
||||
}}'
|
||||
- condition: template
|
||||
value_template: "{% set current_time = now().strftime(\"%H:%M\") %}\n{% if time_limit_before\
|
||||
\ == none and time_limit_after == none %} true {% endif %}\n{% if time_limit_before\
|
||||
\ != none and time_limit_after == none %} {% set current_time_is_before_limit\
|
||||
\ = current_time < states(time_limit_before) %} {{ current_time_is_before_limit\
|
||||
\ }} {% elif time_limit_before == none and time_limit_after != none %} {% set\
|
||||
\ current_time_is_after_limit = current_time > states(time_limit_after) %} {{\
|
||||
\ current_time_is_after_limit }} {% endif %}\n{% if time_limit_before != none\
|
||||
\ and time_limit_after != none %} {% set before_limit_is_tomorrow = states(time_limit_before)\
|
||||
\ < states(time_limit_after) %} {% set current_time_is_before_limit = current_time\
|
||||
\ < states(time_limit_before) %} {% set current_time_is_after_limit = current_time\
|
||||
\ > states(time_limit_after) %} {% set time_window_spans_midnight = states(time_limit_after)\
|
||||
\ > states(time_limit_before) %}\n {% if time_window_spans_midnight != none\
|
||||
\ and time_window_spans_midnight and before_limit_is_tomorrow %}\n {{ current_time_is_after_limit\
|
||||
\ or current_time_is_before_limit }}\n {% elif time_window_spans_midnight !=\
|
||||
\ none and not time_window_spans_midnight %}\n {{ current_time_is_before_limit\
|
||||
\ and current_time_is_after_limit }}\n {% endif %}\n{% endif %}\n"
|
||||
action:
|
||||
- service: homeassistant.turn_on
|
||||
entity_id: !input 'target_entity'
|
||||
- condition: template
|
||||
value_template: '{{ no_motion_wait != none }}'
|
||||
- wait_for_trigger:
|
||||
platform: state
|
||||
entity_id: !input 'motion_sensor'
|
||||
from: 'on'
|
||||
to: 'off'
|
||||
- delay:
|
||||
minutes: '{{ states(no_motion_wait) | int(0) }}'
|
||||
- condition: template
|
||||
value_template: '{{ (turn_off_blocker_entity == none) or (states(turn_off_blocker_entity)
|
||||
== ''off'') }}'
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ (target_off_entity != none) }}'
|
||||
sequence:
|
||||
- service: homeassistant.turn_off
|
||||
entity_id: !input 'target_off_entity'
|
||||
default:
|
||||
- service: homeassistant.turn_off
|
||||
entity_id: !input 'target_entity'
|
||||
@@ -1,54 +0,0 @@
|
||||
blueprint:
|
||||
name: Motion-activated Light
|
||||
description: Turn on a light when motion is detected.
|
||||
domain: automation
|
||||
source_url: https://github.com/home-assistant/core/blob/dev/homeassistant/components/automation/blueprints/motion_light.yaml
|
||||
input:
|
||||
motion_entity:
|
||||
name: Motion Sensor
|
||||
selector:
|
||||
entity:
|
||||
domain: binary_sensor
|
||||
device_class: motion
|
||||
light_target:
|
||||
name: Light
|
||||
selector:
|
||||
target:
|
||||
entity:
|
||||
domain: light
|
||||
no_motion_wait:
|
||||
name: Wait time
|
||||
description: Time to leave the light on after last motion is detected.
|
||||
default: 120
|
||||
selector:
|
||||
number:
|
||||
min: 0
|
||||
max: 3600
|
||||
unit_of_measurement: seconds
|
||||
|
||||
# If motion is detected within the delay,
|
||||
# we restart the script.
|
||||
mode: restart
|
||||
max_exceeded: silent
|
||||
|
||||
trigger:
|
||||
platform: state
|
||||
entity_id: !input motion_entity
|
||||
from: "off"
|
||||
to: "on"
|
||||
|
||||
action:
|
||||
- alias: "Turn on the light"
|
||||
service: light.turn_on
|
||||
target: !input light_target
|
||||
- alias: "Wait until there is no motion from device"
|
||||
wait_for_trigger:
|
||||
platform: state
|
||||
entity_id: !input motion_entity
|
||||
from: "on"
|
||||
to: "off"
|
||||
- alias: "Wait the number of seconds that has been set"
|
||||
delay: !input no_motion_wait
|
||||
- alias: "Turn off the light"
|
||||
service: light.turn_off
|
||||
target: !input light_target
|
||||
@@ -1,46 +0,0 @@
|
||||
blueprint:
|
||||
name: Zone Notification
|
||||
description: Send a notification to a device when a person leaves a specific zone.
|
||||
domain: automation
|
||||
source_url: https://github.com/home-assistant/core/blob/dev/homeassistant/components/automation/blueprints/notify_leaving_zone.yaml
|
||||
input:
|
||||
person_entity:
|
||||
name: Person
|
||||
selector:
|
||||
entity:
|
||||
domain: person
|
||||
zone_entity:
|
||||
name: Zone
|
||||
selector:
|
||||
entity:
|
||||
domain: zone
|
||||
notify_device:
|
||||
name: Device to notify
|
||||
description: Device needs to run the official Home Assistant app to receive notifications.
|
||||
selector:
|
||||
device:
|
||||
integration: mobile_app
|
||||
|
||||
trigger:
|
||||
platform: state
|
||||
entity_id: !input person_entity
|
||||
|
||||
variables:
|
||||
zone_entity: !input zone_entity
|
||||
# This is the state of the person when it's in this zone.
|
||||
zone_state: "{{ states[zone_entity].name }}"
|
||||
person_entity: !input person_entity
|
||||
person_name: "{{ states[person_entity].name }}"
|
||||
|
||||
condition:
|
||||
condition: template
|
||||
# The first case handles leaving the Home zone which has a special state when zoning called 'home'.
|
||||
# The second case handles leaving all other zones.
|
||||
value_template: "{{ zone_entity == 'zone.home' and trigger.from_state.state == 'home' and trigger.to_state.state != 'home' or trigger.from_state.state == zone_state and trigger.to_state.state != zone_state }}"
|
||||
|
||||
action:
|
||||
- alias: "Notify that a person has left the zone"
|
||||
domain: mobile_app
|
||||
type: notify
|
||||
device_id: !input notify_device
|
||||
message: "{{ person_name }} has left {{ zone_state }}"
|
||||
@@ -1,671 +0,0 @@
|
||||
blueprint:
|
||||
name: Monitor the state of an appliance - by leofabri
|
||||
description: "`- Version: 2.1.1 -`\n\nThis automation can detect and monitor the
|
||||
state of your appliances by observing their power consumption.\nThis automation
|
||||
blueprint is universal and very versatile. You can use it with anything that consumes
|
||||
energy: a washing machine, a dishwasher, your fridge, the TV, etc. I refer to
|
||||
the appliance's operations with the generic word job. A job could be anything
|
||||
(washing, rinsing...).\n\nYou can pair this project with other automations and
|
||||
services. I designed it with flexibility in mind. For instance, if you want to
|
||||
\ send alerts when the washing machine is not resuming a job, you want to send
|
||||
TTS notifications, or if your fridge is somehow not working and de-icing you
|
||||
can see that happening. All you needed is just a little bit of creativity. You
|
||||
can use the state machine and the custom actions to extend it.\n\nThe state machine:\n*
|
||||
**<ins>unplugged</ins>** - The appliance is no longer powered. It happens when
|
||||
the user manually turns off the smart socket (from HA or the socket itself).\n*
|
||||
**<ins>idle</ins>** - There is no pending job, the machine is powered but idling.\n*
|
||||
**paused** - Indicates that a job is pending (incomplete cycle), but the appliance
|
||||
is not performing it. The inhibitors of these state are the ***detached_overload***
|
||||
and ***unplugged*** states. In this condition the power consumption is lower than
|
||||
the finishing power threshold. The appliance must be off (maybe the user turned
|
||||
it off manually, or maybe the job needs some time to recover). The blueprint is
|
||||
waiting for the appliance to resume. **Pro Tip!** You could also use this to diagnose
|
||||
and warn if a job is not resumed after x minutes.\n* **<ins>detached_overload</ins>**
|
||||
- This happens when, during a cycle, the appliance used too much power and was
|
||||
suspended. It is also technically unplugged but we don't say that.\n* **<ins>job_ongoing</ins>**
|
||||
- Triggered in two cases:\n * when a new job cycle begins: the previous one is
|
||||
completed, and the Starting Power threshold is surpassed.\n * when a job is resumed.\n\n*
|
||||
**<ins>job_completed</ins>** - Triggered when the current incomplete job cycle
|
||||
is finished. The appliance consumes less than the Finishing Power threshold (with
|
||||
the possibility of selecting for how long) \n\n<strong>First setup?</strong> <i>[Follow
|
||||
the instructions](https://github.com/leofabri/hassio_appliance-status-monitor)</i>"
|
||||
domain: automation
|
||||
input:
|
||||
appliance_socket:
|
||||
name: Appliance Smart Socket
|
||||
description: '(*REQUIRED)
|
||||
|
||||
|
||||
The socket that is used to control this appliance.'
|
||||
default: []
|
||||
selector:
|
||||
entity:
|
||||
domain: switch
|
||||
multiple: false
|
||||
appliance_power_sensor:
|
||||
name: Appliance Power Consumption
|
||||
description: '(*REQUIRED)
|
||||
|
||||
|
||||
The power entity with the current power absorption in Watts.'
|
||||
default: []
|
||||
selector:
|
||||
entity:
|
||||
domain: sensor
|
||||
multiple: false
|
||||
appliance_starting_power_threshold:
|
||||
name: Starting power threshold
|
||||
description: '(*REQUIRED)
|
||||
|
||||
|
||||
Power threshold above which we assume the appliance has started a new job
|
||||
or is resuming the current one (job_ongoing state).'
|
||||
default: 5
|
||||
selector:
|
||||
number:
|
||||
min: 1.0
|
||||
max: 100.0
|
||||
unit_of_measurement: W
|
||||
mode: slider
|
||||
step: 1.0
|
||||
appliance_finishing_power_threshold:
|
||||
name: Finishing power threshold
|
||||
description: '(*REQUIRED)
|
||||
|
||||
|
||||
Power threshold below which we assume the appliance has finished a job (job_completed
|
||||
state).'
|
||||
default: 3
|
||||
selector:
|
||||
number:
|
||||
min: 1.0
|
||||
max: 100.0
|
||||
unit_of_measurement: W
|
||||
mode: slider
|
||||
step: 1.0
|
||||
appliance_suspended_sensor:
|
||||
name: Appliance Suspended entity
|
||||
description: '(OPTIONAL)
|
||||
|
||||
|
||||
An input_number variable that turns into a value > 0 when an overload occurs.
|
||||
That would indicate that the machine was disconnected.'
|
||||
default: []
|
||||
selector:
|
||||
entity:
|
||||
domain: input_number
|
||||
multiple: false
|
||||
appliance_state_machine:
|
||||
name: Appliance State Machine
|
||||
description: '(*REQUIRED | Helper | Name: <i><strong><your_appliance_name>_state_machine</strong></i>
|
||||
| [?](https://github.com/leofabri/hassio_appliance-status-monitor/blob/main/home%20assistant/packages/your_appliance_name.yaml#L18))
|
||||
|
||||
|
||||
The State Machine entity of this appliance.'
|
||||
default: []
|
||||
selector:
|
||||
entity:
|
||||
domain: input_select
|
||||
multiple: false
|
||||
appliance_job_cycle:
|
||||
name: Appliance Job Cycle
|
||||
description: '(*REQUIRED | Helper | Name: <i><strong><your_appliance_name>_job_cycle</strong></i>
|
||||
| [?](https://github.com/leofabri/hassio_appliance-status-monitor/blob/main/home%20assistant/packages/your_appliance_name.yaml#L9))
|
||||
|
||||
|
||||
A sensor that stores whether the appliance is still in a job cycle or not.<br>
|
||||
|
||||
This has to be a boolean (so: 0 or 1).<br> <strong>off</strong> -> the appliance
|
||||
is not performing any job<br> <strong>on</strong> -> the job is incomplete.
|
||||
<br>
|
||||
|
||||
<strong>Note that this entity does not provide any information about the detailed
|
||||
status of the machine (like an overload stuation). For that, you need the
|
||||
state machine.</strong> <br>'
|
||||
default: []
|
||||
selector:
|
||||
entity:
|
||||
domain: input_boolean
|
||||
multiple: false
|
||||
delayed_job_completion_timer:
|
||||
name: Delayed Job Completion timer
|
||||
description: '(*REQUIRED | Helper | Name: <i><strong><your_appliance_name>_delayed_job_completion_timer</i></strong>
|
||||
| [?](https://github.com/leofabri/hassio_appliance-status-monitor/blob/main/home%20assistant/packages/your_appliance_name.yaml#L2))
|
||||
|
||||
|
||||
The timer that will allow to ''wait'' & ''see'' before assuming that a job
|
||||
has been completed'
|
||||
default: []
|
||||
selector:
|
||||
entity:
|
||||
domain: timer
|
||||
multiple: false
|
||||
automation_self_trigger:
|
||||
name: Automation Self-triggering entity
|
||||
description: '(*REQUIRED | Helper | Name: <i><strong><your_appliance_name>_automation_self_trigger</i></strong>
|
||||
| [?](https://github.com/leofabri/hassio_appliance-status-monitor/blob/main/home%20assistant/packages/your_appliance_name.yaml#L13))
|
||||
|
||||
|
||||
This entity is in charge of triggering the execution of the automation when
|
||||
it changes from off -> on.
|
||||
|
||||
Sometimes, if the power consumption of the appliance is perfectly steady,
|
||||
no other trigger will work, but this will.
|
||||
|
||||
This variable allows the automation to call itself when some conditions are
|
||||
met.'
|
||||
default: []
|
||||
selector:
|
||||
entity:
|
||||
domain: input_boolean
|
||||
multiple: false
|
||||
delayed_job_completion_duration:
|
||||
name: Delayed Job Completion duration
|
||||
description: '(OPTIONAL | Helper | <i><strong>Suggested: 0, Default: 0 | DISABLED</strong></i>)
|
||||
|
||||
|
||||
During a job cycle, some appliances may intermittently use less power than
|
||||
the finishing power threshold, thus entering the job_completed state (even
|
||||
when the job is not finished).
|
||||
|
||||
With this value set, the automation will wait for the indicated time in seconds,
|
||||
and see if in that timespan the power consumption rises.
|
||||
|
||||
...
|
||||
|
||||
<strong>WARNING:</strong> Setting a duration introduces a delay on the transition
|
||||
to the ''job_completed'' state. Please make sure that you really need this,
|
||||
or leave it 0 if unsure.'
|
||||
default: 0.0
|
||||
selector:
|
||||
number:
|
||||
min: 0.0
|
||||
max: 900.0
|
||||
step: 1.0
|
||||
unit_of_measurement: seconds
|
||||
mode: slider
|
||||
actions_new_job_cycle_begins:
|
||||
name: Action(s) when a new job cycle begins
|
||||
description: 'Executed when the appliance starts a new job cycle (<strong>idle
|
||||
-> job_ongoing</strong> state). Note that here the job cycle indicator is
|
||||
off, which means that no previous job has to be completed.
|
||||
|
||||
...
|
||||
|
||||
**WARNING:** Just use non-blocking actions in this space! No delays, actionable
|
||||
notifications, TTS, waits, or anything that takes time to execute. Please
|
||||
consider that the permanence in this state could last for a limited amount
|
||||
of time (seconds, potentially!). This section is meant to be used to trigger
|
||||
other things.
|
||||
|
||||
If you really need to trigger long operations, a clean solution is to dispatch
|
||||
the work by calling other services or using the State Machine entity to wake
|
||||
up other external automations.'
|
||||
default: []
|
||||
selector:
|
||||
action: {}
|
||||
actions_job_cycle_resumes:
|
||||
name: Action(s) when a job cycle resumes
|
||||
description: 'Executed when a pending job cycle is resumed (<strong>paused |
|
||||
unplugged | detached_overload -> job_ongoing</strong> state). Note that in
|
||||
this situation, the job cycle indicator is still on. That''s how I know that
|
||||
the appliance is resuming and not startig a job.
|
||||
|
||||
...
|
||||
|
||||
**WARNING:** Just use non-blocking actions in this space! No delays, actionable
|
||||
notifications, TTS, waits, or anything that takes time to execute. Please
|
||||
consider that the permanence in this state could last for a limited amount
|
||||
of time (seconds, potentially!). This section is meant to be used to trigger
|
||||
other things.
|
||||
|
||||
If you really need to trigger long operations, a clean solution is to dispatch
|
||||
the work by calling other services or using the State Machine entity to wake
|
||||
up other external automations.'
|
||||
default: []
|
||||
selector:
|
||||
action: {}
|
||||
actions_job_cycle_ends:
|
||||
name: Action(s) when a job cycle is finished
|
||||
description: 'Executed when the appliance finishes a job cycle (<strong>job_ongoing
|
||||
-> job_completed</strong> state).
|
||||
|
||||
...
|
||||
|
||||
**WARNING:** Just use non-blocking actions in this space! No delays, actionable
|
||||
notifications, TTS, waits, or anything that takes time to execute. Please
|
||||
consider that the permanence in this state could last for a limited amount
|
||||
of time (seconds, potentially!). This section is meant to be used to trigger
|
||||
other things.
|
||||
|
||||
If you really need to trigger long operations, a clean solution is to dispatch
|
||||
the work by calling other services or using the State Machine entity to wake
|
||||
up other external automations.'
|
||||
default: []
|
||||
selector:
|
||||
action: {}
|
||||
actions_unplugged_overload:
|
||||
name: Action(s) when an overload occurs
|
||||
description: 'Executed when the appliance is detected as unplugged (because
|
||||
of an overload situation).
|
||||
|
||||
...
|
||||
|
||||
**WARNING:** Just use non-blocking actions in this space! No delays, actionable
|
||||
notifications, TTS, waits, or anything that takes time to execute. Please
|
||||
consider that the permanence in this state could last for a limited amount
|
||||
of time (seconds, potentially!). This section is meant to be used to trigger
|
||||
other things.
|
||||
|
||||
If you really need to trigger long operations, a clean solution is to dispatch
|
||||
the work by calling other services or using the State Machine entity to wake
|
||||
up other external automations.'
|
||||
default: []
|
||||
selector:
|
||||
action: {}
|
||||
actions_paused_after_overload:
|
||||
name: Action(s) when the overload situation is solved, now paused
|
||||
description: 'Executed when the state changes from <strong>detached_overload
|
||||
-> paused</strong> (NOT resuming the job).
|
||||
|
||||
...
|
||||
|
||||
**WARNING:** Just use non-blocking actions in this space! No delays, actionable
|
||||
notifications, TTS, waits, or anything that takes time to execute. Please
|
||||
consider that the permanence in this state could last for a limited amount
|
||||
of time (seconds, potentially!). This section is meant to be used to trigger
|
||||
other things.
|
||||
|
||||
If you really need to trigger long operations, a clean solution is to dispatch
|
||||
the work by calling other services or using the State Machine entity to wake
|
||||
up other external automations.'
|
||||
default: []
|
||||
selector:
|
||||
action: {}
|
||||
actions_resuming_after_overload:
|
||||
name: Action(s) when the overload situation is solved, now resuming
|
||||
description: 'Executed when the state changes from <strong>detached_overload
|
||||
-> job_ongoing</strong> (resuming the previous job).
|
||||
|
||||
...
|
||||
|
||||
**WARNING:** Just use non-blocking actions in this space! No delays, actionable
|
||||
notifications, TTS, waits, or anything that takes time to execute. Please
|
||||
consider that the permanence in this state could last for a limited amount
|
||||
of time (seconds, potentially!). This section is meant to be used to trigger
|
||||
other things.
|
||||
|
||||
If you really need to trigger long operations, a clean solution is to dispatch
|
||||
the work by calling other services or using the State Machine entity to wake
|
||||
up other external automations.'
|
||||
default: []
|
||||
selector:
|
||||
action: {}
|
||||
actions_paused_after_unplugged:
|
||||
name: Action(s) when the appliance is plugged back in, now paused
|
||||
description: 'Executed when the state changes from <strong>unplugged -> paused</strong>
|
||||
(NOT resuming the job).
|
||||
|
||||
...
|
||||
|
||||
**WARNING:** Just use non-blocking actions in this space! No delays, actionable
|
||||
notifications, TTS, waits, or anything that takes time to execute. Please
|
||||
consider that the permanence in this state could last for a limited amount
|
||||
of time (seconds, potentially!). This section is meant to be used to trigger
|
||||
other things.
|
||||
|
||||
If you really need to trigger long operations, a clean solution is to dispatch
|
||||
the work by calling other services or using the State Machine entity to wake
|
||||
up other external automations.'
|
||||
default: []
|
||||
selector:
|
||||
action: {}
|
||||
source_url: https://github.com/leofabri/hassio_appliance-status-monitor/blob/main/appliance-status-monitor.yaml
|
||||
variables:
|
||||
appliance_socket: !input appliance_socket
|
||||
appliance_suspended_sensor: !input appliance_suspended_sensor
|
||||
delayed_job_completion_duration: !input delayed_job_completion_duration
|
||||
delayed_job_completion_timer: !input delayed_job_completion_timer
|
||||
trigger:
|
||||
- platform: state
|
||||
entity_id: !input appliance_power_sensor
|
||||
id: power_event
|
||||
- platform: state
|
||||
entity_id: !input appliance_socket
|
||||
id: socket_state_change_event
|
||||
- platform: state
|
||||
entity_id: !input appliance_state_machine
|
||||
from: detached_overload
|
||||
to: paused
|
||||
id: paused_after_overload_event
|
||||
- platform: state
|
||||
entity_id: !input appliance_state_machine
|
||||
from: unplugged
|
||||
to: paused
|
||||
id: paused_after_unplugged_event
|
||||
- platform: state
|
||||
entity_id: !input appliance_state_machine
|
||||
from: detached_overload
|
||||
to: job_ongoing
|
||||
id: resuming_after_paused_overload_event
|
||||
- platform: state
|
||||
entity_id: !input automation_self_trigger
|
||||
from: 'off'
|
||||
to: 'on'
|
||||
id: automation_self_triggered
|
||||
- platform: event
|
||||
event_type: timer.finished
|
||||
event_data:
|
||||
entity_id: !input delayed_job_completion_timer
|
||||
id: job_completed_timer_finished
|
||||
- platform: homeassistant
|
||||
event: start
|
||||
id: home_assistant_started_event
|
||||
- platform: event
|
||||
event_type:
|
||||
- automation_reloaded
|
||||
id: automation_reloaded_event
|
||||
condition:
|
||||
- condition: or
|
||||
conditions:
|
||||
- condition: trigger
|
||||
id: power_event
|
||||
- condition: trigger
|
||||
id: socket_state_change_event
|
||||
- condition: trigger
|
||||
id: paused_after_overload_event
|
||||
- condition: trigger
|
||||
id: paused_after_unplugged_event
|
||||
- condition: trigger
|
||||
id: resuming_after_paused_overload_event
|
||||
- condition: trigger
|
||||
id: automation_self_triggered
|
||||
- condition: trigger
|
||||
id: job_completed_timer_finished
|
||||
- condition: trigger
|
||||
id: home_assistant_started_event
|
||||
- condition: trigger
|
||||
id: automation_reloaded_event
|
||||
action:
|
||||
- service: input_boolean.turn_off
|
||||
data: {}
|
||||
target:
|
||||
entity_id: !input automation_self_trigger
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ appliance_suspended_sensor|length > 0 }}'
|
||||
- condition: and
|
||||
conditions:
|
||||
- condition: template
|
||||
value_template: '{{ states(appliance_suspended_sensor) | float > 0.0 }}'
|
||||
- condition: state
|
||||
entity_id: !input appliance_job_cycle
|
||||
state: 'on'
|
||||
sequence:
|
||||
- condition: not
|
||||
conditions:
|
||||
- condition: state
|
||||
entity_id: !input appliance_state_machine
|
||||
state: detached_overload
|
||||
- service: input_select.select_option
|
||||
data:
|
||||
option: detached_overload
|
||||
target:
|
||||
entity_id: !input appliance_state_machine
|
||||
- choose: []
|
||||
default: !input actions_unplugged_overload
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: state
|
||||
entity_id: !input appliance_job_cycle
|
||||
state: 'on'
|
||||
- condition: template
|
||||
value_template: '{% if appliance_suspended_sensor|length > 0 %}{{ states(appliance_suspended_sensor)
|
||||
| float <= 0.0 }}{% else %}true{% endif %}'
|
||||
- condition: template
|
||||
value_template: '{{ states(appliance_socket) == ''on'' }}'
|
||||
- condition: numeric_state
|
||||
entity_id: !input appliance_power_sensor
|
||||
below: !input appliance_finishing_power_threshold
|
||||
- condition: or
|
||||
conditions:
|
||||
- condition: state
|
||||
entity_id: !input appliance_state_machine
|
||||
state: detached_overload
|
||||
- condition: state
|
||||
entity_id: !input appliance_state_machine
|
||||
state: unplugged
|
||||
- condition: not
|
||||
conditions:
|
||||
- condition: state
|
||||
entity_id: !input appliance_state_machine
|
||||
state: paused
|
||||
sequence:
|
||||
- service: input_select.select_option
|
||||
data:
|
||||
option: paused
|
||||
target:
|
||||
entity_id: !input appliance_state_machine
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ states(appliance_socket) == ''off'' }}'
|
||||
- condition: not
|
||||
conditions:
|
||||
- condition: template
|
||||
value_template: '{{ appliance_suspended_sensor|length > 0 }}'
|
||||
- condition: and
|
||||
conditions:
|
||||
- condition: template
|
||||
value_template: '{% if appliance_suspended_sensor|length > 0 %}{{ states(appliance_suspended_sensor)
|
||||
| float > 0.0 }}{% else %}false{% endif %}'
|
||||
- condition: state
|
||||
entity_id: !input appliance_state_machine
|
||||
state: detached_overload
|
||||
sequence:
|
||||
- condition: not
|
||||
conditions:
|
||||
- condition: state
|
||||
entity_id: !input appliance_state_machine
|
||||
state: unplugged
|
||||
- service: input_select.select_option
|
||||
data:
|
||||
option: unplugged
|
||||
target:
|
||||
entity_id: !input appliance_state_machine
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ states(delayed_job_completion_timer) == ''active'' }}'
|
||||
sequence:
|
||||
- service: timer.cancel
|
||||
data: {}
|
||||
target:
|
||||
entity_id: !input delayed_job_completion_timer
|
||||
- conditions:
|
||||
- condition: trigger
|
||||
id: paused_after_overload_event
|
||||
sequence:
|
||||
- choose: []
|
||||
default: !input actions_paused_after_overload
|
||||
- conditions:
|
||||
- condition: trigger
|
||||
id: paused_after_unplugged_event
|
||||
sequence:
|
||||
- choose: []
|
||||
default: !input actions_paused_after_unplugged
|
||||
- conditions:
|
||||
- condition: trigger
|
||||
id: resuming_after_paused_overload_event
|
||||
sequence:
|
||||
- choose: []
|
||||
default: !input actions_resuming_after_overload
|
||||
default:
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ states(appliance_socket) == ''on'' }}'
|
||||
- condition: numeric_state
|
||||
entity_id: !input appliance_power_sensor
|
||||
above: !input appliance_starting_power_threshold
|
||||
sequence:
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ states(delayed_job_completion_timer) == ''active''
|
||||
}}'
|
||||
sequence:
|
||||
- service: timer.cancel
|
||||
data: {}
|
||||
target:
|
||||
entity_id: !input delayed_job_completion_timer
|
||||
- condition: not
|
||||
conditions:
|
||||
- condition: state
|
||||
entity_id: !input appliance_state_machine
|
||||
state: job_ongoing
|
||||
- service: input_select.select_option
|
||||
data:
|
||||
option: job_ongoing
|
||||
target:
|
||||
entity_id: !input appliance_state_machine
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: state
|
||||
entity_id: !input appliance_job_cycle
|
||||
state: 'off'
|
||||
sequence:
|
||||
- service: input_boolean.turn_on
|
||||
data: {}
|
||||
target:
|
||||
entity_id: !input appliance_job_cycle
|
||||
- choose: []
|
||||
default: !input actions_new_job_cycle_begins
|
||||
default:
|
||||
- choose: []
|
||||
default: !input actions_job_cycle_resumes
|
||||
- conditions:
|
||||
- condition: state
|
||||
entity_id: !input appliance_state_machine
|
||||
state: job_ongoing
|
||||
- condition: state
|
||||
entity_id: !input appliance_job_cycle
|
||||
state: 'on'
|
||||
- condition: template
|
||||
value_template: '{{ states(appliance_socket) == ''on'' }}'
|
||||
- condition: numeric_state
|
||||
entity_id: !input appliance_power_sensor
|
||||
below: !input appliance_finishing_power_threshold
|
||||
sequence:
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ states(delayed_job_completion_timer) != ''active''
|
||||
}}'
|
||||
- condition: not
|
||||
conditions:
|
||||
- condition: trigger
|
||||
id: job_completed_timer_finished
|
||||
sequence:
|
||||
- service: timer.start
|
||||
data: {}
|
||||
target:
|
||||
entity_id: !input delayed_job_completion_timer
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ delayed_job_completion_duration > 0 }}'
|
||||
sequence:
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: "{% if states(delayed_job_completion_timer) == 'active'
|
||||
%}\n {% set t_expiring_date = state_attr(delayed_job_completion_timer,
|
||||
'finishes_at') %}\n {% set t_remaining_sec = 0 if t_expiring_date
|
||||
== None else (as_datetime(t_expiring_date) - now()).total_seconds()
|
||||
| int %}\n {% set t_total_duration = state_attr(delayed_job_completion_timer,
|
||||
'duration') %}\n {% set duration_split = t_total_duration.split(':')
|
||||
%}\n {% set t_total_duration_sec = (duration_split[0] | int *
|
||||
3600) + (duration_split[1] | int * 60) + (duration_split[0] | int)
|
||||
%}\n {% set t_elapsed_sec = (t_total_duration_sec - t_remaining_sec)
|
||||
| int %}\n {{ t_elapsed_sec < (delayed_job_completion_duration)
|
||||
| int }}\n{% else %}\n {{0}}\n{% endif %}"
|
||||
sequence:
|
||||
- delay:
|
||||
seconds: "{% if states(delayed_job_completion_timer) == 'active'
|
||||
%}\n {% set t_expiring_date = state_attr(delayed_job_completion_timer,
|
||||
'finishes_at') %}\n {% set t_remaining_sec = 0 if t_expiring_date
|
||||
== None else (as_datetime(t_expiring_date) - now()).total_seconds()
|
||||
| int %}\n {% set t_total_duration = state_attr(delayed_job_completion_timer,
|
||||
'duration') %}\n {% set duration_split = t_total_duration.split(':')
|
||||
%}\n {% set t_total_duration_sec = (duration_split[0] | int
|
||||
* 3600) + (duration_split[1] | int * 60) + (duration_split[0]
|
||||
| int) %}\n {% set t_elapsed_sec = (t_total_duration_sec - t_remaining_sec)
|
||||
| int %}\n {% set t_remaining = ((delayed_job_completion_duration)
|
||||
| int) - t_elapsed_sec %}\n \n {{ 1 + t_remaining }}\n{% else
|
||||
%}\n {{ 1 + (delayed_job_completion_duration) | int }}\n{% endif
|
||||
%}"
|
||||
- service: input_boolean.turn_on
|
||||
data: {}
|
||||
target:
|
||||
entity_id: !input automation_self_trigger
|
||||
- condition: template
|
||||
value_template: '{{0}}'
|
||||
default: []
|
||||
- service: input_boolean.turn_off
|
||||
data: {}
|
||||
target:
|
||||
entity_id: !input appliance_job_cycle
|
||||
- service: input_select.select_option
|
||||
data:
|
||||
option: job_completed
|
||||
target:
|
||||
entity_id: !input appliance_state_machine
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ states(delayed_job_completion_timer) == ''active''
|
||||
}}'
|
||||
sequence:
|
||||
- service: timer.cancel
|
||||
data: {}
|
||||
target:
|
||||
entity_id: !input delayed_job_completion_timer
|
||||
- choose: []
|
||||
default: !input actions_job_cycle_ends
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: or
|
||||
conditions:
|
||||
- condition: trigger
|
||||
id: automation_self_triggered
|
||||
- condition: template
|
||||
value_template: '{{ delayed_job_completion_duration <= 0 }}'
|
||||
sequence:
|
||||
- delay:
|
||||
minutes: 1
|
||||
- service: input_boolean.turn_on
|
||||
data: {}
|
||||
target:
|
||||
entity_id: !input automation_self_trigger
|
||||
default:
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: state
|
||||
entity_id: !input appliance_job_cycle
|
||||
state: 'off'
|
||||
- condition: not
|
||||
conditions:
|
||||
- condition: state
|
||||
entity_id: !input appliance_state_machine
|
||||
state: idle
|
||||
sequence:
|
||||
- service: input_select.select_option
|
||||
data:
|
||||
option: idle
|
||||
target:
|
||||
entity_id: !input appliance_state_machine
|
||||
mode: restart
|
||||
max_exceeded: silent
|
||||
trace:
|
||||
stored_traces: 10
|
||||
@@ -1,260 +0,0 @@
|
||||
blueprint:
|
||||
name: Yet Another Motion Automation
|
||||
description: "# YAMA V10\n\nTurn on lights or scenes when motion is detected. \n\
|
||||
Four different scenes can be defined depending on time of day.\n\nFor Details\
|
||||
\ see this forum post:\nhttps://community.home-assistant.io/t/yama-yet-another-motion-automation-scenes-ambient-light-and-some-conditions/257062?u=networkingcat\n\
|
||||
\nCapabilitys:\n\n - Trigger on motion (in fact can be triggered by anything that\
|
||||
\ switches between “on” and off\")\n - Wait time for turning off\n - Only run\
|
||||
\ if entity is in desired state (optional)\n - Sun elevation check (optional)\n\
|
||||
\ - 4 Scenes for different times of day (optional)\n - Ambient support with time\
|
||||
\ frame (optional)\n - Default scene when motion stops (optional)\n - “no motion\
|
||||
\ blocker” with user choosable state (optional)\n"
|
||||
domain: automation
|
||||
source_url: https://gist.github.com/networkingcat/a1876d7e706e07c8bdcf974113940fb8
|
||||
input:
|
||||
motion_entity:
|
||||
name: Motion Sensor
|
||||
description: Motion Sensor or a group with Motion Sensors (But can be anything
|
||||
switching between "on" and "off")
|
||||
selector:
|
||||
entity: {}
|
||||
light_target:
|
||||
name: Light
|
||||
selector:
|
||||
target:
|
||||
entity:
|
||||
domain: light
|
||||
no_motion_wait:
|
||||
name: Wait time
|
||||
description: Time to leave the light on after last motion is detected.
|
||||
default: 120
|
||||
selector:
|
||||
number:
|
||||
min: 0.0
|
||||
max: 3600.0
|
||||
unit_of_measurement: seconds
|
||||
mode: slider
|
||||
step: 1.0
|
||||
automation_blocker:
|
||||
name: Automation Blocker (Optional)
|
||||
description: Only run if this boolean is in desired state (see next input)
|
||||
default:
|
||||
selector:
|
||||
entity: {}
|
||||
automation_blocker_boolean:
|
||||
name: Automation Blocker Chooser (Optional)
|
||||
description: Desired state of automation blocker, choose on for on and off for
|
||||
off
|
||||
default: false
|
||||
selector:
|
||||
boolean: {}
|
||||
no_motion_blocker:
|
||||
name: No Motion Blocker (Optional)
|
||||
description: No motion sequence is not run if this boolean is in desired state
|
||||
(see next input)
|
||||
default:
|
||||
selector:
|
||||
entity: {}
|
||||
no_motion_blocker_boolean:
|
||||
name: No Motion Chooser (Optional)
|
||||
description: Desired state of no motion blocker, choose on for on and off for
|
||||
off
|
||||
default: false
|
||||
selector:
|
||||
boolean: {}
|
||||
elevation_check:
|
||||
name: Sun elevation check (Optional)
|
||||
description: This is the angle between the sun and the horizon. Negative values
|
||||
mean the sun is BELOW the horizon.
|
||||
default: none
|
||||
selector:
|
||||
number:
|
||||
min: -90.0
|
||||
max: 90.0
|
||||
unit_of_measurement: degrees
|
||||
mode: slider
|
||||
step: 1.0
|
||||
scene_ambient:
|
||||
name: Ambient Scene (Optional)
|
||||
description: Scene for ambient state. Will be activated when no motion is detected.
|
||||
default: scene.none
|
||||
selector:
|
||||
entity:
|
||||
domain: scene
|
||||
multiple: false
|
||||
time_scene_ambient_start:
|
||||
name: Ambient time frame start (Optional)
|
||||
description: Time from which on ambient scene will be activated
|
||||
default: 00:00:00
|
||||
selector:
|
||||
time: {}
|
||||
time_scene_ambient_end:
|
||||
name: Ambient time frame end (Optional)
|
||||
description: Time from which on ambient scene will be not activated
|
||||
default: 00:00:00
|
||||
selector:
|
||||
time: {}
|
||||
scene_morning:
|
||||
name: Scene for the morning (Optional)
|
||||
default: scene.none
|
||||
selector:
|
||||
entity:
|
||||
domain: scene
|
||||
multiple: false
|
||||
time_scene_morning:
|
||||
name: Time for the morning scene (Optional)
|
||||
description: A time input which defines the time from which on the scene will
|
||||
be activated if motion is detected.
|
||||
default: 00:00:00
|
||||
selector:
|
||||
time: {}
|
||||
scene_day:
|
||||
name: Scene for the bright day (Optional)
|
||||
default: scene.none
|
||||
selector:
|
||||
entity:
|
||||
domain: scene
|
||||
multiple: false
|
||||
time_scene_day:
|
||||
name: Time for the day scene (Optional)
|
||||
description: A time input which defines the time from which on the scene will
|
||||
be activated if motion is detected.
|
||||
default: 00:00:00
|
||||
selector:
|
||||
time: {}
|
||||
scene_evening:
|
||||
name: Scene for the evening (Optional)
|
||||
default: scene.none
|
||||
selector:
|
||||
entity:
|
||||
domain: scene
|
||||
multiple: false
|
||||
time_scene_evening:
|
||||
name: Time for the evening scene (Optional)
|
||||
description: A time input which defines the time from which on the scene will
|
||||
be activated if motion is detected.
|
||||
default: 00:00:00
|
||||
selector:
|
||||
time: {}
|
||||
scene_night:
|
||||
name: Scene for the dark night (Optional)
|
||||
default: scene.none
|
||||
selector:
|
||||
entity:
|
||||
domain: scene
|
||||
multiple: false
|
||||
time_scene_night:
|
||||
name: Time for the night scene (Optional)
|
||||
description: A time input which defines the time from which on the scene will
|
||||
be activated if motion is detectedd.
|
||||
default: 00:00:00
|
||||
selector:
|
||||
time: {}
|
||||
scene_no_motion:
|
||||
name: Default scene for no motion (Optional)
|
||||
description: Set this Scene if you want to activate a scene if motion stops
|
||||
default: scene.none
|
||||
selector:
|
||||
entity:
|
||||
domain: scene
|
||||
multiple: false
|
||||
mode: restart
|
||||
max_exceeded: silent
|
||||
variables:
|
||||
scene_ambient: !input 'scene_ambient'
|
||||
scene_morning: !input 'scene_morning'
|
||||
scene_day: !input 'scene_day'
|
||||
scene_evening: !input 'scene_evening'
|
||||
scene_night: !input 'scene_night'
|
||||
automation_blocker: !input 'automation_blocker'
|
||||
automation_blocker_boolean: !input 'automation_blocker_boolean'
|
||||
no_motion_blocker: !input 'no_motion_blocker'
|
||||
no_motion_blocker_boolean: !input 'no_motion_blocker_boolean'
|
||||
elevation_check: !input 'elevation_check'
|
||||
scene_no_motion: !input 'scene_no_motion'
|
||||
motion_entity: !input 'motion_entity'
|
||||
trigger:
|
||||
- platform: state
|
||||
entity_id: !input 'motion_entity'
|
||||
from: 'off'
|
||||
to: 'on'
|
||||
- platform: state
|
||||
entity_id: !input 'motion_entity'
|
||||
from: 'on'
|
||||
to: 'off'
|
||||
for: !input 'no_motion_wait'
|
||||
condition:
|
||||
- condition: or
|
||||
conditions:
|
||||
- '{{ automation_blocker == none }}'
|
||||
- '{{ automation_blocker_boolean and states[automation_blocker].state == ''on''
|
||||
}}'
|
||||
- '{{ not automation_blocker_boolean and states[automation_blocker].state == ''off''
|
||||
}}'
|
||||
- condition: template
|
||||
value_template: '{{ (elevation_check == none) or (state_attr(''sun.sun'',''elevation'')
|
||||
<= elevation_check | float(90)) }}'
|
||||
action:
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ trigger.to_state.state == ''on'' }}'
|
||||
sequence:
|
||||
- choose:
|
||||
- conditions:
|
||||
- '{{ scene_morning != ''scene.none''}}'
|
||||
- condition: time
|
||||
after: !input 'time_scene_morning'
|
||||
before: !input 'time_scene_day'
|
||||
sequence:
|
||||
- scene: !input 'scene_morning'
|
||||
- conditions:
|
||||
- '{{ scene_day != ''scene.none''}}'
|
||||
- condition: time
|
||||
after: !input 'time_scene_day'
|
||||
before: !input 'time_scene_evening'
|
||||
sequence:
|
||||
- scene: !input 'scene_day'
|
||||
- conditions:
|
||||
- '{{ scene_evening != ''scene.none''}}'
|
||||
- condition: time
|
||||
after: !input 'time_scene_evening'
|
||||
before: !input 'time_scene_night'
|
||||
sequence:
|
||||
- scene: !input 'scene_evening'
|
||||
- conditions:
|
||||
- '{{ scene_night != ''scene.none''}}'
|
||||
- condition: time
|
||||
after: !input 'time_scene_night'
|
||||
before: !input 'time_scene_morning'
|
||||
sequence:
|
||||
- scene: !input 'scene_night'
|
||||
default:
|
||||
- service: light.turn_on
|
||||
target: !input 'light_target'
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ trigger.to_state.state == ''off'' }}'
|
||||
- condition: or
|
||||
conditions:
|
||||
- '{{ no_motion_blocker == none }}'
|
||||
- '{{ no_motion_blocker_boolean and states[no_motion_blocker].state == ''on''
|
||||
}}'
|
||||
- '{{ not no_motion_blocker_boolean and states[no_motion_blocker].state == ''off''
|
||||
}}'
|
||||
sequence:
|
||||
- choose:
|
||||
- conditions:
|
||||
- '{{ scene_ambient != ''scene.none'' }}'
|
||||
- condition: time
|
||||
after: !input 'time_scene_ambient_start'
|
||||
before: !input 'time_scene_ambient_end'
|
||||
sequence:
|
||||
- scene: !input 'scene_ambient'
|
||||
- conditions:
|
||||
- '{{ scene_no_motion != ''scene.none'' }}'
|
||||
sequence:
|
||||
- scene: !input 'scene_no_motion'
|
||||
default:
|
||||
- service: light.turn_off
|
||||
target: !input 'light_target'
|
||||
@@ -1,20 +0,0 @@
|
||||
blueprint:
|
||||
name: Restore Samba Backup sensor on startup
|
||||
description: Restore Samba Backup sensor on startup
|
||||
domain: automation
|
||||
input:
|
||||
addon:
|
||||
name: Samba Backup Addon
|
||||
description: Select samba backup addon.
|
||||
selector:
|
||||
addon: {}
|
||||
source_url: https://github.com/thomasmauerer/hassio-addons/blob/master/samba-backup/blueprints/restore_samba_backup_sensor.yaml
|
||||
mode: single
|
||||
trigger:
|
||||
- event: start
|
||||
platform: homeassistant
|
||||
action:
|
||||
- service: hassio.addon_stdin
|
||||
data:
|
||||
addon: !input addon
|
||||
input: restore-sensor
|
||||
@@ -1,84 +0,0 @@
|
||||
blueprint:
|
||||
name: Confirmable Notification
|
||||
description: >-
|
||||
A script that sends an actionable notification with a confirmation before
|
||||
running the specified action.
|
||||
domain: script
|
||||
source_url: https://github.com/home-assistant/core/blob/master/homeassistant/components/script/blueprints/confirmable_notification.yaml
|
||||
input:
|
||||
notify_device:
|
||||
name: Device to notify
|
||||
description: Device needs to run the official Home Assistant app to receive notifications.
|
||||
selector:
|
||||
device:
|
||||
integration: mobile_app
|
||||
title:
|
||||
name: "Title"
|
||||
description: "The title of the button shown in the notification."
|
||||
default: ""
|
||||
selector:
|
||||
text:
|
||||
message:
|
||||
name: "Message"
|
||||
description: "The message body"
|
||||
selector:
|
||||
text:
|
||||
confirm_text:
|
||||
name: "Confirmation Text"
|
||||
description: "Text to show on the confirmation button"
|
||||
default: "Confirm"
|
||||
selector:
|
||||
text:
|
||||
confirm_action:
|
||||
name: "Confirmation Action"
|
||||
description: "Action to run when notification is confirmed"
|
||||
default: []
|
||||
selector:
|
||||
action:
|
||||
dismiss_text:
|
||||
name: "Dismiss Text"
|
||||
description: "Text to show on the dismiss button"
|
||||
default: "Dismiss"
|
||||
selector:
|
||||
text:
|
||||
dismiss_action:
|
||||
name: "Dismiss Action"
|
||||
description: "Action to run when notification is dismissed"
|
||||
default: []
|
||||
selector:
|
||||
action:
|
||||
|
||||
mode: restart
|
||||
|
||||
sequence:
|
||||
- alias: "Set up variables"
|
||||
variables:
|
||||
action_confirm: "{{ 'CONFIRM_' ~ context.id }}"
|
||||
action_dismiss: "{{ 'DISMISS_' ~ context.id }}"
|
||||
- alias: "Send notification"
|
||||
domain: mobile_app
|
||||
type: notify
|
||||
device_id: !input notify_device
|
||||
title: !input title
|
||||
message: !input message
|
||||
data:
|
||||
actions:
|
||||
- action: "{{ action_confirm }}"
|
||||
title: !input confirm_text
|
||||
- action: "{{ action_dismiss }}"
|
||||
title: !input dismiss_text
|
||||
- alias: "Awaiting response"
|
||||
wait_for_trigger:
|
||||
- platform: event
|
||||
event_type: mobile_app_notification_action
|
||||
event_data:
|
||||
action: "{{ action_confirm }}"
|
||||
- platform: event
|
||||
event_type: mobile_app_notification_action
|
||||
event_data:
|
||||
action: "{{ action_dismiss }}"
|
||||
- choose:
|
||||
- conditions: "{{ wait.trigger.event.data.action == action_confirm }}"
|
||||
sequence: !input confirm_action
|
||||
- conditions: "{{ wait.trigger.event.data.action == action_dismiss }}"
|
||||
sequence: !input dismiss_action
|
||||
@@ -7,6 +7,13 @@ frontend:
|
||||
homeassistant:
|
||||
#packages: !include_dir_named packages/
|
||||
packages: !include_dir_named "integrations"
|
||||
allowlist_external_dirs:
|
||||
- "/config/www/images"
|
||||
|
||||
media_dirs:
|
||||
media: /media
|
||||
|
||||
logger:
|
||||
default: warning
|
||||
logs:
|
||||
custom_components.kia_uvo: debug
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,91 +0,0 @@
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
|
||||
SENSOR_TYPES = {
|
||||
"gft": ["GFT", "mdi:recycle"],
|
||||
"kerstboom": ["Kerstboom", "mdi:recycle"],
|
||||
"papier": ["Papier", "mdi:recycle"],
|
||||
"pbd": ["PBD", "mdi:recycle"],
|
||||
"restafval": ["Restafval", "mdi:recycle"],
|
||||
"takken": ["Takken", "mdi:recycle"],
|
||||
"textiel": ["Textiel", "mdi:recycle"],
|
||||
"trash_type_today": ["Today", "mdi:recycle"],
|
||||
"trash_type_tomorrow": ["Tomorrow", "mdi:recycle"],
|
||||
}
|
||||
|
||||
SENSOR_LOCATIONS_TO_URL = {
|
||||
"trashapi": [
|
||||
"http://trashapi.azurewebsites.net/trash?Location={0}&ZipCode={1}&HouseNumber={2}&HouseNumberSuffix={3}&District={4}&DiftarCode={5}&ShowWholeYear={6}"
|
||||
]
|
||||
}
|
||||
|
||||
MONTH_TO_NUMBER = {
|
||||
"jan": "01",
|
||||
"feb": "02",
|
||||
"mrt": "03",
|
||||
"apr": "04",
|
||||
"mei": "05",
|
||||
"jun": "06",
|
||||
"jul": "07",
|
||||
"aug": "08",
|
||||
"sep": "09",
|
||||
"okt": "10",
|
||||
"nov": "11",
|
||||
"dec": "12",
|
||||
"januari": "01",
|
||||
"februari": "02",
|
||||
"maart": "03",
|
||||
"april": "04",
|
||||
"mei": "05",
|
||||
"juni": "06",
|
||||
"juli": "07",
|
||||
"augustus": "08",
|
||||
"september": "09",
|
||||
"oktober": "10",
|
||||
"november": "11",
|
||||
"december": "12",
|
||||
}
|
||||
|
||||
NUMBER_TO_MONTH = {
|
||||
1: "januari",
|
||||
2: "februari",
|
||||
3: "maart",
|
||||
4: "april",
|
||||
5: "mei",
|
||||
6: "juni",
|
||||
7: "juli",
|
||||
8: "augustus",
|
||||
9: "september",
|
||||
10: "oktober",
|
||||
11: "november",
|
||||
12: "december",
|
||||
}
|
||||
|
||||
CONF_CITY = "city"
|
||||
CONF_LOCATION = "location"
|
||||
CONF_POSTCODE = "postcode"
|
||||
CONF_STREET_NUMBER = "streetnumber"
|
||||
CONF_STREET_NUMBER_SUFFIX = "streetnumbersuffix"
|
||||
CONF_DISTRICT = "district"
|
||||
CONF_GET_WHOLE_YEAR = "getwholeyear"
|
||||
CONF_DATE_FORMAT = "dateformat"
|
||||
CONF_TIMESPAN_IN_DAYS = "timespanindays"
|
||||
CONF_LOCALE = "locale"
|
||||
CONF_ID = "id"
|
||||
CONF_NO_TRASH_TEXT = "notrashtext"
|
||||
CONF_DIFTAR_CODE = "diftarcode"
|
||||
SENSOR_PREFIX = "Afvalinfo "
|
||||
ATTR_ERROR = "error"
|
||||
ATTR_LAST_UPDATE = "last_update"
|
||||
ATTR_HIDDEN = "hidden"
|
||||
ATTR_IS_COLLECTION_DATE_TODAY = "is_collection_date_today"
|
||||
ATTR_DAYS_UNTIL_COLLECTION_DATE = "days_until_collection_date"
|
||||
ATTR_YEAR_MONTH_DAY_DATE = "year_month_day_date"
|
||||
ATTR_FRIENDLY_NAME = "friendly_name"
|
||||
ATTR_LAST_COLLECTION_DATE = "last_collection_date"
|
||||
ATTR_TOTAL_COLLECTIONS_THIS_YEAR = "total_collections_this_year"
|
||||
ATTR_WHOLE_YEAR_DATES = "whole_year_dates"
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(hours=2, minutes=30)
|
||||
Binary file not shown.
Binary file not shown.
@@ -1,94 +0,0 @@
|
||||
from ..const.const import (
|
||||
MONTH_TO_NUMBER,
|
||||
SENSOR_LOCATIONS_TO_URL,
|
||||
_LOGGER,
|
||||
)
|
||||
from datetime import date, datetime, timedelta
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
import requests
|
||||
|
||||
|
||||
class TrashApiAfval(object):
|
||||
def get_data(
|
||||
self,
|
||||
location,
|
||||
postcode,
|
||||
street_number,
|
||||
street_number_suffix,
|
||||
district,
|
||||
diftar_code,
|
||||
get_whole_year,
|
||||
resources,
|
||||
):
|
||||
_LOGGER.debug("Updating Waste collection dates")
|
||||
|
||||
try:
|
||||
API_ENDPOINT = SENSOR_LOCATIONS_TO_URL["trashapi"][0].format(
|
||||
location,
|
||||
postcode,
|
||||
street_number,
|
||||
street_number_suffix,
|
||||
district,
|
||||
diftar_code,
|
||||
get_whole_year,
|
||||
)
|
||||
|
||||
r = requests.get(url=API_ENDPOINT)
|
||||
dataList = r.json()
|
||||
|
||||
# Place all possible values in the dictionary even if they are not necessary
|
||||
waste_array = []
|
||||
|
||||
# _LOGGER.warning(dataList)
|
||||
|
||||
for data in dataList:
|
||||
|
||||
# find gft, kerstboom, papier, pbd, takken or textiel
|
||||
if (
|
||||
("gft" in resources and data["name"].lower() == "gft")
|
||||
or (
|
||||
"kerstboom" in resources and data["name"].lower() == "kerstboom"
|
||||
)
|
||||
or ("papier" in resources and data["name"].lower() == "papier")
|
||||
or ("pbd" in resources and data["name"].lower() == "pbd")
|
||||
or ("takken" in resources and data["name"].lower() == "takken")
|
||||
or ("textiel" in resources and data["name"].lower() == "textiel")
|
||||
):
|
||||
waste_array.append(
|
||||
{data["name"].lower(): data["date"].split("T")[0]}
|
||||
)
|
||||
# find restafval.
|
||||
if "restafval" in resources and data["name"].lower() == "restafval":
|
||||
if (
|
||||
date.today()
|
||||
<= datetime.strptime(
|
||||
data["date"].split("T")[0], "%Y-%m-%d"
|
||||
).date()
|
||||
):
|
||||
waste_array.append(
|
||||
{data["name"].lower(): data["date"].split("T")[0]}
|
||||
)
|
||||
else:
|
||||
waste_array.append(
|
||||
{"restafvaldiftardate": data["date"].split("T")[0]}
|
||||
)
|
||||
waste_array.append(
|
||||
{"restafvaldiftarcollections": data["totalThisYear"]}
|
||||
)
|
||||
|
||||
# _LOGGER.warning(waste_array)
|
||||
|
||||
return waste_array
|
||||
except urllib.error.URLError as exc:
|
||||
_LOGGER.error("Error occurred while fetching data: %r", exc.reason)
|
||||
return False
|
||||
except Exception as exc:
|
||||
_LOGGER.error(
|
||||
"""Error occurred. Please check the address with postcode: %r and huisnummer: %r%r on the website of your local waste collector in the gemeente: %r. It's probably a faulty address or the website of the waste collector is unreachable. If the address is working on the website of the local waste collector and this error still occured, please report the issue in the Github repository https://github.com/heyajohnny/afvalinfo with details of the location that isn't working""",
|
||||
postcode,
|
||||
street_number,
|
||||
street_number_suffix,
|
||||
location,
|
||||
)
|
||||
return False
|
||||
@@ -1,15 +0,0 @@
|
||||
{
|
||||
"domain": "afvalinfo",
|
||||
"name": "Afvalinfo",
|
||||
"version": "1.1.0",
|
||||
"documentation": "https://github.com/heyajohnny/afvalinfo",
|
||||
"issue_tracker": "https://github.com/heyajohnny/afvalinfo/issues",
|
||||
"dependencies": [],
|
||||
"codeowners": [
|
||||
"@heyajohnny"
|
||||
],
|
||||
"requirements": [
|
||||
"Babel==2.8.0",
|
||||
"python-dateutil==2.8.1"
|
||||
]
|
||||
}
|
||||
@@ -1,413 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Sensor component for Afvalinfo
|
||||
Author: Johnny Visser
|
||||
"""
|
||||
|
||||
import voluptuous as vol
|
||||
from datetime import datetime, date, timedelta
|
||||
from dateutil.relativedelta import relativedelta
|
||||
import urllib.error
|
||||
from babel import Locale
|
||||
from babel.dates import format_date, format_datetime, format_time
|
||||
import re
|
||||
|
||||
from .const.const import (
|
||||
MIN_TIME_BETWEEN_UPDATES,
|
||||
_LOGGER,
|
||||
CONF_CITY,
|
||||
CONF_DISTRICT,
|
||||
CONF_LOCATION,
|
||||
CONF_POSTCODE,
|
||||
CONF_STREET_NUMBER,
|
||||
CONF_STREET_NUMBER_SUFFIX,
|
||||
CONF_GET_WHOLE_YEAR,
|
||||
CONF_DATE_FORMAT,
|
||||
CONF_TIMESPAN_IN_DAYS,
|
||||
CONF_NO_TRASH_TEXT,
|
||||
CONF_DIFTAR_CODE,
|
||||
CONF_LOCALE,
|
||||
CONF_ID,
|
||||
SENSOR_PREFIX,
|
||||
ATTR_ERROR,
|
||||
ATTR_LAST_UPDATE,
|
||||
ATTR_HIDDEN,
|
||||
ATTR_DAYS_UNTIL_COLLECTION_DATE,
|
||||
ATTR_IS_COLLECTION_DATE_TODAY,
|
||||
ATTR_YEAR_MONTH_DAY_DATE,
|
||||
ATTR_FRIENDLY_NAME,
|
||||
ATTR_LAST_COLLECTION_DATE,
|
||||
ATTR_TOTAL_COLLECTIONS_THIS_YEAR,
|
||||
ATTR_WHOLE_YEAR_DATES,
|
||||
SENSOR_TYPES,
|
||||
)
|
||||
|
||||
from .location.trashapi import TrashApiAfval
|
||||
from .sensortomorrow import AfvalInfoTomorrowSensor
|
||||
from .sensortoday import AfvalInfoTodaySensor
|
||||
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.const import CONF_RESOURCES
|
||||
from homeassistant.util import Throttle
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_RESOURCES, default=[]): vol.All(cv.ensure_list),
|
||||
vol.Optional(CONF_CITY, default=""): cv.string,
|
||||
vol.Optional(CONF_LOCATION, default="sliedrecht"): cv.string,
|
||||
vol.Required(CONF_POSTCODE, default="3361AB"): cv.string,
|
||||
vol.Required(CONF_STREET_NUMBER, default="1"): cv.string,
|
||||
vol.Optional(CONF_STREET_NUMBER_SUFFIX, default=""): cv.string,
|
||||
vol.Optional(CONF_DISTRICT, default=""): cv.string,
|
||||
vol.Optional(CONF_DATE_FORMAT, default="%d-%m-%Y"): cv.string,
|
||||
vol.Optional(CONF_TIMESPAN_IN_DAYS, default="365"): cv.string,
|
||||
vol.Optional(CONF_LOCALE, default="en"): cv.string,
|
||||
vol.Optional(CONF_ID, default=""): cv.string,
|
||||
vol.Optional(CONF_NO_TRASH_TEXT, default="none"): cv.string,
|
||||
vol.Optional(CONF_DIFTAR_CODE, default=""): cv.string,
|
||||
vol.Optional(CONF_GET_WHOLE_YEAR, default="false"): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
_LOGGER.debug("Setup Afvalinfo sensor")
|
||||
|
||||
location = config.get(CONF_CITY).lower().strip()
|
||||
if len(location) == 0:
|
||||
location = config.get(CONF_LOCATION).lower().strip()
|
||||
postcode = config.get(CONF_POSTCODE).strip()
|
||||
street_number = config.get(CONF_STREET_NUMBER)
|
||||
street_number_suffix = config.get(CONF_STREET_NUMBER_SUFFIX)
|
||||
district = config.get(CONF_DISTRICT)
|
||||
date_format = config.get(CONF_DATE_FORMAT).strip()
|
||||
timespan_in_days = config.get(CONF_TIMESPAN_IN_DAYS)
|
||||
locale = config.get(CONF_LOCALE)
|
||||
id_name = config.get(CONF_ID)
|
||||
no_trash_text = config.get(CONF_NO_TRASH_TEXT)
|
||||
diftar_code = config.get(CONF_DIFTAR_CODE)
|
||||
get_whole_year = config.get(CONF_GET_WHOLE_YEAR)
|
||||
|
||||
try:
|
||||
resources = config[CONF_RESOURCES].copy()
|
||||
|
||||
# filter the types from the dict if it's a dictionary
|
||||
if isinstance(resources[0], dict):
|
||||
resourcesMinusTodayAndTomorrow = [obj["type"] for obj in resources]
|
||||
else:
|
||||
resourcesMinusTodayAndTomorrow = resources
|
||||
|
||||
if "trash_type_today" in resourcesMinusTodayAndTomorrow:
|
||||
resourcesMinusTodayAndTomorrow.remove("trash_type_today")
|
||||
if "trash_type_tomorrow" in resourcesMinusTodayAndTomorrow:
|
||||
resourcesMinusTodayAndTomorrow.remove("trash_type_tomorrow")
|
||||
|
||||
data = AfvalinfoData(
|
||||
location,
|
||||
postcode,
|
||||
street_number,
|
||||
street_number_suffix,
|
||||
district,
|
||||
diftar_code,
|
||||
get_whole_year,
|
||||
resourcesMinusTodayAndTomorrow,
|
||||
)
|
||||
except urllib.error.HTTPError as error:
|
||||
_LOGGER.error(error.reason)
|
||||
return False
|
||||
|
||||
entities = []
|
||||
|
||||
for resource in config[CONF_RESOURCES]:
|
||||
# old way, before 20220204
|
||||
if type(resource) == str:
|
||||
sensor_type = resource.lower()
|
||||
sensor_friendly_name = sensor_type
|
||||
# new way
|
||||
else:
|
||||
sensor_type = resource["type"].lower()
|
||||
if "friendly_name" in resource.keys():
|
||||
sensor_friendly_name = resource["friendly_name"]
|
||||
else:
|
||||
# If no friendly name is provided, use the sensor_type as friendly name
|
||||
sensor_friendly_name = sensor_type
|
||||
|
||||
# if sensor_type not in SENSOR_TYPES:
|
||||
if (
|
||||
sensor_type.title().lower() != "trash_type_today"
|
||||
and sensor_type.title().lower() != "trash_type_tomorrow"
|
||||
):
|
||||
entities.append(
|
||||
AfvalinfoSensor(
|
||||
data,
|
||||
sensor_type,
|
||||
sensor_friendly_name,
|
||||
date_format,
|
||||
timespan_in_days,
|
||||
locale,
|
||||
id_name,
|
||||
get_whole_year,
|
||||
)
|
||||
)
|
||||
|
||||
# Add sensor -trash_type_today
|
||||
if sensor_type.title().lower() == "trash_type_today":
|
||||
today = AfvalInfoTodaySensor(
|
||||
data,
|
||||
sensor_type,
|
||||
sensor_friendly_name,
|
||||
entities,
|
||||
id_name,
|
||||
no_trash_text,
|
||||
)
|
||||
entities.append(today)
|
||||
# Add sensor -trash_type_tomorrow
|
||||
if sensor_type.title().lower() == "trash_type_tomorrow":
|
||||
tomorrow = AfvalInfoTomorrowSensor(
|
||||
data,
|
||||
sensor_type,
|
||||
sensor_friendly_name,
|
||||
entities,
|
||||
id_name,
|
||||
no_trash_text,
|
||||
)
|
||||
entities.append(tomorrow)
|
||||
|
||||
add_entities(entities)
|
||||
|
||||
|
||||
class AfvalinfoData(object):
|
||||
def __init__(
|
||||
self,
|
||||
location,
|
||||
postcode,
|
||||
street_number,
|
||||
street_number_suffix,
|
||||
district,
|
||||
diftar_code,
|
||||
get_whole_year,
|
||||
resources,
|
||||
):
|
||||
self.data = None
|
||||
self.location = location
|
||||
self.postcode = postcode
|
||||
self.street_number = street_number
|
||||
self.street_number_suffix = street_number_suffix
|
||||
self.district = district
|
||||
self.diftar_code = diftar_code
|
||||
self.get_whole_year = get_whole_year
|
||||
self.resources = resources
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
_LOGGER.debug("Updating Waste collection dates")
|
||||
self.data = TrashApiAfval().get_data(
|
||||
self.location,
|
||||
self.postcode,
|
||||
self.street_number,
|
||||
self.street_number_suffix,
|
||||
self.district,
|
||||
self.diftar_code,
|
||||
self.get_whole_year,
|
||||
self.resources,
|
||||
)
|
||||
|
||||
|
||||
class AfvalinfoSensor(Entity):
|
||||
def __init__(
|
||||
self,
|
||||
data,
|
||||
sensor_type,
|
||||
sensor_friendly_name,
|
||||
date_format,
|
||||
timespan_in_days,
|
||||
locale,
|
||||
id_name,
|
||||
get_whole_year,
|
||||
):
|
||||
self.data = data
|
||||
self.type = sensor_type
|
||||
self.friendly_name = sensor_friendly_name
|
||||
self.date_format = date_format
|
||||
self.timespan_in_days = timespan_in_days
|
||||
self.locale = locale
|
||||
self._name = sensor_friendly_name
|
||||
self._get_whole_year = get_whole_year
|
||||
self.entity_id = "sensor." + (
|
||||
(
|
||||
SENSOR_PREFIX
|
||||
+ (id_name + " " if len(id_name) > 0 else "")
|
||||
+ sensor_friendly_name
|
||||
)
|
||||
.lower()
|
||||
.replace(" ", "_")
|
||||
)
|
||||
self._attr_unique_id = (
|
||||
SENSOR_PREFIX
|
||||
+ (id_name + " " if len(id_name) > 0 else "")
|
||||
+ sensor_friendly_name
|
||||
)
|
||||
self._icon = SENSOR_TYPES[sensor_type][1]
|
||||
self._hidden = False
|
||||
self._error = False
|
||||
self._state = None
|
||||
self._last_update = None
|
||||
self._days_until_collection_date = None
|
||||
self._is_collection_date_today = False
|
||||
self._year_month_day_date = None
|
||||
self._last_collection_date = None
|
||||
self._total_collections_this_year = None
|
||||
self._whole_year_dates = None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def icon(self):
|
||||
return self._icon
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
return self._state
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
return {
|
||||
ATTR_ERROR: self._error,
|
||||
ATTR_FRIENDLY_NAME: self.friendly_name,
|
||||
ATTR_YEAR_MONTH_DAY_DATE: self._year_month_day_date,
|
||||
ATTR_LAST_UPDATE: self._last_update,
|
||||
ATTR_HIDDEN: self._hidden,
|
||||
ATTR_DAYS_UNTIL_COLLECTION_DATE: self._days_until_collection_date,
|
||||
ATTR_IS_COLLECTION_DATE_TODAY: self._is_collection_date_today,
|
||||
ATTR_LAST_COLLECTION_DATE: self._last_collection_date,
|
||||
ATTR_TOTAL_COLLECTIONS_THIS_YEAR: self._total_collections_this_year,
|
||||
ATTR_WHOLE_YEAR_DATES: self._whole_year_dates,
|
||||
}
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
self.data.update()
|
||||
waste_array = self.data.data
|
||||
self._error = False
|
||||
|
||||
# Loop through all the dates to put the dates in the whole_year_dates attribute
|
||||
if self._get_whole_year == "True":
|
||||
whole_year_dates = []
|
||||
for waste_data in waste_array:
|
||||
if self.type in waste_data:
|
||||
whole_year_dates.append(
|
||||
datetime.strptime(waste_data[self.type], "%Y-%m-%d").date()
|
||||
)
|
||||
|
||||
self._whole_year_dates = whole_year_dates
|
||||
|
||||
try:
|
||||
if waste_array:
|
||||
for waste_data in waste_array:
|
||||
if self.type in waste_data:
|
||||
collection_date = datetime.strptime(
|
||||
waste_data[self.type], "%Y-%m-%d"
|
||||
).date()
|
||||
|
||||
# Date in date format "%Y-%m-%d"
|
||||
self._year_month_day_date = str(collection_date)
|
||||
|
||||
if collection_date:
|
||||
# Set the values of the sensor
|
||||
self._last_update = datetime.today().strftime(
|
||||
"%d-%m-%Y %H:%M"
|
||||
)
|
||||
|
||||
# Is the collection date today?
|
||||
self._is_collection_date_today = (
|
||||
date.today() == collection_date
|
||||
)
|
||||
|
||||
if (
|
||||
self.type == "restafval"
|
||||
and "restafvaldiftardate" in waste_data
|
||||
):
|
||||
self._last_collection_date = str(
|
||||
datetime.strptime(
|
||||
waste_data["restafvaldiftardate"], "%Y-%m-%d"
|
||||
).date()
|
||||
)
|
||||
self._total_collections_this_year = waste_data[
|
||||
"restafvaldiftarcollections"
|
||||
]
|
||||
|
||||
# Days until collection date
|
||||
delta = collection_date - date.today()
|
||||
self._days_until_collection_date = delta.days
|
||||
|
||||
# Only show the value if the date is lesser than or equal to (today + timespan_in_days)
|
||||
if collection_date <= date.today() + relativedelta(
|
||||
days=int(self.timespan_in_days)
|
||||
):
|
||||
# if the date does not contain a named day or month, return the date as normal
|
||||
if (
|
||||
self.date_format.find("a") == -1
|
||||
and self.date_format.find("A") == -1
|
||||
and self.date_format.find("b") == -1
|
||||
and self.date_format.find("B") == -1
|
||||
):
|
||||
self._state = collection_date.strftime(
|
||||
self.date_format
|
||||
)
|
||||
# else convert the named values to the locale names
|
||||
else:
|
||||
edited_date_format = self.date_format.replace(
|
||||
"%a", "EEE"
|
||||
)
|
||||
edited_date_format = edited_date_format.replace(
|
||||
"%A", "EEEE"
|
||||
)
|
||||
edited_date_format = edited_date_format.replace(
|
||||
"%b", "MMM"
|
||||
)
|
||||
edited_date_format = edited_date_format.replace(
|
||||
"%B", "MMMM"
|
||||
)
|
||||
|
||||
# half babel, half date string... something like EEEE 04-MMMM-2020
|
||||
half_babel_half_date = collection_date.strftime(
|
||||
edited_date_format
|
||||
)
|
||||
|
||||
# replace the digits with qquoted digits 01 --> '01'
|
||||
half_babel_half_date = re.sub(
|
||||
r"(\d+)", r"'\1'", half_babel_half_date
|
||||
)
|
||||
# transform the EEE, EEEE etc... to a real locale date, with babel
|
||||
locale_date = format_date(
|
||||
collection_date,
|
||||
half_babel_half_date,
|
||||
locale=self.locale,
|
||||
)
|
||||
|
||||
self._state = locale_date
|
||||
break # we have a result, break the loop
|
||||
else:
|
||||
self._hidden = True
|
||||
else:
|
||||
# collection_date empty
|
||||
raise ValueError()
|
||||
# else:
|
||||
# No matching result data for current waste type, no problem
|
||||
else:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
self._error = True
|
||||
# self._state = None
|
||||
# self._hidden = True
|
||||
# self._days_until_collection_date = None
|
||||
# self._year_month_day_date = None
|
||||
# self._is_collection_date_today = False
|
||||
# self._last_collection_date = None
|
||||
# self._total_collections_this_year = None
|
||||
# self._whole_year_dates = None
|
||||
self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")
|
||||
@@ -1,85 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
from datetime import datetime, date, timedelta
|
||||
from .const.const import (
|
||||
_LOGGER,
|
||||
ATTR_LAST_UPDATE,
|
||||
ATTR_FRIENDLY_NAME,
|
||||
ATTR_YEAR_MONTH_DAY_DATE,
|
||||
SENSOR_TYPES,
|
||||
SENSOR_PREFIX,
|
||||
)
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
|
||||
class AfvalInfoTodaySensor(Entity):
    """Sensor whose state lists the waste type(s) collected today.

    The state is a comma-separated list of the friendly names of all waste
    entities whose collection date equals today's date, or the configured
    "no trash" text when nothing is collected today.
    """

    def __init__(
        self, data, sensor_type, sensor_friendly_name, entities, id_name, no_trash_text
    ):
        self.data = data
        self.type = sensor_type
        self.friendly_name = sensor_friendly_name
        self._last_update = None
        self._name = sensor_friendly_name
        self.entity_id = "sensor." + (
            (
                SENSOR_PREFIX
                + (id_name + " " if len(id_name) > 0 else "")
                + sensor_friendly_name
            )
            .lower()
            .replace(" ", "_")
        )
        self._attr_unique_id = (
            SENSOR_PREFIX
            + (id_name + " " if len(id_name) > 0 else "")
            + sensor_friendly_name
        )
        self._no_trash_text = no_trash_text
        self._state = None
        self._icon = SENSOR_TYPES[sensor_type][1]
        self._entities = entities

    @property
    def name(self):
        return self._name

    @property
    def icon(self):
        return self._icon

    @property
    def state(self):
        return self._state

    @property
    def extra_state_attributes(self):
        return {ATTR_LAST_UPDATE: self._last_update}

    @Throttle(timedelta(minutes=1))
    def update(self):
        """Refresh the state from the tracked waste entities.

        Joins the friendly names of every entity with a pickup today, or
        falls back to the configured "no trash" text. The state is only
        written when it actually changed.
        """
        self.data.update()
        self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")

        today = str(date.today().strftime("%Y-%m-%d"))
        # Collect the friendly names of every entity with a pickup today.
        # (Replaces the original accumulate-with-", "-prefix-then-strip
        # logic, which produced the same comma-separated result.)
        names_today = [
            entity.extra_state_attributes.get(ATTR_FRIENDLY_NAME)
            for entity in self._entities
            if entity.extra_state_attributes.get(ATTR_YEAR_MONTH_DAY_DATE) == today
        ]
        new_state = ", ".join(names_today) if names_today else self._no_trash_text

        # Only change state if the new state differs from the last state.
        if new_state != self._state:
            self._state = new_state
|
||||
@@ -1,85 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
from datetime import datetime, date, timedelta
|
||||
from .const.const import (
|
||||
_LOGGER,
|
||||
ATTR_LAST_UPDATE,
|
||||
ATTR_FRIENDLY_NAME,
|
||||
ATTR_YEAR_MONTH_DAY_DATE,
|
||||
SENSOR_TYPES,
|
||||
SENSOR_PREFIX,
|
||||
)
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
|
||||
class AfvalInfoTomorrowSensor(Entity):
    """Sensor whose state lists the waste type(s) collected tomorrow.

    The state is a comma-separated list of the friendly names of all waste
    entities whose collection date equals tomorrow's date, or the configured
    "no trash" text when nothing is collected tomorrow.
    """

    def __init__(
        self, data, sensor_type, sensor_friendly_name, entities, id_name, no_trash_text
    ):
        self.data = data
        self.type = sensor_type
        self.friendly_name = sensor_friendly_name
        self._last_update = None
        self._name = sensor_friendly_name
        self.entity_id = "sensor." + (
            (
                SENSOR_PREFIX
                + (id_name + " " if len(id_name) > 0 else "")
                + sensor_friendly_name
            )
            .lower()
            .replace(" ", "_")
        )
        self._attr_unique_id = (
            SENSOR_PREFIX
            + (id_name + " " if len(id_name) > 0 else "")
            + sensor_friendly_name
        )
        self._no_trash_text = no_trash_text
        self._state = None
        self._icon = SENSOR_TYPES[sensor_type][1]
        self._entities = entities

    @property
    def name(self):
        return self._name

    @property
    def icon(self):
        return self._icon

    @property
    def state(self):
        return self._state

    @property
    def extra_state_attributes(self):
        return {ATTR_LAST_UPDATE: self._last_update}

    @Throttle(timedelta(minutes=1))
    def update(self):
        """Refresh the state from the tracked waste entities.

        Joins the friendly names of every entity with a pickup tomorrow, or
        falls back to the configured "no trash" text. The state is only
        written when it actually changed.
        """
        self.data.update()
        self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")

        tomorrow = str((date.today() + timedelta(days=1)).strftime("%Y-%m-%d"))
        # Collect the friendly names of every entity with a pickup tomorrow.
        # (Replaces the original accumulate-with-", "-prefix-then-strip
        # logic, which produced the same comma-separated result.)
        names_tomorrow = [
            entity.extra_state_attributes.get(ATTR_FRIENDLY_NAME)
            for entity in self._entities
            if entity.extra_state_attributes.get(ATTR_YEAR_MONTH_DAY_DATE) == tomorrow
        ]
        new_state = ", ".join(names_tomorrow) if names_tomorrow else self._no_trash_text

        # Only change state if the new state differs from the last state.
        if new_state != self._state:
            self._state = new_state
|
||||
@@ -1,57 +0,0 @@
|
||||
from datetime import datetime
|
||||
import re
|
||||
|
||||
import requests
|
||||
|
||||
from ..common.main_functions import _waste_type_rename
|
||||
from ..const.const import _LOGGER, SENSOR_COLLECTORS_DEAFVALAPP
|
||||
|
||||
|
||||
def get_waste_data_raw(
    provider,
    postal_code,
    street_number,
    suffix,
):
    """Fetch raw waste-collection data from the DeAfvalApp endpoint.

    Returns a list of {"type": ..., "date": "YYYY-MM-DD"} dicts, or None
    when the endpoint returns an empty body.

    Raises ValueError for an unknown provider, an unparseable postal code,
    or any request failure.
    """
    if provider not in SENSOR_COLLECTORS_DEAFVALAPP.keys():
        raise ValueError(f"Invalid provider: {provider}, please verify")

    # [A-Za-z] instead of [A-z]: the latter also matches "[", "\", "]",
    # "^", "_" and "`", which are not valid postal-code letters.
    corrected_postal_code_parts = re.search(
        r"(\d\d\d\d) ?([A-Za-z][A-Za-z])", postal_code
    )
    # Guard the match: subscripting a failed match would raise a bare
    # TypeError instead of a useful message.
    if not corrected_postal_code_parts:
        raise ValueError(f"Invalid postal code: {postal_code}, please verify")
    corrected_postal_code = (
        corrected_postal_code_parts[1] + corrected_postal_code_parts[2].upper()
    )

    try:
        url = SENSOR_COLLECTORS_DEAFVALAPP[provider].format(
            corrected_postal_code,
            street_number,
            suffix,
        )
        # Timeout so a stalled server cannot block Home Assistant forever.
        raw_response = requests.get(url, timeout=60)
    except requests.exceptions.RequestException as err:
        raise ValueError(err) from err

    try:
        response = raw_response.text
    except ValueError as e:
        raise ValueError(f"Invalid and/or no data received from {url}") from e

    if not response:
        _LOGGER.error("No waste data found!")
        return

    waste_data_raw = []

    # Response format: one line per waste type, semicolon-separated:
    # "<type>;<date>;<date>;...;" — first field is the type, last is empty.
    for rows in response.strip().split("\n"):
        waste_type = _waste_type_rename(rows.split(";")[0].strip().lower())
        for ophaaldatum in rows.split(";")[1:-1]:
            temp = {"type": waste_type}
            temp["date"] = datetime.strptime(ophaaldatum, "%d-%m-%Y").strftime(
                "%Y-%m-%d"
            )
            waste_data_raw.append(temp)

    return waste_data_raw


if __name__ == "__main__":
    print("Yell something at a mountain!")
|
||||
@@ -1,71 +0,0 @@
|
||||
from datetime import datetime
|
||||
import re
|
||||
|
||||
import requests
|
||||
|
||||
from ..common.main_functions import _waste_type_rename
|
||||
from ..const.const import _LOGGER, SENSOR_COLLECTORS_ICALENDAR
|
||||
|
||||
|
||||
def get_waste_data_raw(
    provider,
    postal_code,
    street_number,
    suffix,
):
    """Fetch and parse an iCalendar feed of waste pickups.

    Returns a list of {"type": ..., "date": "YYYY-MM-DD"} dicts built from
    the VEVENT SUMMARY/DTSTART fields, or None when the feed is empty.

    Raises ValueError for an unknown provider or any request failure.
    """
    if provider not in SENSOR_COLLECTORS_ICALENDAR.keys():
        raise ValueError(f"Invalid provider: {provider}, please verify")

    DATE_PATTERN = re.compile(r"^\d{8}")

    try:
        url = SENSOR_COLLECTORS_ICALENDAR[provider].format(
            provider,
            postal_code,
            street_number,
            suffix,
            datetime.now().strftime("%Y-%m-%d"),
        )
        # Timeout so a stalled server cannot block Home Assistant forever.
        raw_response = requests.get(url, timeout=60)
    except requests.exceptions.RequestException as err:
        raise ValueError(err) from err

    try:
        response = raw_response.text
    except ValueError as exc:
        raise ValueError(f"Invalid and/or no data received from {url}") from exc

    if not response:
        _LOGGER.error("No waste data found!")
        return

    waste_data_raw = []
    waste_date = None
    waste_type = None

    for line in response.splitlines():
        # Skip lines without a ":" (e.g. blanks); unpacking them would
        # raise ValueError.
        if ":" not in line:
            continue
        # Split on the FIRST ":" only: iCalendar values (e.g. SUMMARY text)
        # may themselves contain ":". The original maxsplit=2 could yield
        # three parts and break the 2-tuple unpacking.
        key, value = line.split(":", 1)
        field = key.split(";")[0]
        if field == "BEGIN" and value == "VEVENT":
            waste_date = None
            waste_type = None
        elif field == "SUMMARY":
            waste_type = value.strip().lower()
        elif field == "DTSTART":
            if DATE_PATTERN.match(value):
                waste_date = f"{value[:4]}-{value[4:6]}-{value[6:8]}"
            else:
                _LOGGER.debug(f"Unsupported date format: {value}")
        elif field == "END" and value == "VEVENT":
            if waste_date and waste_type:
                waste_data_raw.append({"type": waste_type, "date": waste_date})
            else:
                _LOGGER.debug(
                    f"No date or type extracted from event: date={waste_date}, type={waste_type}"
                )

    return waste_data_raw


if __name__ == "__main__":
    print("Yell something at a mountain!")
|
||||
@@ -1,122 +0,0 @@
|
||||
from ..common.waste_data_transformer import WasteDataTransformer
|
||||
from ..const.const import (
|
||||
_LOGGER,
|
||||
SENSOR_COLLECTORS_AFVALWIJZER,
|
||||
SENSOR_COLLECTORS_DEAFVALAPP,
|
||||
SENSOR_COLLECTORS_ICALENDAR,
|
||||
SENSOR_COLLECTORS_OPZET,
|
||||
SENSOR_COLLECTORS_RD4,
|
||||
SENSOR_COLLECTORS_XIMMIO,
|
||||
)
|
||||
|
||||
try:
|
||||
from . import deafvalapp, icalendar, mijnafvalwijzer, opzet, rd4, ximmio
|
||||
except ImportError as err:
|
||||
_LOGGER.error(f"Import error {err.args}")
|
||||
|
||||
|
||||
class MainCollector(object):
    """Dispatches waste-data collection to the collector module matching the
    configured provider and exposes the transformed results as properties.
    """

    def __init__(
        self,
        provider,
        postal_code,
        street_number,
        suffix,
        exclude_pickup_today,
        exclude_list,
        default_label,
    ):
        self.provider = provider.strip().lower()
        self.postal_code = postal_code.strip().upper()
        self.street_number = street_number.strip()
        self.suffix = suffix.strip().lower()
        self.exclude_pickup_today = exclude_pickup_today.strip()
        self.exclude_list = exclude_list.strip().lower()
        self.default_label = default_label.strip()

        # Map each provider registry onto the module implementing it; all
        # collector modules share the same get_waste_data_raw signature.
        collector_modules = (
            (SENSOR_COLLECTORS_AFVALWIJZER, mijnafvalwijzer),
            (SENSOR_COLLECTORS_DEAFVALAPP, deafvalapp),
            (SENSOR_COLLECTORS_ICALENDAR, icalendar),
            (SENSOR_COLLECTORS_OPZET, opzet),
            (SENSOR_COLLECTORS_RD4, rd4),
            (SENSOR_COLLECTORS_XIMMIO, ximmio),
        )

        waste_data_raw = []
        try:
            for collectors, module in collector_modules:
                if provider in collectors:
                    waste_data_raw = module.get_waste_data_raw(
                        self.provider,
                        self.postal_code,
                        self.street_number,
                        self.suffix,
                    )
                    break
            else:
                # NOTE(review): the previous code did `return False` here,
                # which is invalid in __init__ and raised TypeError at
                # runtime; raise a proper error instead (caught below).
                raise ValueError(f"Unknown provider: {provider}")
        except ValueError as err:
            _LOGGER.error(f"Check afvalwijzer platform settings {err.args}")
            # Fall back to an empty data set so the transformer below does
            # not hit an unbound local (previously a NameError after any
            # collector ValueError).
            waste_data_raw = []

        ##########################################################################
        # COMMON CODE
        ##########################################################################
        self._waste_data = WasteDataTransformer(
            waste_data_raw,
            self.exclude_pickup_today,
            self.exclude_list,
            self.default_label,
        )

    ##########################################################################
    # PROPERTIES FOR EXECUTION
    ##########################################################################
    @property
    def waste_data_with_today(self):
        return self._waste_data.waste_data_with_today

    @property
    def waste_data_without_today(self):
        return self._waste_data.waste_data_without_today

    @property
    def waste_data_provider(self):
        return self._waste_data.waste_data_provider

    @property
    def waste_types_provider(self):
        return self._waste_data.waste_types_provider

    @property
    def waste_data_custom(self):
        return self._waste_data.waste_data_custom

    @property
    def waste_types_custom(self):
        return self._waste_data.waste_types_custom
|
||||
@@ -1,58 +0,0 @@
|
||||
from datetime import datetime
|
||||
|
||||
import requests
|
||||
|
||||
from ..common.main_functions import _waste_type_rename
|
||||
from ..const.const import (
|
||||
_LOGGER,
|
||||
SENSOR_COLLECTOR_TO_URL,
|
||||
SENSOR_COLLECTORS_AFVALWIJZER,
|
||||
)
|
||||
|
||||
|
||||
def get_waste_data_raw(
    provider,
    postal_code,
    street_number,
    suffix,
):
    """Fetch waste-collection data from the (mijn)afvalwijzer JSON API.

    Returns the combined list of current ("ophaaldagen") and upcoming
    ("ophaaldagenNext") pickup entries, or None when the address is unknown.

    Raises ValueError for an unknown provider or any request failure, and
    KeyError when the JSON payload misses the expected keys.
    """
    if provider not in SENSOR_COLLECTORS_AFVALWIJZER:
        raise ValueError(f"Invalid provider: {provider}, please verify")

    # rova is served from its own subdomain of the afvalwijzer platform.
    if provider == "rova":
        provider = "inzamelkalender.rova"

    try:
        url = SENSOR_COLLECTOR_TO_URL["afvalwijzer_data_default"][0].format(
            provider,
            postal_code,
            street_number,
            suffix,
            datetime.now().strftime("%Y-%m-%d"),
        )
        # Timeout so a stalled server cannot block Home Assistant forever.
        raw_response = requests.get(url, timeout=60)
    except requests.exceptions.RequestException as err:
        raise ValueError(err) from err

    try:
        response = raw_response.json()
    except ValueError as e:
        raise ValueError(f"Invalid and/or no data received from {url}") from e

    if not response:
        _LOGGER.error("Address not found!")
        return

    try:
        waste_data_raw = (
            response["ophaaldagen"]["data"] + response["ophaaldagenNext"]["data"]
        )
    except KeyError as exc:
        raise KeyError(f"Invalid and/or no data received from {url}") from exc

    return waste_data_raw


if __name__ == "__main__":
    print("Yell something at a mountain!")
|
||||
@@ -1,70 +0,0 @@
|
||||
from datetime import datetime
|
||||
|
||||
import requests
|
||||
|
||||
from ..common.main_functions import _waste_type_rename
|
||||
from ..const.const import _LOGGER, SENSOR_COLLECTORS_OPZET
|
||||
|
||||
|
||||
def get_waste_data_raw(
    provider,
    postal_code,
    street_number,
    suffix,
):
    """Fetch waste-collection data from an Opzet-based collector.

    Resolves the address to a BAG id first, then fetches its waste streams.
    Returns a list of {"type": ..., "date": "YYYY-MM-DD"} dicts, or None
    when the address lookup returns no data.

    Raises ValueError for an unknown provider or any request/parse failure.
    """
    if provider not in SENSOR_COLLECTORS_OPZET.keys():
        raise ValueError(f"Invalid provider: {provider}, please verify")

    try:
        bag_id = None
        suffix = suffix.strip().upper()
        # NOTE(review): certificate verification is disabled for "suez" —
        # presumably its endpoint serves a broken TLS chain; confirm.
        _verify = provider != "suez"
        url = f"{SENSOR_COLLECTORS_OPZET[provider]}/rest/adressen/{postal_code}-{street_number}"
        # Timeout so a stalled server cannot block Home Assistant forever.
        raw_response = requests.get(url, timeout=60, verify=_verify)
    except requests.exceptions.RequestException as err:
        raise ValueError(err) from err

    try:
        response = raw_response.json()
    except ValueError as e:
        raise ValueError(f"Invalid and/or no data received from {url}") from e

    if not response:
        _LOGGER.error("No waste data found!")
        return

    try:
        if len(response) > 1 and suffix:
            for item in response:
                if (
                    item["huisletter"] == suffix
                    or item["huisnummerToevoeging"] == suffix
                ):
                    bag_id = item["bagId"]
                    break
            # Fall back to the first address when no entry matched the
            # suffix; otherwise the URL below would contain "None".
            if bag_id is None:
                bag_id = response[0]["bagId"]
        else:
            bag_id = response[0]["bagId"]

        url = f"{SENSOR_COLLECTORS_OPZET[provider]}/rest/adressen/{bag_id}/afvalstromen"
        waste_data_raw_temp = requests.get(url, timeout=60, verify=_verify).json()
        waste_data_raw = []

        for item in waste_data_raw_temp:
            if not item["ophaaldatum"]:
                continue
            waste_type = item["menu_title"]
            if not waste_type:
                continue
            temp = {"type": _waste_type_rename(waste_type.strip().lower())}
            temp["date"] = datetime.strptime(item["ophaaldatum"], "%Y-%m-%d").strftime(
                "%Y-%m-%d"
            )
            waste_data_raw.append(temp)
    except ValueError as exc:
        raise ValueError(f"Invalid and/or no data received from {url}") from exc

    return waste_data_raw


if __name__ == "__main__":
    print("Yell something at a mountain!")
|
||||
@@ -1,74 +0,0 @@
|
||||
from datetime import datetime
|
||||
import re
|
||||
|
||||
import requests
|
||||
|
||||
from ..common.main_functions import _waste_type_rename
|
||||
from ..const.const import _LOGGER, SENSOR_COLLECTORS_RD4
|
||||
|
||||
|
||||
def get_waste_data_raw(
    provider,
    postal_code,
    street_number,
    suffix,
):
    """Fetch this year's waste-collection dates from the RD4 API.

    Returns a list of {"type": ..., "date": "YYYY-MM-DD"} dicts, or None
    when the API yields no usable data or the address is unknown.

    Raises ValueError for an unknown provider, an unparseable postal code,
    or any request failure; KeyError when the payload misses expected keys.
    """
    if provider not in SENSOR_COLLECTORS_RD4.keys():
        raise ValueError(f"Invalid provider: {provider}, please verify")

    TODAY = datetime.now()
    YEAR_CURRENT = TODAY.year

    # [A-Za-z] instead of [A-z]: the latter also matches "[", "\", "]",
    # "^", "_" and "`", which are not valid postal-code letters.
    corrected_postal_code_parts = re.search(
        r"(\d\d\d\d) ?([A-Za-z][A-Za-z])", postal_code
    )
    # Guard the match: subscripting a failed match would raise a bare
    # TypeError instead of a useful message.
    if not corrected_postal_code_parts:
        raise ValueError(f"Invalid postal code: {postal_code}, please verify")
    corrected_postal_code = (
        f"{corrected_postal_code_parts[1]}+{corrected_postal_code_parts[2].upper()}"
    )

    try:
        url = SENSOR_COLLECTORS_RD4[provider].format(
            corrected_postal_code,
            street_number,
            suffix,
            YEAR_CURRENT,
        )
        # Timeout so a stalled server cannot block Home Assistant forever.
        raw_response = requests.get(url, timeout=60)
    except requests.exceptions.RequestException as err:
        raise ValueError(err) from err

    try:
        response = raw_response.json()
    except ValueError as e:
        raise ValueError(f"Invalid and/or no data received from {url}") from e

    if not response:
        _LOGGER.error("No waste data found!")
        return

    if not response["success"]:
        _LOGGER.error("Address not found!")
        return

    try:
        waste_data_raw_temp = response["data"]["items"][0]
    except KeyError as exc:
        raise KeyError(f"Invalid and/or no data received from {url}") from exc

    waste_data_raw = []

    for item in waste_data_raw_temp:
        if not item["date"]:
            continue

        waste_type = item["type"]
        if not waste_type:
            continue

        temp = {"type": _waste_type_rename(waste_type.strip().lower())}
        temp["date"] = datetime.strptime(item["date"], "%Y-%m-%d").strftime("%Y-%m-%d")
        waste_data_raw.append(temp)

    return waste_data_raw


if __name__ == "__main__":
    print("Yell something at a mountain!")
|
||||
@@ -1,83 +0,0 @@
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import requests
|
||||
|
||||
from ..common.main_functions import _waste_type_rename
|
||||
from ..const.const import _LOGGER, SENSOR_COLLECTOR_TO_URL, SENSOR_COLLECTORS_XIMMIO
|
||||
|
||||
|
||||
def get_waste_data_raw(
    provider,
    postal_code,
    street_number,
    suffix,
):
    """Fetch a year of waste pickups from the Ximmio API (two-step lookup).

    The first request resolves the address to a uniqueId/community; the
    second fetches all pickup dates from today to one year ahead. Returns a
    list of {"type": ..., "date": "YYYY-MM-DD"} dicts (earliest date per
    type), or None when the address is not found.

    Raises ValueError for an unknown provider or any request failure, and
    KeyError when the payload misses the expected "dataList" key.
    """
    if provider not in SENSOR_COLLECTORS_XIMMIO.keys():
        raise ValueError(f"Invalid provider: {provider}, please verify")

    # These collectors are served from the second Ximmio endpoint.
    collectors = ("avalex", "meerlanden", "rad", "westland")
    provider_url = "ximmio02" if provider in collectors else "ximmio01"

    # Round "now" down to midnight via a date-only round-trip.
    TODAY = datetime.now().strftime("%d-%m-%Y")
    DATE_TODAY = datetime.strptime(TODAY, "%d-%m-%Y")
    DATE_TODAY_NEXT_YEAR = (DATE_TODAY.date() + timedelta(days=365)).strftime(
        "%Y-%m-%d"
    )

    ##########################################################################
    # First request: get uniqueId and community
    ##########################################################################
    try:
        url = SENSOR_COLLECTOR_TO_URL[provider_url][0]
        companyCode = SENSOR_COLLECTORS_XIMMIO[provider]
        data = {
            "postCode": postal_code,
            "houseNumber": street_number,
            "companyCode": companyCode,
        }
        # Timeout so a stalled server cannot block Home Assistant forever.
        raw_response = requests.post(url=url, data=data, timeout=60)
        # Parse the JSON once instead of twice.
        address = raw_response.json()["dataList"][0]
        uniqueId = address["UniqueId"]
        community = address["Community"]
    except requests.exceptions.RequestException as err:
        raise ValueError(err) from err

    ##########################################################################
    # Second request: get the dates
    ##########################################################################
    try:
        url = SENSOR_COLLECTOR_TO_URL[provider_url][1]
        data = {
            "companyCode": companyCode,
            "startDate": DATE_TODAY.date(),
            "endDate": DATE_TODAY_NEXT_YEAR,
            "community": community,
            "uniqueAddressID": uniqueId,
        }
        raw_response = requests.post(url=url, data=data, timeout=60).json()
    except requests.exceptions.RequestException as err:
        raise ValueError(err) from err

    if not raw_response:
        _LOGGER.error("Address not found!")
        return

    try:
        response = raw_response["dataList"]
    except KeyError as e:
        raise KeyError(f"Invalid and/or no data received from {url}") from e

    waste_data_raw = []

    for item in response:
        temp = {"type": _waste_type_rename(item["_pickupTypeText"].strip().lower())}
        # Keep only the earliest upcoming pickup date for this waste type.
        temp["date"] = datetime.strptime(
            sorted(item["pickupDates"])[0], "%Y-%m-%dT%H:%M:%S"
        ).strftime("%Y-%m-%d")
        waste_data_raw.append(temp)

    return waste_data_raw


if __name__ == "__main__":
    print("Yell something at a mountain!")
|
||||
@@ -1,66 +0,0 @@
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from ..const.const import _LOGGER
|
||||
|
||||
|
||||
class DaySensorData(object):
    """Builds the today / tomorrow / day-after-tomorrow sensor values from
    formatted waste data.

    Each day's value is the list of waste types collected on that date,
    falling back to the default label when nothing is collected.
    """

    ##########################################################################
    # INIT
    ##########################################################################
    def __init__(
        self,
        waste_data_formatted,
        default_label,
    ):
        # Round "now" down to midnight via a date-only round-trip.
        today_str = datetime.now().strftime("%d-%m-%Y")
        midnight_today = datetime.strptime(today_str, "%d-%m-%Y")

        self.waste_data_formatted = sorted(
            waste_data_formatted, key=lambda d: d["date"]
        )
        self.today_date = midnight_today
        self.tomorrow_date = midnight_today + timedelta(days=1)
        self.day_after_tomorrow_date = midnight_today + timedelta(days=2)
        self.default_label = default_label

        self.waste_data_today = self.__gen_day_sensor(self.today_date)
        self.waste_data_tomorrow = self.__gen_day_sensor(self.tomorrow_date)
        self.waste_data_dot = self.__gen_day_sensor(self.day_after_tomorrow_date)

        self.data = self._gen_day_sensor_data()

    ##########################################################################
    # GENERATE TODAY, TOMORROW, DOT SENSOR(S)
    ##########################################################################

    def __gen_day_sensor(self, date):
        """Return the waste types collected on the given date, or a list
        containing only the default label when nothing matches."""
        found = []
        try:
            for entry in self.waste_data_formatted:
                if entry["date"] == date:
                    found.append(entry["type"])
            if not found:
                found.append(self.default_label)
        except Exception as err:
            _LOGGER.error(f"Other error occurred __gen_day_sensor: {err}")
        return found

    def _gen_day_sensor_data(self):
        """Join each day's waste types into one comma-separated string."""
        day_sensor = {}
        try:
            for key, names in (
                ("today", self.waste_data_today),
                ("tomorrow", self.waste_data_tomorrow),
                ("day_after_tomorrow", self.waste_data_dot),
            ):
                day_sensor[key] = ", ".join(names)
        except Exception as err:
            _LOGGER.error(f"Other error occurred _gen_day_sensor_data: {err}")
        return day_sensor

    @property
    def day_sensor_data(self):
        return self.data
|
||||
@@ -1,80 +0,0 @@
|
||||
def _waste_type_rename(item_name):
|
||||
# DEAFVALAPP
|
||||
if item_name == "gemengde plastics":
|
||||
item_name = "plastic"
|
||||
if item_name == "zak_blauw":
|
||||
item_name = "restafval"
|
||||
if item_name == "pbp":
|
||||
item_name = "pmd"
|
||||
if item_name == "rest":
|
||||
item_name = "restafval"
|
||||
if item_name == "kerstboom":
|
||||
item_name = "kerstbomen"
|
||||
# OPZET
|
||||
if item_name == "snoeiafval":
|
||||
item_name = "takken"
|
||||
if item_name == "sloop":
|
||||
item_name = "grofvuil"
|
||||
if item_name == "groente":
|
||||
item_name = "gft"
|
||||
if item_name == "groente-, fruit en tuinafval":
|
||||
item_name = "gft"
|
||||
if item_name == "groente, fruit- en tuinafval":
|
||||
item_name = "gft"
|
||||
if item_name == "kca":
|
||||
item_name = "chemisch"
|
||||
if item_name == "tariefzak restafval":
|
||||
item_name = "restafvalzakken"
|
||||
if item_name == "restafvalzakken":
|
||||
item_name = "restafvalzakken"
|
||||
if item_name == "rest":
|
||||
item_name = "restafval"
|
||||
if item_name == "plastic, blik & drinkpakken overbetuwe":
|
||||
item_name = "pmd"
|
||||
if item_name == "papier en karton":
|
||||
item_name = "papier"
|
||||
if item_name == "kerstb":
|
||||
item_name = "kerstboom"
|
||||
# RD4
|
||||
if item_name == "pruning":
|
||||
item_name = "takken"
|
||||
if item_name == "residual_waste":
|
||||
item_name = "restafval"
|
||||
if item_name == "best_bag":
|
||||
item_name = "best-tas"
|
||||
if item_name == "paper":
|
||||
item_name = "papier"
|
||||
if item_name == "christmas_trees":
|
||||
item_name = "kerstbomen"
|
||||
# XIMMIO
|
||||
if item_name == "branches":
|
||||
item_name = "takken"
|
||||
if item_name == "bulklitter":
|
||||
item_name = "grofvuil"
|
||||
if item_name == "bulkygardenwaste":
|
||||
item_name = "tuinafval"
|
||||
if item_name == "glass":
|
||||
item_name = "glas"
|
||||
if item_name == "green":
|
||||
item_name = "gft"
|
||||
if item_name == "grey":
|
||||
item_name = "restafval"
|
||||
if item_name == "kca":
|
||||
item_name = "chemisch"
|
||||
if item_name == "plastic":
|
||||
item_name = "plastic"
|
||||
if item_name == "packages":
|
||||
item_name = "pmd"
|
||||
if item_name == "paper":
|
||||
item_name = "papier"
|
||||
if item_name == "remainder":
|
||||
item_name = "restwagen"
|
||||
if item_name == "textile":
|
||||
item_name = "textiel"
|
||||
if item_name == "tree":
|
||||
item_name = "kerstbomen"
|
||||
return item_name
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print("Yell something at a mountain!")
|
||||
@@ -1,76 +0,0 @@
|
||||
from datetime import datetime
|
||||
|
||||
from ..const.const import _LOGGER
|
||||
|
||||
|
||||
class NextSensorData(object):
    """Derives the "next pickup" sensors (date, days-until, types) from the
    waste entries on or after the selected date."""

    ##########################################################################
    # INIT
    ##########################################################################
    def __init__(self, waste_data_after_date_selected, default_label):
        self.waste_data_after_date_selected = sorted(
            waste_data_after_date_selected, key=lambda d: d["date"]
        )

        # Round "now" down to midnight via a date-only round-trip.
        today_str = datetime.now().strftime("%d-%m-%Y")
        self.today_date = datetime.strptime(today_str, "%d-%m-%Y")
        self.default_label = default_label

        self.next_waste_date = self.__get_next_waste_date()
        self.next_waste_in_days = self.__get_next_waste_in_days()
        self.next_waste_type = self.__get_next_waste_type()

        self.data = self._gen_next_sensor_data()

    ##########################################################################
    # GENERATE NEXT SENSOR(S)
    ##########################################################################

    def __get_next_waste_date(self):
        """Date of the first upcoming pickup, or the default label."""
        result = self.default_label
        try:
            result = self.waste_data_after_date_selected[0]["date"]
        except Exception as err:
            _LOGGER.error(f"Other error occurred _get_next_waste_date: {err}")
        return result

    def __get_next_waste_in_days(self):
        """Days between today and the next pickup, or the default label."""
        result = self.default_label
        try:
            result = abs(self.today_date - self.next_waste_date).days  # type: ignore
        except Exception as err:
            _LOGGER.error(f"Other error occurred _get_next_waste_in_days: {err}")
        return result

    def __get_next_waste_type(self):
        """All waste types collected on the next pickup date."""
        types_on_next_date = []
        try:
            for entry in self.waste_data_after_date_selected:
                if entry["date"] == self.next_waste_date:
                    types_on_next_date.append(entry["type"])
            if not types_on_next_date:
                types_on_next_date.append(self.default_label)
        except Exception as err:
            _LOGGER.error(f"Other error occurred _get_next_waste_type: {err}")
        return types_on_next_date

    # Generate the dict consumed by the "next" sensors.
    def _gen_next_sensor_data(self):
        next_sensor = {}
        try:
            next_sensor["next_date"] = self.next_waste_date
            next_sensor["next_in_days"] = self.next_waste_in_days
            next_sensor["next_type"] = ", ".join(self.next_waste_type)
        except Exception as err:
            _LOGGER.error(f"Other error occurred _gen_next_sensor_data: {err}")
        return next_sensor

    @property
    def next_sensor_data(self):
        return self.data
|
||||
@@ -1,181 +0,0 @@
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from ..common.day_sensor_data import DaySensorData
|
||||
from ..common.next_sensor_data import NextSensorData
|
||||
from ..const.const import _LOGGER
|
||||
|
||||
# import sys
|
||||
# def excepthook(type, value, traceback):
|
||||
# _LOGGER.error(value)
|
||||
# sys.excepthook = excepthook
|
||||
|
||||
|
||||
class WasteDataTransformer(object):
    """Transform raw waste pickup data into the structures the HASS sensors use.

    ``waste_data_raw`` is a list of dicts with at least the keys ``type``
    (str) and ``date`` (str, ``%Y-%m-%d`` format).
    """

    ##########################################################################
    # INIT
    ##########################################################################
    def __init__(
        self,
        waste_data_raw,
        exclude_pickup_today,
        exclude_list,
        default_label,
    ):
        self.waste_data_raw = waste_data_raw
        self.exclude_pickup_today = exclude_pickup_today
        self.exclude_list = exclude_list.strip().lower()
        self.default_label = default_label

        # Comma-separated exclude string -> set of normalized waste types.
        # Exact-token matching fixes the old substring test, which could
        # wrongly exclude e.g. "rest" because "restafval" was on the list.
        self._exclude_set = {
            token.strip()
            for token in self.exclude_list.split(",")
            if token.strip()
        }

        # Truncate "now" to midnight so comparisons are day-based.
        TODAY = datetime.now().strftime("%d-%m-%Y")
        self.DATE_TODAY = datetime.strptime(TODAY, "%d-%m-%Y")
        self.DATE_TOMORROW = self.DATE_TODAY + timedelta(days=1)

        (
            self._waste_data_with_today,
            self._waste_data_without_today,
        ) = self.__structure_waste_data()

        (
            self._waste_data_provider,
            self._waste_types_provider,
            self._waste_data_custom,
            self._waste_types_custom,
        ) = self.__gen_sensor_waste_data()

    def __is_excluded(self, item_name):
        # True when the normalized waste type is on the exclude list.
        return item_name in self._exclude_set

    ##########################################################################
    # STRUCTURE ALL WASTE DATA IN CUSTOM FORMAT
    ##########################################################################
    def __structure_waste_data(self):
        """Return ({type: first upcoming date incl. today}, {type: first date excl. today}).

        Types without any upcoming pickup map to ``default_label``. Unlike
        the previous version this ALWAYS returns two dicts — a parse failure
        on one item is logged and skipped instead of aborting and returning
        ``None`` (which crashed the tuple unpack in ``__init__``).
        """
        waste_data_with_today = {}
        waste_data_without_today = {}

        for item in self.waste_data_raw:
            try:
                item_date = datetime.strptime(item["date"], "%Y-%m-%d")
                item_name = item["type"].strip().lower()
            except (KeyError, ValueError, AttributeError, TypeError) as err:
                _LOGGER.error(f"Invalid waste item skipped: {err}")
                continue

            if self.__is_excluded(item_name):
                continue
            # First matching entry per type wins (input is presumably
            # date-sorted per type — TODO confirm against the collectors).
            if item_name not in waste_data_with_today and item_date >= self.DATE_TODAY:
                waste_data_with_today[item_name] = item_date
            if item_name not in waste_data_without_today and item_date > self.DATE_TODAY:
                waste_data_without_today[item_name] = item_date

        # Types that never got an upcoming date fall back to the default label.
        for item in self.waste_data_raw:
            try:
                item_name = item["type"].strip().lower()
            except (KeyError, AttributeError, TypeError) as err:
                _LOGGER.error(f"Invalid waste type skipped: {err}")
                continue
            if self.__is_excluded(item_name):
                continue
            waste_data_with_today.setdefault(item_name, self.default_label)
            waste_data_without_today.setdefault(item_name, self.default_label)

        return waste_data_with_today, waste_data_without_today

    ##########################################################################
    # GENERATE REQUIRED DATA FOR HASS SENSORS
    ##########################################################################
    def __gen_sensor_waste_data(self):
        """Build provider and custom sensor data for the selected start date."""
        # "false"/"no" means today's pickups still count; anything else
        # starts the window at tomorrow.
        if self.exclude_pickup_today.casefold() in ("false", "no"):
            date_selected = self.DATE_TODAY
            waste_data_provider = self._waste_data_with_today
        else:
            date_selected = self.DATE_TOMORROW
            waste_data_provider = self._waste_data_without_today

        # Defaults so a failure below can never leave these names unbound.
        waste_types_provider = []
        waste_data_formatted = []
        waste_data_after_date_selected = []
        waste_data_custom = {}
        waste_types_custom = []

        try:
            # Normalize before testing the exclude list (the old code compared
            # the raw type string against the lowered exclude string, which
            # never matched mixed-case types). The raw names are kept as the
            # reported sensor type names.
            waste_types_provider = sorted(
                {
                    waste["type"]
                    for waste in self.waste_data_raw
                    if not self.__is_excluded(waste["type"].strip().lower())
                }
            )
        except Exception as err:
            _LOGGER.error(f"Other error occurred waste_types_provider: {err}")

        try:
            waste_data_formatted = [
                {
                    "type": waste["type"],
                    "date": datetime.strptime(waste["date"], "%Y-%m-%d"),
                }
                for waste in self.waste_data_raw
                if waste["type"] in waste_types_provider
            ]
        except Exception as err:
            _LOGGER.error(f"Other error occurred waste_data_formatted: {err}")

        days = DaySensorData(waste_data_formatted, self.default_label)

        try:
            waste_data_after_date_selected = [
                waste
                for waste in waste_data_formatted
                if waste["date"] >= date_selected
            ]
        except Exception as err:
            _LOGGER.error(f"Other error occurred waste_data_after_date_selected: {err}")

        next_data = NextSensorData(waste_data_after_date_selected, self.default_label)

        try:
            # next_* keys and day keys are disjoint; merged into one mapping.
            waste_data_custom = {**next_data.next_sensor_data, **days.day_sensor_data}
        except Exception as err:
            _LOGGER.error(f"Other error occurred waste_data_custom: {err}")

        try:
            waste_types_custom = sorted(waste_data_custom.keys())
        except Exception as err:
            _LOGGER.error(f"Other error occurred waste_types_custom: {err}")

        return (
            waste_data_provider,
            waste_types_provider,
            waste_data_custom,
            waste_types_custom,
        )

    ##########################################################################
    # PROPERTIES FOR EXECUTION
    ##########################################################################

    @property
    def waste_data_with_today(self):
        return self._waste_data_with_today

    @property
    def waste_data_without_today(self):
        return self._waste_data_without_today

    @property
    def waste_data_provider(self):
        return self._waste_data_provider

    @property
    def waste_types_provider(self):
        return self._waste_types_provider

    @property
    def waste_data_custom(self):
        return self._waste_data_custom

    @property
    def waste_types_custom(self):
        return self._waste_types_custom
|
||||
@@ -1,131 +0,0 @@
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
# Shared module logger for the afvalwijzer integration.
_LOGGER = logging.getLogger(__name__)

# Integration identity.
API = "api"
NAME = "afvalwijzer"
VERSION = "2023.01.01"
ISSUE_URL = "https://github.com/xirixiz/homeassistant-afvalwijzer/issues"

# Collectors queried through provider-specific URL templates. The {n}
# placeholders are filled with provider/postal-code/house-number values
# by the collector implementations.
SENSOR_COLLECTOR_TO_URL = {
    "afvalwijzer_data_default": [
        "https://api.{0}.nl/webservices/appsinput/?apikey=5ef443e778f41c4f75c69459eea6e6ae0c2d92de729aa0fc61653815fbd6a8ca&method=postcodecheck&postcode={1}&street=&huisnummer={2}&toevoeging={3}&app_name=afvalwijzer&platform=web&afvaldata={4}&langs=nl&"
    ],
    "afvalstoffendienstkalender": [
        "https://{0}.afvalstoffendienstkalender.nl/nl/{1}/{2}/"
    ],
    "afvalstoffendienstkalender-s-hertogenbosch": [
        "https://afvalstoffendienstkalender.nl/nl/{0}/{1}/"
    ],
    # Ximmio uses a two-step API: resolve the address, then fetch the calendar.
    "ximmio01": [
        "https://wasteapi.ximmio.com/api/FetchAdress",
        "https://wasteapi.ximmio.com/api/GetCalendar",
    ],
    "ximmio02": [
        "https://wasteprod2api.ximmio.com/api/FetchAdress",
        "https://wasteprod2api.ximmio.com/api/GetCalendar",
    ],
}

# Municipalities/companies served by the Opzet platform -> base URL.
SENSOR_COLLECTORS_OPZET = {
    "alkmaar": "https://www.stadswerk072.nl",
    "alphenaandenrijn": "https://afvalkalender.alphenaandenrijn.nl",
    "berkelland": "https://afvalkalender.gemeenteberkelland.nl",
    "blink": "https://mijnblink.nl",
    "cranendonck": "https://afvalkalender.cranendonck.nl",
    "cyclus": "https://afvalkalender.cyclusnv.nl",
    "dar": "https://afvalkalender.dar.nl",
    "denhaag": "https://huisvuilkalender.denhaag.nl",
    "gad": "https://inzamelkalender.gad.nl",
    "hvc": "https://inzamelkalender.hvcgroep.nl",
    "lingewaard": "https://afvalwijzer.lingewaard.nl",
    "middelburg-vlissingen": "https://afvalwijzer.middelburgvlissingen.nl",
    "montfoort": "https://afvalkalender.cyclusnv.nl",
    "peelenmaas": "https://afvalkalender.peelenmaas.nl",
    "prezero": "https://inzamelwijzer.prezero.nl",
    "purmerend": "https://afvalkalender.purmerend.nl",
    "rmn": "https://inzamelschema.rmn.nl",
    "schouwen-duiveland": "https://afvalkalender.schouwen-duiveland.nl",
    "spaarnelanden": "https://afvalwijzer.spaarnelanden.nl",
    "sudwestfryslan": "https://afvalkalender.sudwestfryslan.nl",
    "suez": "https://inzamelwijzer.prezero.nl",
    "venray": "https://afvalkalender.venray.nl",
    "voorschoten": "https://afvalkalender.voorschoten.nl",
    "waalre": "https://afvalkalender.waalre.nl",
    "zrd": "https://afvalkalender.zrd.nl",
}

# Providers exposing an iCalendar feed.
SENSOR_COLLECTORS_ICALENDAR = {
    "eemsdelta": "https://www.eemsdelta.nl/trash-calendar/download/{1}/{2}",
}

# Providers handled by the (mijn)afvalwijzer collector.
SENSOR_COLLECTORS_AFVALWIJZER = [
    "mijnafvalwijzer",
    "afvalstoffendienstkalender",
    "afvalstoffendienstkalender-s-hertogenbosch",
    "rova",
]

# Ximmio company codes (GUIDs) per provider name.
SENSOR_COLLECTORS_XIMMIO = {
    "acv": "f8e2844a-095e-48f9-9f98-71fceb51d2c3",
    "almere": "53d8db94-7945-42fd-9742-9bbc71dbe4c1",
    "areareiniging": "adc418da-d19b-11e5-ab30-625662870761",
    "avalex": "f7a74ad1-fdbf-4a43-9f91-44644f4d4222",
    "avri": "78cd4156-394b-413d-8936-d407e334559a",
    "bar": "bb58e633-de14-4b2a-9941-5bc419f1c4b0",
    "hellendoorn": "24434f5b-7244-412b-9306-3a2bd1e22bc1",
    "meerlanden": "800bf8d7-6dd1-4490-ba9d-b419d6dc8a45",
    "meppel": "b7a594c7-2490-4413-88f9-94749a3ec62a",
    "rad": "13a2cad9-36d0-4b01-b877-efcb421a864d",
    "twentemilieu": "8d97bb56-5afd-4cbc-a651-b4f7314264b4",
    "waardlanden": "942abcf6-3775-400d-ae5d-7380d728b23c",
    "westland": "6fc75608-126a-4a50-9241-a002ce8c8a6c",
    "ximmio": "800bf8d7-6dd1-4490-ba9d-b419d6dc8a45",
    "reinis": "9dc25c8a-175a-4a41-b7a1-83f237a80b77",
}

# RD4 REST endpoint template.
SENSOR_COLLECTORS_RD4 = {
    "rd4": "https://data.rd4.nl/api/v1/waste-calendar?postal_code={0}&house_number={1}&house_number_extension={2}&year={3}",
}

# DeAfvalapp data service endpoint template.
SENSOR_COLLECTORS_DEAFVALAPP = {
    "deafvalapp": "https://dataservice.deafvalapp.nl/dataservice/DataServiceServlet?service=OPHAALSCHEMA&land=NL&postcode={0}&straatId=0&huisnr={1}&huisnrtoev={2}",
}

# YAML configuration keys.
CONF_COLLECTOR = "provider"
CONF_API_TOKEN = "api_token"
CONF_POSTAL_CODE = "postal_code"
CONF_STREET_NUMBER = "street_number"
CONF_SUFFIX = "suffix"
CONF_DATE_FORMAT = "date_format"
CONF_EXCLUDE_PICKUP_TODAY = "exclude_pickup_today"
CONF_DEFAULT_LABEL = "default_label"
CONF_ID = "id"
CONF_EXCLUDE_LIST = "exclude_list"

# Sensor presentation defaults.
SENSOR_PREFIX = "afvalwijzer "
SENSOR_ICON = "mdi:recycle"

# Extra state attribute keys exposed by the sensors.
ATTR_LAST_UPDATE = "last_update"
ATTR_IS_COLLECTION_DATE_TODAY = "is_collection_date_today"
ATTR_IS_COLLECTION_DATE_TOMORROW = "is_collection_date_tomorrow"
ATTR_IS_COLLECTION_DATE_DAY_AFTER_TOMORROW = "is_collection_date_day_after_tomorrow"
ATTR_DAYS_UNTIL_COLLECTION_DATE = "days_until_collection_date"
ATTR_YEAR_MONTH_DAY_DATE = "year_month_day_date"

# Update/throttle settings used by the sensor platform.
MIN_TIME_BETWEEN_UPDATES = timedelta(hours=1)
PARALLEL_UPDATES = 1
SCAN_INTERVAL = timedelta(seconds=30)

# Home Assistant domain identifiers.
DOMAIN = "afvalwijzer"
DOMAIN_DATA = "afvalwijzer_data"

# Banner logged once when the platform module is imported.
STARTUP_MESSAGE = f"""
-------------------------------------------------------------------,
Afvalwijzer - {VERSION},
This is a custom integration!,
If you have any issues with this you need to open an issue here:,
https://github.com/xirixiz/homeassistant-afvalwijzer/issues,
-------------------------------------------------------------------,
"""
|
||||
@@ -1,14 +0,0 @@
|
||||
{
|
||||
"domain": "afvalwijzer",
|
||||
"name": "Afvalwijzer",
|
||||
"version": "2023.01.01",
|
||||
"iot_class": "cloud_polling",
|
||||
"documentation": "https://github.com/xirixiz/homeassistant-afvalwijzer/blob/master/README.md",
|
||||
"issue_tracker": "https://github.com/xirixiz/homeassistant-afvalwijzer/issues",
|
||||
"config_flow": false,
|
||||
"dependencies": [],
|
||||
"codeowners": [
|
||||
"@xirixiz"
|
||||
],
|
||||
"requirements": []
|
||||
}
|
||||
@@ -1,147 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Sensor component Afvalwijzer
|
||||
Author: Bram van Dartel - xirixiz
|
||||
"""
|
||||
|
||||
from functools import partial
|
||||
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.util import Throttle
|
||||
import voluptuous as vol
|
||||
|
||||
from .collector.main_collector import MainCollector
|
||||
from .const.const import (
|
||||
_LOGGER,
|
||||
CONF_COLLECTOR,
|
||||
CONF_DEFAULT_LABEL,
|
||||
CONF_EXCLUDE_LIST,
|
||||
CONF_EXCLUDE_PICKUP_TODAY,
|
||||
CONF_ID,
|
||||
CONF_POSTAL_CODE,
|
||||
CONF_STREET_NUMBER,
|
||||
CONF_SUFFIX,
|
||||
MIN_TIME_BETWEEN_UPDATES,
|
||||
PARALLEL_UPDATES,
|
||||
SCAN_INTERVAL,
|
||||
STARTUP_MESSAGE,
|
||||
)
|
||||
from .sensor_custom import CustomSensor
|
||||
from .sensor_provider import ProviderSensor
|
||||
|
||||
# YAML configuration schema for the afvalwijzer sensor platform.
# All options are strings; exclude_pickup_today defaults to "true" so
# today's pickups are skipped unless explicitly disabled.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Optional(
            CONF_COLLECTOR, default="mijnafvalwijzer"
        ): cv.string,
        vol.Required(CONF_POSTAL_CODE, default="1234AB"): cv.string,
        vol.Required(CONF_STREET_NUMBER, default="5"): cv.string,
        vol.Optional(CONF_SUFFIX, default=""): cv.string,
        vol.Optional(CONF_EXCLUDE_PICKUP_TODAY, default="true"): cv.string,
        vol.Optional(CONF_EXCLUDE_LIST, default=""): cv.string,
        vol.Optional(CONF_DEFAULT_LABEL, default="geen"): cv.string,
        vol.Optional(CONF_ID.strip().lower(), default=""): cv.string,
    }
)

# Announce the custom integration once at module import time.
_LOGGER.info(STARTUP_MESSAGE)
|
||||
|
||||
|
||||
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the afvalwijzer sensor platform from YAML configuration.

    Creates one ProviderSensor per provider-reported waste type and one
    CustomSensor per computed waste type. Aborts setup when the collector
    cannot be created from the given settings.
    """
    provider = config.get(CONF_COLLECTOR)
    postal_code = config.get(CONF_POSTAL_CODE)
    street_number = config.get(CONF_STREET_NUMBER)
    suffix = config.get(CONF_SUFFIX)
    exclude_pickup_today = config.get(CONF_EXCLUDE_PICKUP_TODAY)
    exclude_list = config.get(CONF_EXCLUDE_LIST)
    default_label = config.get(CONF_DEFAULT_LABEL)

    _LOGGER.debug(f"Afvalwijzer provider = {provider}")
    _LOGGER.debug(f"Afvalwijzer zipcode = {postal_code}")
    _LOGGER.debug(f"Afvalwijzer street_number = {street_number}")

    # The collector performs blocking I/O, so build it in the executor.
    try:
        collector = await hass.async_add_executor_job(
            partial(
                MainCollector,
                provider,
                postal_code,
                street_number,
                suffix,
                exclude_pickup_today,
                exclude_list,
                default_label,
            )
        )
    except ValueError as err:
        _LOGGER.error(f"Check afvalwijzer platform settings {err.args}")
        # Without a collector no sensors can be created; abort instead of
        # continuing and crashing with a NameError (previous behavior).
        return

    fetch_data = AfvalwijzerData(hass, config)

    waste_types_provider = collector.waste_types_provider
    _LOGGER.debug(f"Generating waste_types_provider list = {waste_types_provider}")
    waste_types_custom = collector.waste_types_custom
    _LOGGER.debug(f"Generating waste_types_custom list = {waste_types_custom}")

    entities = []

    for waste_type in waste_types_provider:
        _LOGGER.debug(f"Adding sensor provider: {waste_type}")
        entities.append(ProviderSensor(hass, waste_type, fetch_data, config))
    for waste_type in waste_types_custom:
        _LOGGER.debug(f"Adding sensor custom: {waste_type}")
        entities.append(CustomSensor(hass, waste_type, fetch_data, config))

    _LOGGER.debug(f"Entities appended = {entities}")
    async_add_entities(entities)
|
||||
|
||||
|
||||
class AfvalwijzerData(object):
    """Throttled data fetcher shared by all afvalwijzer sensor entities."""

    def __init__(self, hass, config):
        self._hass = hass
        self.config = config
        # Populated by update(); sensors read these attributes afterwards.
        self.waste_data_with_today = None
        self.waste_data_without_today = None
        self.waste_data_custom = None

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Re-create the collector and refresh the three waste-data views.

        On any failure the affected view is set to the configured default
        label so the sensors always have something to display.
        """
        provider = self.config.get(CONF_COLLECTOR)
        postal_code = self.config.get(CONF_POSTAL_CODE)
        street_number = self.config.get(CONF_STREET_NUMBER)
        suffix = self.config.get(CONF_SUFFIX)
        exclude_pickup_today = self.config.get(CONF_EXCLUDE_PICKUP_TODAY)
        default_label = self.config.get(CONF_DEFAULT_LABEL)
        exclude_list = self.config.get(CONF_EXCLUDE_LIST)

        try:
            collector = MainCollector(
                provider,
                postal_code,
                street_number,
                suffix,
                exclude_pickup_today,
                exclude_list,
                default_label,
            )
        except ValueError as err:
            _LOGGER.error(f"Check afvalwijzer platform settings {err.args}")
            # The previous code fell through here and hit a NameError on
            # `collector` below; fall back to the default label instead.
            self.waste_data_with_today = default_label
            self.waste_data_without_today = default_label
            self.waste_data_custom = default_label
            return

        # waste data provider update - with today
        try:
            self.waste_data_with_today = collector.waste_data_with_today
        except ValueError as err:
            _LOGGER.error(f"Check waste_data_provider {err.args}")
            self.waste_data_with_today = default_label

        # waste data provider update - without today
        try:
            self.waste_data_without_today = collector.waste_data_without_today
        except ValueError as err:
            _LOGGER.error(f"Check waste_data_provider {err.args}")
            self.waste_data_without_today = default_label

        # waste data custom update
        try:
            self.waste_data_custom = collector.waste_data_custom
        except ValueError as err:
            _LOGGER.error(f"Check waste_data_custom {err.args}")
            self.waste_data_custom = default_label
|
||||
@@ -1,106 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
from datetime import datetime
|
||||
import hashlib
|
||||
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
from .const.const import (
|
||||
_LOGGER,
|
||||
ATTR_LAST_UPDATE,
|
||||
ATTR_YEAR_MONTH_DAY_DATE,
|
||||
CONF_DEFAULT_LABEL,
|
||||
CONF_ID,
|
||||
CONF_POSTAL_CODE,
|
||||
CONF_STREET_NUMBER,
|
||||
CONF_SUFFIX,
|
||||
MIN_TIME_BETWEEN_UPDATES,
|
||||
PARALLEL_UPDATES,
|
||||
SENSOR_ICON,
|
||||
SENSOR_PREFIX,
|
||||
)
|
||||
|
||||
|
||||
class CustomSensor(Entity):
    """Sensor for a computed ("custom") afvalwijzer value, e.g. next pickup."""

    def __init__(self, hass, waste_type, fetch_data, config):
        self.hass = hass
        self.waste_type = waste_type
        self.fetch_data = fetch_data
        self.config = config
        self._id_name = self.config.get(CONF_ID)
        self._default_label = self.config.get(CONF_DEFAULT_LABEL)
        self._last_update = None
        # Optional user-supplied id is inserted between prefix and type.
        self._name = (
            SENSOR_PREFIX + (f"{self._id_name} " if len(self._id_name) > 0 else "")
        ) + self.waste_type
        self._state = self.config.get(CONF_DEFAULT_LABEL)
        self._icon = SENSOR_ICON
        self._year_month_day_date = None
        # Stable unique id derived from waste type + address configuration.
        self._unique_id = hashlib.sha1(
            f"{self.waste_type}{self.config.get(CONF_ID)}{self.config.get(CONF_POSTAL_CODE)}{self.config.get(CONF_STREET_NUMBER)}{self.config.get(CONF_SUFFIX,'')}".encode(
                "utf-8"
            )
        ).hexdigest()

    @property
    def name(self):
        return self._name

    @property
    def unique_id(self):
        return self._unique_id

    @property
    def icon(self):
        return self._icon

    @property
    def state(self):
        return self._state

    @property
    def extra_state_attributes(self):
        # The date attribute is only exposed when the state is a real date.
        if self._year_month_day_date is not None:
            return {
                ATTR_LAST_UPDATE: self._last_update,
                ATTR_YEAR_MONTH_DAY_DATE: self._year_month_day_date,
            }
        return {
            ATTR_LAST_UPDATE: self._last_update,
        }

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    async def async_update(self):
        """Refresh the shared data and derive this sensor's state from it."""
        await self.hass.async_add_executor_job(self.fetch_data.update)

        waste_data_custom = self.fetch_data.waste_data_custom

        try:
            # Guard against missing or failed data: the previous code indexed
            # straight into waste_data_custom, so an absent type raised an
            # uncaught KeyError (only ValueError was handled). This mirrors
            # the guard used by ProviderSensor.
            if not waste_data_custom or self.waste_type not in waste_data_custom:
                raise ValueError
            # Add attribute, set the last updated status of the sensor
            self._last_update = datetime.now().strftime("%d-%m-%Y %H:%M")

            if isinstance(waste_data_custom[self.waste_type], datetime):
                _LOGGER.debug(
                    f"Generating state via AfvalwijzerCustomSensor for = {self.waste_type} with value {waste_data_custom[self.waste_type].date()}"
                )
                # Add the US date format
                collection_date_us = waste_data_custom[self.waste_type].date()
                self._year_month_day_date = str(collection_date_us)

                # Add the NL date format as default state
                self._state = datetime.strftime(
                    waste_data_custom[self.waste_type].date(), "%d-%m-%Y"
                )
            else:
                _LOGGER.debug(
                    f"Generating state via AfvalwijzerCustomSensor for = {self.waste_type} with value {waste_data_custom[self.waste_type]}"
                )
                # Add non-date as default state
                self._state = str(waste_data_custom[self.waste_type])
        except ValueError:
            _LOGGER.debug("ValueError AfvalwijzerCustomSensor - unable to set value!")
            self._state = self._default_label
            self._year_month_day_date = None
            self._last_update = datetime.now().strftime("%d-%m-%Y %H:%M")
|
||||
@@ -1,138 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
from datetime import date, datetime, timedelta
|
||||
import hashlib
|
||||
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
from .const.const import (
|
||||
_LOGGER,
|
||||
ATTR_DAYS_UNTIL_COLLECTION_DATE,
|
||||
ATTR_IS_COLLECTION_DATE_DAY_AFTER_TOMORROW,
|
||||
ATTR_IS_COLLECTION_DATE_TODAY,
|
||||
ATTR_IS_COLLECTION_DATE_TOMORROW,
|
||||
ATTR_LAST_UPDATE,
|
||||
ATTR_YEAR_MONTH_DAY_DATE,
|
||||
CONF_DEFAULT_LABEL,
|
||||
CONF_EXCLUDE_PICKUP_TODAY,
|
||||
CONF_ID,
|
||||
CONF_POSTAL_CODE,
|
||||
CONF_STREET_NUMBER,
|
||||
CONF_SUFFIX,
|
||||
MIN_TIME_BETWEEN_UPDATES,
|
||||
PARALLEL_UPDATES,
|
||||
SENSOR_ICON,
|
||||
SENSOR_PREFIX,
|
||||
)
|
||||
|
||||
|
||||
class ProviderSensor(Entity):
    """Sensor for a waste type reported directly by the configured provider."""

    def __init__(self, hass, waste_type, fetch_data, config):
        self.hass = hass
        self.waste_type = waste_type
        self.fetch_data = fetch_data
        self.config = config
        self._id_name = self.config.get(CONF_ID)
        self._default_label = self.config.get(CONF_DEFAULT_LABEL)
        self._exclude_pickup_today = self.config.get(CONF_EXCLUDE_PICKUP_TODAY)
        # Optional user-supplied id is inserted between prefix and type.
        self._name = (
            SENSOR_PREFIX
            + (self._id_name + " " if len(self._id_name) > 0 else "")
            + self.waste_type
        )
        self._icon = SENSOR_ICON
        self._state = self.config.get(CONF_DEFAULT_LABEL)
        self._last_update = None
        self._days_until_collection_date = None
        self._is_collection_date_today = False
        self._is_collection_date_tomorrow = False
        self._is_collection_date_day_after_tomorrow = False
        self._year_month_day_date = None
        # Stable unique id derived from waste type + address configuration.
        self._unique_id = hashlib.sha1(
            f"{self.waste_type}{self.config.get(CONF_ID)}{self.config.get(CONF_POSTAL_CODE)}{self.config.get(CONF_STREET_NUMBER)}{self.config.get(CONF_SUFFIX,'')}".encode(
                "utf-8"
            )
        ).hexdigest()

    @property
    def name(self):
        return self._name

    @property
    def unique_id(self):
        return self._unique_id

    @property
    def icon(self):
        return self._icon

    @property
    def state(self):
        return self._state

    @property
    def extra_state_attributes(self):
        return {
            ATTR_LAST_UPDATE: self._last_update,
            ATTR_DAYS_UNTIL_COLLECTION_DATE: self._days_until_collection_date,
            ATTR_IS_COLLECTION_DATE_TODAY: self._is_collection_date_today,
            ATTR_IS_COLLECTION_DATE_TOMORROW: self._is_collection_date_tomorrow,
            ATTR_IS_COLLECTION_DATE_DAY_AFTER_TOMORROW: self._is_collection_date_day_after_tomorrow,
            ATTR_YEAR_MONTH_DAY_DATE: self._year_month_day_date,
        }

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    async def async_update(self):
        """Refresh the shared data and derive this sensor's state from it."""
        await self.hass.async_add_executor_job(self.fetch_data.update)

        # Select whether today's pickups count toward this sensor.
        if self._exclude_pickup_today.casefold() in ("false", "no"):
            waste_data_provider = self.fetch_data.waste_data_with_today
        else:
            waste_data_provider = self.fetch_data.waste_data_without_today

        try:
            if not waste_data_provider or self.waste_type not in waste_data_provider:
                raise ValueError
            # Add attribute, set the last updated status of the sensor
            self._last_update = datetime.now().strftime("%d-%m-%Y %H:%M")

            if isinstance(waste_data_provider[self.waste_type], datetime):
                # Previously these debug lines misreported the sensor class
                # as "AfvalwijzerCustomSensor".
                _LOGGER.debug(
                    f"Generating state via AfvalwijzerProviderSensor for = {self.waste_type} with value {waste_data_provider[self.waste_type].date()}"
                )
                # Add the US date format
                collection_date_us = waste_data_provider[self.waste_type].date()
                self._year_month_day_date = str(collection_date_us)

                # Add the days until the collection date
                delta = collection_date_us - date.today()
                self._days_until_collection_date = delta.days

                # Check if the collection days are in today, tomorrow and/or the day after tomorrow
                self._is_collection_date_today = date.today() == collection_date_us
                self._is_collection_date_tomorrow = (
                    date.today() + timedelta(days=1) == collection_date_us
                )
                self._is_collection_date_day_after_tomorrow = (
                    date.today() + timedelta(days=2) == collection_date_us
                )

                # Add the NL date format as default state
                self._state = datetime.strftime(
                    waste_data_provider[self.waste_type].date(), "%d-%m-%Y"
                )
            else:
                _LOGGER.debug(
                    f"Generating state via AfvalwijzerProviderSensor for = {self.waste_type} with value {waste_data_provider[self.waste_type]}"
                )
                # Add non-date as default state
                self._state = str(waste_data_provider[self.waste_type])
        except ValueError:
            _LOGGER.debug("ValueError AfvalwijzerProviderSensor - unable to set value!")
            self._state = self._default_label
            self._days_until_collection_date = None
            self._year_month_day_date = None
            self._is_collection_date_today = False
            self._is_collection_date_tomorrow = False
            self._is_collection_date_day_after_tomorrow = False
            self._last_update = datetime.now().strftime("%d-%m-%Y %H:%M")
|
||||
@@ -1,397 +0,0 @@
|
||||
[
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-01-02"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-01-05"
|
||||
},
|
||||
{
|
||||
"nameType": "restafval",
|
||||
"type": "restafval",
|
||||
"date": "2021-01-08"
|
||||
},
|
||||
{
|
||||
"nameType": "kerstbomen",
|
||||
"type": "kerstbomen",
|
||||
"date": "2021-01-09"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-01-15"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-01-19"
|
||||
},
|
||||
{
|
||||
"nameType": "papier",
|
||||
"type": "papier",
|
||||
"date": "2021-01-20"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-01-29"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-02-02"
|
||||
},
|
||||
{
|
||||
"nameType": "restafval",
|
||||
"type": "restafval",
|
||||
"date": "2021-02-05"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-02-12"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-02-16"
|
||||
},
|
||||
{
|
||||
"nameType": "papier",
|
||||
"type": "papier",
|
||||
"date": "2021-02-17"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-02-26"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-03-02"
|
||||
},
|
||||
{
|
||||
"nameType": "restafval",
|
||||
"type": "restafval",
|
||||
"date": "2021-03-05"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-03-12"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-03-16"
|
||||
},
|
||||
{
|
||||
"nameType": "papier",
|
||||
"type": "papier",
|
||||
"date": "2021-03-17"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-03-26"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-03-30"
|
||||
},
|
||||
{
|
||||
"nameType": "restafval",
|
||||
"type": "restafval",
|
||||
"date": "2021-04-02"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-04-09"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-04-13"
|
||||
},
|
||||
{
|
||||
"nameType": "papier",
|
||||
"type": "papier",
|
||||
"date": "2021-04-21"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-04-23"
|
||||
},
|
||||
{
|
||||
"nameType": "restafval",
|
||||
"type": "restafval",
|
||||
"date": "2021-04-30"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-04-30"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-05-07"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-05-11"
|
||||
},
|
||||
{
|
||||
"nameType": "papier",
|
||||
"type": "papier",
|
||||
"date": "2021-05-19"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-05-21"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-05-25"
|
||||
},
|
||||
{
|
||||
"nameType": "restafval",
|
||||
"type": "restafval",
|
||||
"date": "2021-05-28"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-06-04"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-06-08"
|
||||
},
|
||||
{
|
||||
"nameType": "papier",
|
||||
"type": "papier",
|
||||
"date": "2021-06-16"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-06-18"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-06-22"
|
||||
},
|
||||
{
|
||||
"nameType": "restafval",
|
||||
"type": "restafval",
|
||||
"date": "2021-06-25"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-07-02"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-07-06"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-07-16"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-07-20"
|
||||
},
|
||||
{
|
||||
"nameType": "papier",
|
||||
"type": "papier",
|
||||
"date": "2021-07-21"
|
||||
},
|
||||
{
|
||||
"nameType": "restafval",
|
||||
"type": "restafval",
|
||||
"date": "2021-07-23"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-07-30"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-08-03"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-08-13"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-08-17"
|
||||
},
|
||||
{
|
||||
"nameType": "papier",
|
||||
"type": "papier",
|
||||
"date": "2021-08-18"
|
||||
},
|
||||
{
|
||||
"nameType": "restafval",
|
||||
"type": "restafval",
|
||||
"date": "2021-08-20"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-08-27"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-08-31"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-09-10"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-09-14"
|
||||
},
|
||||
{
|
||||
"nameType": "papier",
|
||||
"type": "papier",
|
||||
"date": "2021-09-15"
|
||||
},
|
||||
{
|
||||
"nameType": "restafval",
|
||||
"type": "restafval",
|
||||
"date": "2021-09-17"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-09-24"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-09-28"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-10-08"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-10-12"
|
||||
},
|
||||
{
|
||||
"nameType": "restafval",
|
||||
"type": "restafval",
|
||||
"date": "2021-10-15"
|
||||
},
|
||||
{
|
||||
"nameType": "papier",
|
||||
"type": "papier",
|
||||
"date": "2021-10-20"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-10-22"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-10-26"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-11-05"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-11-09"
|
||||
},
|
||||
{
|
||||
"nameType": "restafval",
|
||||
"type": "restafval",
|
||||
"date": "2021-11-12"
|
||||
},
|
||||
{
|
||||
"nameType": "papier",
|
||||
"type": "papier",
|
||||
"date": "2021-11-17"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-11-19"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-11-19"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-12-03"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-12-07"
|
||||
},
|
||||
{
|
||||
"nameType": "restafval",
|
||||
"type": "restafval",
|
||||
"date": "2021-12-10"
|
||||
},
|
||||
{
|
||||
"nameType": "papier",
|
||||
"type": "papier",
|
||||
"date": "2021-12-15"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-12-17"
|
||||
},
|
||||
{
|
||||
"nameType": "pmd",
|
||||
"type": "pmd",
|
||||
"date": "2021-12-21"
|
||||
},
|
||||
{
|
||||
"nameType": "gft",
|
||||
"type": "gft",
|
||||
"date": "2021-12-31"
|
||||
}
|
||||
]
|
||||
@@ -1,115 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Sensor component for AfvalDienst
|
||||
Author: Bram van Dartel - xirixiz
|
||||
|
||||
import afvalwijzer
|
||||
from afvalwijzer.collector.mijnafvalwijzer import AfvalWijzer
|
||||
AfvalWijzer().get_data('','','')
|
||||
|
||||
python3 -m afvalwijzer.tests.test_module
|
||||
|
||||
"""
|
||||
|
||||
|
||||
from ..collector.main_collector import MainCollector
|
||||
|
||||
# provider = "afvalstoffendienstkalender"
|
||||
# api_token = "5ef443e778f41c4f75c69459eea6e6ae0c2d92de729aa0fc61653815fbd6a8ca"
|
||||
|
||||
# Afvalstoffendienstkalender
|
||||
# postal_code = "5391KE"
|
||||
# street_number = "1"
|
||||
|
||||
# Common
|
||||
suffix = ""
|
||||
exclude_pickup_today = "True"
|
||||
default_label = "geen"
|
||||
exclude_list = ""
|
||||
|
||||
# DeAfvalapp
|
||||
# provider = "deafvalapp"
|
||||
# postal_code = "6105CN"
|
||||
# street_number = "1"
|
||||
|
||||
# Icalendar
|
||||
# provider = "eemsdelta"
|
||||
# postal_code = "9991AB"
|
||||
# street_number = "2"
|
||||
|
||||
# Afvalwijzer
|
||||
# provider = "mijnafvalwijzer"
|
||||
# postal_code = "5146eg"
|
||||
# street_number = "1"
|
||||
|
||||
provider = "rmn"
|
||||
postal_code = "3701XK"
|
||||
street_number = "24"
|
||||
suffix = "b"
|
||||
|
||||
# Opzet
|
||||
# provider = "prezero"
|
||||
# postal_code = "6665CN"
|
||||
# street_number = "1"
|
||||
|
||||
# RD4
|
||||
# provider = "rd4"
|
||||
# postal_code = "6301ET"
|
||||
# street_number = "24"
|
||||
# suffix = "C"
|
||||
|
||||
# Ximmio
|
||||
# provider = "meerlanden"
|
||||
# postal_code = "2121xt"
|
||||
# street_number = "38"
|
||||
|
||||
# Ximmio
|
||||
# provider = "acv"
|
||||
# postal_code = "6713CG"
|
||||
# street_number = "11"
|
||||
|
||||
# postal_code = postal_code.strip().upper()
|
||||
|
||||
collector = MainCollector(
|
||||
provider,
|
||||
postal_code,
|
||||
street_number,
|
||||
suffix,
|
||||
exclude_pickup_today,
|
||||
exclude_list,
|
||||
default_label,
|
||||
)
|
||||
|
||||
|
||||
# MainCollector(
|
||||
# provider,
|
||||
# postal_code,
|
||||
# street_number,
|
||||
# suffix,
|
||||
# exclude_pickup_today,
|
||||
# exclude_list,
|
||||
# default_label,
|
||||
# )
|
||||
|
||||
# data = XimmioCollector().get_waste_data_provider("meerlanden", postal_code2, street_number2, suffix, default_label, exclude_list)
|
||||
# data2 = MijnAfvalWijzerCollector().get_waste_data_provider("mijnafvalwijzer", postal_code, street_number, suffix, default_label, exclude_list)
|
||||
|
||||
|
||||
#########################################################################################################
|
||||
print("\n")
|
||||
|
||||
print(collector.waste_data_with_today)
|
||||
print(collector.waste_data_without_today)
|
||||
print(collector.waste_data_custom)
|
||||
print(collector.waste_types_provider)
|
||||
print(collector.waste_types_custom)
|
||||
|
||||
print("\n")
|
||||
|
||||
# for key, value in afval1.items():
|
||||
# print(key, value)
|
||||
|
||||
# print("\n")
|
||||
|
||||
# for key, value in afval2.items():
|
||||
# print(key, value)
|
||||
@@ -1,269 +0,0 @@
|
||||
"""
|
||||
HACS gives you a powerful UI to handle downloads of all your custom needs.
|
||||
|
||||
For more details about this integration, please refer to the documentation at
|
||||
https://hacs.xyz/
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
from aiogithubapi import AIOGitHubAPIException, GitHub, GitHubAPI
|
||||
from aiogithubapi.const import ACCEPT_HEADERS
|
||||
from awesomeversion import AwesomeVersion
|
||||
from homeassistant.components.lovelace.system_health import system_health_info
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import Platform, __version__ as HAVERSION
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.discovery import async_load_platform
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.start import async_at_start
|
||||
from homeassistant.loader import async_get_integration
|
||||
import voluptuous as vol
|
||||
|
||||
from .base import HacsBase
|
||||
from .const import DOMAIN, MINIMUM_HA_VERSION, STARTUP
|
||||
from .data_client import HacsDataClient
|
||||
from .enums import ConfigurationType, HacsDisabledReason, HacsStage, LovelaceMode
|
||||
from .frontend import async_register_frontend
|
||||
from .utils.configuration_schema import hacs_config_combined
|
||||
from .utils.data import HacsData
|
||||
from .utils.queue_manager import QueueManager
|
||||
from .utils.version import version_left_higher_or_equal_then_right
|
||||
from .websocket import async_register_websocket_commands
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({DOMAIN: hacs_config_combined()}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
|
||||
async def async_initialize_integration(
|
||||
hass: HomeAssistant,
|
||||
*,
|
||||
config_entry: ConfigEntry | None = None,
|
||||
config: dict[str, Any] | None = None,
|
||||
) -> bool:
|
||||
"""Initialize the integration"""
|
||||
hass.data[DOMAIN] = hacs = HacsBase()
|
||||
hacs.enable_hacs()
|
||||
|
||||
if config is not None:
|
||||
if DOMAIN not in config:
|
||||
return True
|
||||
if hacs.configuration.config_type == ConfigurationType.CONFIG_ENTRY:
|
||||
return True
|
||||
hacs.configuration.update_from_dict(
|
||||
{
|
||||
"config_type": ConfigurationType.YAML,
|
||||
**config[DOMAIN],
|
||||
"config": config[DOMAIN],
|
||||
}
|
||||
)
|
||||
|
||||
if config_entry is not None:
|
||||
if config_entry.source == SOURCE_IMPORT:
|
||||
hass.async_create_task(hass.config_entries.async_remove(config_entry.entry_id))
|
||||
return False
|
||||
|
||||
hacs.configuration.update_from_dict(
|
||||
{
|
||||
"config_entry": config_entry,
|
||||
"config_type": ConfigurationType.CONFIG_ENTRY,
|
||||
**config_entry.data,
|
||||
**config_entry.options,
|
||||
}
|
||||
)
|
||||
|
||||
integration = await async_get_integration(hass, DOMAIN)
|
||||
|
||||
hacs.set_stage(None)
|
||||
|
||||
hacs.log.info(STARTUP, integration.version)
|
||||
|
||||
clientsession = async_get_clientsession(hass)
|
||||
|
||||
hacs.integration = integration
|
||||
hacs.version = integration.version
|
||||
hacs.configuration.dev = integration.version == "0.0.0"
|
||||
hacs.hass = hass
|
||||
hacs.queue = QueueManager(hass=hass)
|
||||
hacs.data = HacsData(hacs=hacs)
|
||||
hacs.data_client = HacsDataClient(
|
||||
session=clientsession,
|
||||
client_name=f"HACS/{integration.version}",
|
||||
)
|
||||
hacs.system.running = True
|
||||
hacs.session = clientsession
|
||||
|
||||
hacs.core.lovelace_mode = LovelaceMode.YAML
|
||||
try:
|
||||
lovelace_info = await system_health_info(hacs.hass)
|
||||
hacs.core.lovelace_mode = LovelaceMode(lovelace_info.get("mode", "yaml"))
|
||||
except BaseException: # lgtm [py/catch-base-exception] pylint: disable=broad-except
|
||||
# If this happens, the users YAML is not valid, we assume YAML mode
|
||||
pass
|
||||
hacs.log.debug("Configuration type: %s", hacs.configuration.config_type)
|
||||
hacs.core.config_path = hacs.hass.config.path()
|
||||
|
||||
if hacs.core.ha_version is None:
|
||||
hacs.core.ha_version = AwesomeVersion(HAVERSION)
|
||||
|
||||
## Legacy GitHub client
|
||||
hacs.github = GitHub(
|
||||
hacs.configuration.token,
|
||||
clientsession,
|
||||
headers={
|
||||
"User-Agent": f"HACS/{hacs.version}",
|
||||
"Accept": ACCEPT_HEADERS["preview"],
|
||||
},
|
||||
)
|
||||
|
||||
## New GitHub client
|
||||
hacs.githubapi = GitHubAPI(
|
||||
token=hacs.configuration.token,
|
||||
session=clientsession,
|
||||
**{"client_name": f"HACS/{hacs.version}"},
|
||||
)
|
||||
|
||||
async def async_startup():
|
||||
"""HACS startup tasks."""
|
||||
hacs.enable_hacs()
|
||||
|
||||
for location in (
|
||||
hass.config.path("custom_components/custom_updater.py"),
|
||||
hass.config.path("custom_components/custom_updater/__init__.py"),
|
||||
):
|
||||
if os.path.exists(location):
|
||||
hacs.log.critical(
|
||||
"This cannot be used with custom_updater. "
|
||||
"To use this you need to remove custom_updater form %s",
|
||||
location,
|
||||
)
|
||||
|
||||
hacs.disable_hacs(HacsDisabledReason.CONSTRAINS)
|
||||
return False
|
||||
|
||||
if not version_left_higher_or_equal_then_right(
|
||||
hacs.core.ha_version.string,
|
||||
MINIMUM_HA_VERSION,
|
||||
):
|
||||
hacs.log.critical(
|
||||
"You need HA version %s or newer to use this integration.",
|
||||
MINIMUM_HA_VERSION,
|
||||
)
|
||||
hacs.disable_hacs(HacsDisabledReason.CONSTRAINS)
|
||||
return False
|
||||
|
||||
if not await hacs.data.restore():
|
||||
hacs.disable_hacs(HacsDisabledReason.RESTORE)
|
||||
return False
|
||||
|
||||
if not hacs.configuration.experimental:
|
||||
can_update = await hacs.async_can_update()
|
||||
hacs.log.debug("Can update %s repositories", can_update)
|
||||
|
||||
hacs.set_active_categories()
|
||||
|
||||
async_register_websocket_commands(hass)
|
||||
async_register_frontend(hass, hacs)
|
||||
|
||||
if hacs.configuration.config_type == ConfigurationType.YAML:
|
||||
hass.async_create_task(
|
||||
async_load_platform(hass, Platform.SENSOR, DOMAIN, {}, hacs.configuration.config)
|
||||
)
|
||||
hacs.log.info("Update entities are only supported when using UI configuration")
|
||||
|
||||
else:
|
||||
await hass.config_entries.async_forward_entry_setups(
|
||||
config_entry,
|
||||
[Platform.SENSOR, Platform.UPDATE]
|
||||
if hacs.configuration.experimental
|
||||
else [Platform.SENSOR],
|
||||
)
|
||||
|
||||
hacs.set_stage(HacsStage.SETUP)
|
||||
if hacs.system.disabled:
|
||||
return False
|
||||
|
||||
# Schedule startup tasks
|
||||
async_at_start(hass=hass, at_start_cb=hacs.startup_tasks)
|
||||
|
||||
hacs.set_stage(HacsStage.WAITING)
|
||||
hacs.log.info("Setup complete, waiting for Home Assistant before startup tasks starts")
|
||||
|
||||
return not hacs.system.disabled
|
||||
|
||||
async def async_try_startup(_=None):
|
||||
"""Startup wrapper for yaml config."""
|
||||
try:
|
||||
startup_result = await async_startup()
|
||||
except AIOGitHubAPIException:
|
||||
startup_result = False
|
||||
if not startup_result:
|
||||
if (
|
||||
hacs.configuration.config_type == ConfigurationType.YAML
|
||||
or hacs.system.disabled_reason != HacsDisabledReason.INVALID_TOKEN
|
||||
):
|
||||
hacs.log.info("Could not setup HACS, trying again in 15 min")
|
||||
async_call_later(hass, 900, async_try_startup)
|
||||
return
|
||||
hacs.enable_hacs()
|
||||
|
||||
await async_try_startup()
|
||||
|
||||
# Mischief managed!
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: dict[str, Any]) -> bool:
|
||||
"""Set up this integration using yaml."""
|
||||
return await async_initialize_integration(hass=hass, config=config)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Set up this integration using UI."""
|
||||
config_entry.async_on_unload(config_entry.add_update_listener(async_reload_entry))
|
||||
setup_result = await async_initialize_integration(hass=hass, config_entry=config_entry)
|
||||
hacs: HacsBase = hass.data[DOMAIN]
|
||||
return setup_result and not hacs.system.disabled
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Handle removal of an entry."""
|
||||
hacs: HacsBase = hass.data[DOMAIN]
|
||||
|
||||
# Clear out pending queue
|
||||
hacs.queue.clear()
|
||||
|
||||
for task in hacs.recuring_tasks:
|
||||
# Cancel all pending tasks
|
||||
task()
|
||||
|
||||
# Store data
|
||||
await hacs.data.async_write(force=True)
|
||||
|
||||
try:
|
||||
if hass.data.get("frontend_panels", {}).get("hacs"):
|
||||
hacs.log.info("Removing sidepanel")
|
||||
hass.components.frontend.async_remove_panel("hacs")
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
platforms = ["sensor"]
|
||||
if hacs.configuration.experimental:
|
||||
platforms.append("update")
|
||||
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(config_entry, platforms)
|
||||
|
||||
hacs.set_stage(None)
|
||||
hacs.disable_hacs(HacsDisabledReason.REMOVED)
|
||||
|
||||
hass.data.pop(DOMAIN, None)
|
||||
|
||||
return unload_ok
|
||||
|
||||
|
||||
async def async_reload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
|
||||
"""Reload the HACS config entry."""
|
||||
await async_unload_entry(hass, config_entry)
|
||||
await async_setup_entry(hass, config_entry)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
File diff suppressed because it is too large
Load Diff
@@ -1,182 +0,0 @@
|
||||
"""Adds config flow for HACS."""
|
||||
from aiogithubapi import GitHubDeviceAPI, GitHubException
|
||||
from aiogithubapi.common.const import OAUTH_USER_LOGIN
|
||||
from awesomeversion import AwesomeVersion
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.const import __version__ as HAVERSION
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.loader import async_get_integration
|
||||
import voluptuous as vol
|
||||
|
||||
from .base import HacsBase
|
||||
from .const import CLIENT_ID, DOMAIN, MINIMUM_HA_VERSION
|
||||
from .enums import ConfigurationType
|
||||
from .utils.configuration_schema import RELEASE_LIMIT, hacs_config_option_schema
|
||||
from .utils.logger import LOGGER
|
||||
|
||||
|
||||
class HacsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for HACS."""
|
||||
|
||||
VERSION = 1
|
||||
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize."""
|
||||
self._errors = {}
|
||||
self.device = None
|
||||
self.activation = None
|
||||
self.log = LOGGER
|
||||
self._progress_task = None
|
||||
self._login_device = None
|
||||
self._reauth = False
|
||||
|
||||
async def async_step_user(self, user_input):
|
||||
"""Handle a flow initialized by the user."""
|
||||
self._errors = {}
|
||||
if self._async_current_entries():
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
if self.hass.data.get(DOMAIN):
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
|
||||
if user_input:
|
||||
if [x for x in user_input if not user_input[x]]:
|
||||
self._errors["base"] = "acc"
|
||||
return await self._show_config_form(user_input)
|
||||
|
||||
return await self.async_step_device(user_input)
|
||||
|
||||
## Initial form
|
||||
return await self._show_config_form(user_input)
|
||||
|
||||
async def async_step_device(self, _user_input):
|
||||
"""Handle device steps"""
|
||||
|
||||
async def _wait_for_activation(_=None):
|
||||
if self._login_device is None or self._login_device.expires_in is None:
|
||||
async_call_later(self.hass, 1, _wait_for_activation)
|
||||
return
|
||||
|
||||
response = await self.device.activation(device_code=self._login_device.device_code)
|
||||
self.activation = response.data
|
||||
self.hass.async_create_task(
|
||||
self.hass.config_entries.flow.async_configure(flow_id=self.flow_id)
|
||||
)
|
||||
|
||||
if not self.activation:
|
||||
integration = await async_get_integration(self.hass, DOMAIN)
|
||||
if not self.device:
|
||||
self.device = GitHubDeviceAPI(
|
||||
client_id=CLIENT_ID,
|
||||
session=aiohttp_client.async_get_clientsession(self.hass),
|
||||
**{"client_name": f"HACS/{integration.version}"},
|
||||
)
|
||||
async_call_later(self.hass, 1, _wait_for_activation)
|
||||
try:
|
||||
response = await self.device.register()
|
||||
self._login_device = response.data
|
||||
return self.async_show_progress(
|
||||
step_id="device",
|
||||
progress_action="wait_for_device",
|
||||
description_placeholders={
|
||||
"url": OAUTH_USER_LOGIN,
|
||||
"code": self._login_device.user_code,
|
||||
},
|
||||
)
|
||||
except GitHubException as exception:
|
||||
self.log.error(exception)
|
||||
return self.async_abort(reason="github")
|
||||
|
||||
return self.async_show_progress_done(next_step_id="device_done")
|
||||
|
||||
async def _show_config_form(self, user_input):
|
||||
"""Show the configuration form to edit location data."""
|
||||
|
||||
if not user_input:
|
||||
user_input = {}
|
||||
|
||||
if AwesomeVersion(HAVERSION) < MINIMUM_HA_VERSION:
|
||||
return self.async_abort(
|
||||
reason="min_ha_version",
|
||||
description_placeholders={"version": MINIMUM_HA_VERSION},
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required("acc_logs", default=user_input.get("acc_logs", False)): bool,
|
||||
vol.Required("acc_addons", default=user_input.get("acc_addons", False)): bool,
|
||||
vol.Required(
|
||||
"acc_untested", default=user_input.get("acc_untested", False)
|
||||
): bool,
|
||||
vol.Required("acc_disable", default=user_input.get("acc_disable", False)): bool,
|
||||
}
|
||||
),
|
||||
errors=self._errors,
|
||||
)
|
||||
|
||||
async def async_step_device_done(self, _user_input):
|
||||
"""Handle device steps"""
|
||||
if self._reauth:
|
||||
existing_entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
|
||||
self.hass.config_entries.async_update_entry(
|
||||
existing_entry, data={"token": self.activation.access_token}
|
||||
)
|
||||
await self.hass.config_entries.async_reload(existing_entry.entry_id)
|
||||
return self.async_abort(reason="reauth_successful")
|
||||
|
||||
return self.async_create_entry(title="", data={"token": self.activation.access_token})
|
||||
|
||||
async def async_step_reauth(self, user_input=None):
|
||||
"""Perform reauth upon an API authentication error."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(self, user_input=None):
|
||||
"""Dialog that informs the user that reauth is required."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema({}),
|
||||
)
|
||||
self._reauth = True
|
||||
return await self.async_step_device(None)
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry):
|
||||
return HacsOptionsFlowHandler(config_entry)
|
||||
|
||||
|
||||
class HacsOptionsFlowHandler(config_entries.OptionsFlow):
|
||||
"""HACS config flow options handler."""
|
||||
|
||||
def __init__(self, config_entry):
|
||||
"""Initialize HACS options flow."""
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(self, _user_input=None):
|
||||
"""Manage the options."""
|
||||
return await self.async_step_user()
|
||||
|
||||
async def async_step_user(self, user_input=None):
|
||||
"""Handle a flow initialized by the user."""
|
||||
hacs: HacsBase = self.hass.data.get(DOMAIN)
|
||||
if user_input is not None:
|
||||
limit = int(user_input.get(RELEASE_LIMIT, 5))
|
||||
if limit <= 0 or limit > 100:
|
||||
return self.async_abort(reason="release_limit_value")
|
||||
return self.async_create_entry(title="", data=user_input)
|
||||
|
||||
if hacs is None or hacs.configuration is None:
|
||||
return self.async_abort(reason="not_setup")
|
||||
|
||||
if hacs.configuration.config_type == ConfigurationType.YAML:
|
||||
schema = {vol.Optional("not_in_use", default=""): str}
|
||||
else:
|
||||
schema = hacs_config_option_schema(self.config_entry.options)
|
||||
del schema["frontend_repo"]
|
||||
del schema["frontend_repo_url"]
|
||||
|
||||
return self.async_show_form(step_id="user", data_schema=vol.Schema(schema))
|
||||
@@ -1,293 +0,0 @@
|
||||
"""Constants for HACS"""
|
||||
from typing import TypeVar
|
||||
|
||||
from aiogithubapi.common.const import ACCEPT_HEADERS
|
||||
|
||||
NAME_SHORT = "HACS"
|
||||
DOMAIN = "hacs"
|
||||
CLIENT_ID = "395a8e669c5de9f7c6e8"
|
||||
MINIMUM_HA_VERSION = "2022.11.0"
|
||||
|
||||
URL_BASE = "/hacsfiles"
|
||||
|
||||
TV = TypeVar("TV")
|
||||
|
||||
PACKAGE_NAME = "custom_components.hacs"
|
||||
|
||||
DEFAULT_CONCURRENT_TASKS = 15
|
||||
DEFAULT_CONCURRENT_BACKOFF_TIME = 1
|
||||
|
||||
HACS_REPOSITORY_ID = "172733314"
|
||||
|
||||
HACS_ACTION_GITHUB_API_HEADERS = {
|
||||
"User-Agent": "HACS/action",
|
||||
"Accept": ACCEPT_HEADERS["preview"],
|
||||
}
|
||||
|
||||
VERSION_STORAGE = "6"
|
||||
STORENAME = "hacs"
|
||||
|
||||
HACS_SYSTEM_ID = "0717a0cd-745c-48fd-9b16-c8534c9704f9-bc944b0f-fd42-4a58-a072-ade38d1444cd"
|
||||
|
||||
STARTUP = """
|
||||
-------------------------------------------------------------------
|
||||
HACS (Home Assistant Community Store)
|
||||
|
||||
Version: %s
|
||||
This is a custom integration
|
||||
If you have any issues with this you need to open an issue here:
|
||||
https://github.com/hacs/integration/issues
|
||||
-------------------------------------------------------------------
|
||||
"""
|
||||
|
||||
LOCALE = [
|
||||
"ALL",
|
||||
"AF",
|
||||
"AL",
|
||||
"DZ",
|
||||
"AS",
|
||||
"AD",
|
||||
"AO",
|
||||
"AI",
|
||||
"AQ",
|
||||
"AG",
|
||||
"AR",
|
||||
"AM",
|
||||
"AW",
|
||||
"AU",
|
||||
"AT",
|
||||
"AZ",
|
||||
"BS",
|
||||
"BH",
|
||||
"BD",
|
||||
"BB",
|
||||
"BY",
|
||||
"BE",
|
||||
"BZ",
|
||||
"BJ",
|
||||
"BM",
|
||||
"BT",
|
||||
"BO",
|
||||
"BQ",
|
||||
"BA",
|
||||
"BW",
|
||||
"BV",
|
||||
"BR",
|
||||
"IO",
|
||||
"BN",
|
||||
"BG",
|
||||
"BF",
|
||||
"BI",
|
||||
"KH",
|
||||
"CM",
|
||||
"CA",
|
||||
"CV",
|
||||
"KY",
|
||||
"CF",
|
||||
"TD",
|
||||
"CL",
|
||||
"CN",
|
||||
"CX",
|
||||
"CC",
|
||||
"CO",
|
||||
"KM",
|
||||
"CG",
|
||||
"CD",
|
||||
"CK",
|
||||
"CR",
|
||||
"HR",
|
||||
"CU",
|
||||
"CW",
|
||||
"CY",
|
||||
"CZ",
|
||||
"CI",
|
||||
"DK",
|
||||
"DJ",
|
||||
"DM",
|
||||
"DO",
|
||||
"EC",
|
||||
"EG",
|
||||
"SV",
|
||||
"GQ",
|
||||
"ER",
|
||||
"EE",
|
||||
"ET",
|
||||
"FK",
|
||||
"FO",
|
||||
"FJ",
|
||||
"FI",
|
||||
"FR",
|
||||
"GF",
|
||||
"PF",
|
||||
"TF",
|
||||
"GA",
|
||||
"GM",
|
||||
"GE",
|
||||
"DE",
|
||||
"GH",
|
||||
"GI",
|
||||
"GR",
|
||||
"GL",
|
||||
"GD",
|
||||
"GP",
|
||||
"GU",
|
||||
"GT",
|
||||
"GG",
|
||||
"GN",
|
||||
"GW",
|
||||
"GY",
|
||||
"HT",
|
||||
"HM",
|
||||
"VA",
|
||||
"HN",
|
||||
"HK",
|
||||
"HU",
|
||||
"IS",
|
||||
"IN",
|
||||
"ID",
|
||||
"IR",
|
||||
"IQ",
|
||||
"IE",
|
||||
"IM",
|
||||
"IL",
|
||||
"IT",
|
||||
"JM",
|
||||
"JP",
|
||||
"JE",
|
||||
"JO",
|
||||
"KZ",
|
||||
"KE",
|
||||
"KI",
|
||||
"KP",
|
||||
"KR",
|
||||
"KW",
|
||||
"KG",
|
||||
"LA",
|
||||
"LV",
|
||||
"LB",
|
||||
"LS",
|
||||
"LR",
|
||||
"LY",
|
||||
"LI",
|
||||
"LT",
|
||||
"LU",
|
||||
"MO",
|
||||
"MK",
|
||||
"MG",
|
||||
"MW",
|
||||
"MY",
|
||||
"MV",
|
||||
"ML",
|
||||
"MT",
|
||||
"MH",
|
||||
"MQ",
|
||||
"MR",
|
||||
"MU",
|
||||
"YT",
|
||||
"MX",
|
||||
"FM",
|
||||
"MD",
|
||||
"MC",
|
||||
"MN",
|
||||
"ME",
|
||||
"MS",
|
||||
"MA",
|
||||
"MZ",
|
||||
"MM",
|
||||
"NA",
|
||||
"NR",
|
||||
"NP",
|
||||
"NL",
|
||||
"NC",
|
||||
"NZ",
|
||||
"NI",
|
||||
"NE",
|
||||
"NG",
|
||||
"NU",
|
||||
"NF",
|
||||
"MP",
|
||||
"NO",
|
||||
"OM",
|
||||
"PK",
|
||||
"PW",
|
||||
"PS",
|
||||
"PA",
|
||||
"PG",
|
||||
"PY",
|
||||
"PE",
|
||||
"PH",
|
||||
"PN",
|
||||
"PL",
|
||||
"PT",
|
||||
"PR",
|
||||
"QA",
|
||||
"RO",
|
||||
"RU",
|
||||
"RW",
|
||||
"RE",
|
||||
"BL",
|
||||
"SH",
|
||||
"KN",
|
||||
"LC",
|
||||
"MF",
|
||||
"PM",
|
||||
"VC",
|
||||
"WS",
|
||||
"SM",
|
||||
"ST",
|
||||
"SA",
|
||||
"SN",
|
||||
"RS",
|
||||
"SC",
|
||||
"SL",
|
||||
"SG",
|
||||
"SX",
|
||||
"SK",
|
||||
"SI",
|
||||
"SB",
|
||||
"SO",
|
||||
"ZA",
|
||||
"GS",
|
||||
"SS",
|
||||
"ES",
|
||||
"LK",
|
||||
"SD",
|
||||
"SR",
|
||||
"SJ",
|
||||
"SZ",
|
||||
"SE",
|
||||
"CH",
|
||||
"SY",
|
||||
"TW",
|
||||
"TJ",
|
||||
"TZ",
|
||||
"TH",
|
||||
"TL",
|
||||
"TG",
|
||||
"TK",
|
||||
"TO",
|
||||
"TT",
|
||||
"TN",
|
||||
"TR",
|
||||
"TM",
|
||||
"TC",
|
||||
"TV",
|
||||
"UG",
|
||||
"UA",
|
||||
"AE",
|
||||
"GB",
|
||||
"US",
|
||||
"UM",
|
||||
"UY",
|
||||
"UZ",
|
||||
"VU",
|
||||
"VE",
|
||||
"VN",
|
||||
"VG",
|
||||
"VI",
|
||||
"WF",
|
||||
"EH",
|
||||
"YE",
|
||||
"ZM",
|
||||
"ZW",
|
||||
]
|
||||
@@ -1,82 +0,0 @@
|
||||
"""Diagnostics support for HACS."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from aiogithubapi import GitHubException
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .base import HacsBase
|
||||
from .const import DOMAIN
|
||||
from .utils.configuration_schema import TOKEN
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
hacs: HacsBase = hass.data[DOMAIN]
|
||||
|
||||
data = {
|
||||
"entry": entry.as_dict(),
|
||||
"hacs": {
|
||||
"stage": hacs.stage,
|
||||
"version": hacs.version,
|
||||
"disabled_reason": hacs.system.disabled_reason,
|
||||
"new": hacs.status.new,
|
||||
"startup": hacs.status.startup,
|
||||
"categories": hacs.common.categories,
|
||||
"renamed_repositories": hacs.common.renamed_repositories,
|
||||
"archived_repositories": hacs.common.archived_repositories,
|
||||
"ignored_repositories": hacs.common.ignored_repositories,
|
||||
"lovelace_mode": hacs.core.lovelace_mode,
|
||||
"configuration": {},
|
||||
},
|
||||
"custom_repositories": [
|
||||
repo.data.full_name
|
||||
for repo in hacs.repositories.list_all
|
||||
if not hacs.repositories.is_default(str(repo.data.id))
|
||||
],
|
||||
"repositories": [],
|
||||
}
|
||||
|
||||
for key in (
|
||||
"appdaemon",
|
||||
"country",
|
||||
"debug",
|
||||
"dev",
|
||||
"experimental",
|
||||
"netdaemon",
|
||||
"python_script",
|
||||
"release_limit",
|
||||
"theme",
|
||||
):
|
||||
data["hacs"]["configuration"][key] = getattr(hacs.configuration, key, None)
|
||||
|
||||
for repository in hacs.repositories.list_downloaded:
|
||||
data["repositories"].append(
|
||||
{
|
||||
"data": repository.data.to_json(),
|
||||
"integration_manifest": repository.integration_manifest,
|
||||
"repository_manifest": repository.repository_manifest.to_dict(),
|
||||
"ref": repository.ref,
|
||||
"paths": {
|
||||
"localpath": repository.localpath.replace(hacs.core.config_path, "/config"),
|
||||
"local": repository.content.path.local.replace(
|
||||
hacs.core.config_path, "/config"
|
||||
),
|
||||
"remote": repository.content.path.remote,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
try:
|
||||
rate_limit_response = await hacs.githubapi.rate_limit()
|
||||
data["rate_limit"] = rate_limit_response.data.as_dict
|
||||
except GitHubException as exception:
|
||||
data["rate_limit"] = str(exception)
|
||||
|
||||
return async_redact_data(data, (TOKEN,))
|
||||
@@ -1,119 +0,0 @@
|
||||
"""HACS Base entities."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import DOMAIN, HACS_SYSTEM_ID, NAME_SHORT
|
||||
from .enums import HacsDispatchEvent, HacsGitHubRepo
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .base import HacsBase
|
||||
from .repositories.base import HacsRepository
|
||||
|
||||
|
||||
def system_info(hacs: HacsBase) -> dict:
|
||||
"""Return system info."""
|
||||
return {
|
||||
"identifiers": {(DOMAIN, HACS_SYSTEM_ID)},
|
||||
"name": NAME_SHORT,
|
||||
"manufacturer": "hacs.xyz",
|
||||
"model": "",
|
||||
"sw_version": str(hacs.version),
|
||||
"configuration_url": "homeassistant://hacs",
|
||||
"entry_type": DeviceEntryType.SERVICE,
|
||||
}
|
||||
|
||||
|
||||
class HacsBaseEntity(Entity):
|
||||
"""Base HACS entity."""
|
||||
|
||||
repository: HacsRepository | None = None
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(self, hacs: HacsBase) -> None:
|
||||
"""Initialize."""
|
||||
self.hacs = hacs
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register for status events."""
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
HacsDispatchEvent.REPOSITORY,
|
||||
self._update_and_write_state,
|
||||
)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update(self) -> None:
|
||||
"""Update the sensor."""
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Manual updates of the sensor."""
|
||||
self._update()
|
||||
|
||||
@callback
|
||||
def _update_and_write_state(self, _: Any) -> None:
|
||||
"""Update the entity and write state."""
|
||||
self._update()
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class HacsSystemEntity(HacsBaseEntity):
|
||||
"""Base system entity."""
|
||||
|
||||
_attr_icon = "hacs:hacs"
|
||||
_attr_unique_id = HACS_SYSTEM_ID
|
||||
|
||||
@property
|
||||
def device_info(self) -> dict[str, any]:
|
||||
"""Return device information about HACS."""
|
||||
return system_info(self.hacs)
|
||||
|
||||
|
||||
class HacsRepositoryEntity(HacsBaseEntity):
|
||||
"""Base repository entity."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hacs: HacsBase,
|
||||
repository: HacsRepository,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(hacs=hacs)
|
||||
self.repository = repository
|
||||
self._attr_unique_id = str(repository.data.id)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return self.hacs.repositories.is_downloaded(repository_id=str(self.repository.data.id))
|
||||
|
||||
@property
|
||||
def device_info(self) -> dict[str, any]:
|
||||
"""Return device information about HACS."""
|
||||
if self.repository.data.full_name == HacsGitHubRepo.INTEGRATION:
|
||||
return system_info(self.hacs)
|
||||
|
||||
return {
|
||||
"identifiers": {(DOMAIN, str(self.repository.data.id))},
|
||||
"name": self.repository.display_name,
|
||||
"model": self.repository.data.category,
|
||||
"manufacturer": ", ".join(
|
||||
author.replace("@", "") for author in self.repository.data.authors
|
||||
),
|
||||
"configuration_url": "homeassistant://hacs",
|
||||
"entry_type": DeviceEntryType.SERVICE,
|
||||
}
|
||||
|
||||
@callback
|
||||
def _update_and_write_state(self, data: dict) -> None:
|
||||
"""Update the entity and write state."""
|
||||
if data.get("repository_id") == self.repository.data.id:
|
||||
self._update()
|
||||
self.async_write_ha_state()
|
||||
@@ -1,89 +0,0 @@
|
||||
"""Helper constants."""
|
||||
# pylint: disable=missing-class-docstring
|
||||
import sys
|
||||
|
||||
if sys.version_info.minor >= 11:
|
||||
# Needs Python 3.11
|
||||
from enum import StrEnum # # pylint: disable=no-name-in-module
|
||||
else:
|
||||
try:
|
||||
# https://github.com/home-assistant/core/blob/dev/homeassistant/backports/enum.py
|
||||
# Considered internal to Home Assistant, can be removed whenever.
|
||||
from homeassistant.backports.enum import StrEnum
|
||||
except ImportError:
|
||||
from enum import Enum
|
||||
|
||||
class StrEnum(str, Enum):
|
||||
pass
|
||||
|
||||
|
||||
class HacsGitHubRepo(StrEnum):
|
||||
"""HacsGitHubRepo."""
|
||||
|
||||
DEFAULT = "hacs/default"
|
||||
INTEGRATION = "hacs/integration"
|
||||
|
||||
|
||||
class HacsCategory(StrEnum):
|
||||
APPDAEMON = "appdaemon"
|
||||
INTEGRATION = "integration"
|
||||
LOVELACE = "lovelace"
|
||||
PLUGIN = "plugin" # Kept for legacy purposes
|
||||
NETDAEMON = "netdaemon"
|
||||
PYTHON_SCRIPT = "python_script"
|
||||
THEME = "theme"
|
||||
REMOVED = "removed"
|
||||
|
||||
def __str__(self):
|
||||
return str(self.value)
|
||||
|
||||
|
||||
class HacsDispatchEvent(StrEnum):
|
||||
"""HacsDispatchEvent."""
|
||||
|
||||
CONFIG = "hacs_dispatch_config"
|
||||
ERROR = "hacs_dispatch_error"
|
||||
RELOAD = "hacs_dispatch_reload"
|
||||
REPOSITORY = "hacs_dispatch_repository"
|
||||
REPOSITORY_DOWNLOAD_PROGRESS = "hacs_dispatch_repository_download_progress"
|
||||
STAGE = "hacs_dispatch_stage"
|
||||
STARTUP = "hacs_dispatch_startup"
|
||||
STATUS = "hacs_dispatch_status"
|
||||
|
||||
|
||||
class RepositoryFile(StrEnum):
|
||||
"""Repository file names."""
|
||||
|
||||
HACS_JSON = "hacs.json"
|
||||
MAINIFEST_JSON = "manifest.json"
|
||||
|
||||
|
||||
class ConfigurationType(StrEnum):
|
||||
YAML = "yaml"
|
||||
CONFIG_ENTRY = "config_entry"
|
||||
|
||||
|
||||
class LovelaceMode(StrEnum):
|
||||
"""Lovelace Modes."""
|
||||
|
||||
STORAGE = "storage"
|
||||
AUTO = "auto"
|
||||
AUTO_GEN = "auto-gen"
|
||||
YAML = "yaml"
|
||||
|
||||
|
||||
class HacsStage(StrEnum):
|
||||
SETUP = "setup"
|
||||
STARTUP = "startup"
|
||||
WAITING = "waiting"
|
||||
RUNNING = "running"
|
||||
BACKGROUND = "background"
|
||||
|
||||
|
||||
class HacsDisabledReason(StrEnum):
|
||||
RATE_LIMIT = "rate_limit"
|
||||
REMOVED = "removed"
|
||||
INVALID_TOKEN = "invalid_token"
|
||||
CONSTRAINS = "constrains"
|
||||
LOAD_HACS = "load_hacs"
|
||||
RESTORE = "restore"
|
||||
@@ -1,49 +0,0 @@
|
||||
"""Custom Exceptions for HACS."""
|
||||
|
||||
|
||||
class HacsException(Exception):
|
||||
"""Super basic."""
|
||||
|
||||
|
||||
class HacsRepositoryArchivedException(HacsException):
|
||||
"""For repositories that are archived."""
|
||||
|
||||
|
||||
class HacsNotModifiedException(HacsException):
|
||||
"""For responses that are not modified."""
|
||||
|
||||
|
||||
class HacsExpectedException(HacsException):
|
||||
"""For stuff that are expected."""
|
||||
|
||||
|
||||
class HacsRepositoryExistException(HacsException):
|
||||
"""For repositories that are already exist."""
|
||||
|
||||
|
||||
class HacsExecutionStillInProgress(HacsException):
|
||||
"""Exception to raise if execution is still in progress."""
|
||||
|
||||
|
||||
class AddonRepositoryException(HacsException):
|
||||
"""Exception to raise when user tries to add add-on repository."""
|
||||
|
||||
exception_message = (
|
||||
"The repository does not seem to be a integration, "
|
||||
"but an add-on repository. HACS does not manage add-ons."
|
||||
)
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__(self.exception_message)
|
||||
|
||||
|
||||
class HomeAssistantCoreRepositoryException(HacsException):
|
||||
"""Exception to raise when user tries to add the home-assistant/core repository."""
|
||||
|
||||
exception_message = (
|
||||
"You can not add homeassistant/core, to use core integrations "
|
||||
"check the Home Assistant documentation for how to add them."
|
||||
)
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__(self.exception_message)
|
||||
@@ -1,94 +0,0 @@
|
||||
""""Starting setup task: Frontend"."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from aiohttp import web
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .const import DOMAIN, URL_BASE
|
||||
from .hacs_frontend import VERSION as FE_VERSION, locate_dir
|
||||
from .hacs_frontend_experimental import (
|
||||
VERSION as EXPERIMENTAL_FE_VERSION,
|
||||
locate_dir as experimental_locate_dir,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .base import HacsBase
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_frontend(hass: HomeAssistant, hacs: HacsBase) -> None:
|
||||
"""Register the frontend."""
|
||||
|
||||
# Setup themes endpoint if needed
|
||||
hacs.async_setup_frontend_endpoint_themes()
|
||||
|
||||
# Register frontend
|
||||
if hacs.configuration.frontend_repo_url:
|
||||
hacs.log.warning(
|
||||
"<HacsFrontend> Frontend development mode enabled. Do not run in production!"
|
||||
)
|
||||
hass.http.register_view(HacsFrontendDev())
|
||||
elif hacs.configuration.experimental:
|
||||
hacs.log.info("<HacsFrontend> Using experimental frontend")
|
||||
hass.http.register_static_path(
|
||||
f"{URL_BASE}/frontend", experimental_locate_dir(), cache_headers=False
|
||||
)
|
||||
else:
|
||||
#
|
||||
hass.http.register_static_path(f"{URL_BASE}/frontend", locate_dir(), cache_headers=False)
|
||||
|
||||
# Custom iconset
|
||||
hass.http.register_static_path(
|
||||
f"{URL_BASE}/iconset.js", str(hacs.integration_dir / "iconset.js")
|
||||
)
|
||||
if "frontend_extra_module_url" not in hass.data:
|
||||
hass.data["frontend_extra_module_url"] = set()
|
||||
hass.data["frontend_extra_module_url"].add(f"{URL_BASE}/iconset.js")
|
||||
|
||||
hacs.frontend_version = (
|
||||
FE_VERSION if not hacs.configuration.experimental else EXPERIMENTAL_FE_VERSION
|
||||
)
|
||||
|
||||
# Add to sidepanel if needed
|
||||
if DOMAIN not in hass.data.get("frontend_panels", {}):
|
||||
hass.components.frontend.async_register_built_in_panel(
|
||||
component_name="custom",
|
||||
sidebar_title=hacs.configuration.sidepanel_title,
|
||||
sidebar_icon=hacs.configuration.sidepanel_icon,
|
||||
frontend_url_path=DOMAIN,
|
||||
config={
|
||||
"_panel_custom": {
|
||||
"name": "hacs-frontend",
|
||||
"embed_iframe": True,
|
||||
"trust_external": False,
|
||||
"js_url": f"/hacsfiles/frontend/entrypoint.js?hacstag={hacs.frontend_version}",
|
||||
}
|
||||
},
|
||||
require_admin=True,
|
||||
)
|
||||
|
||||
# Setup plugin endpoint if needed
|
||||
hacs.async_setup_frontend_endpoint_plugin()
|
||||
|
||||
|
||||
class HacsFrontendDev(HomeAssistantView):
|
||||
"""Dev View Class for HACS."""
|
||||
|
||||
requires_auth = False
|
||||
name = "hacs_files:frontend"
|
||||
url = r"/hacsfiles/frontend/{requested_file:.+}"
|
||||
|
||||
async def get(self, request, requested_file): # pylint: disable=unused-argument
|
||||
"""Handle HACS Web requests."""
|
||||
hacs: HacsBase = request.app["hass"].data.get(DOMAIN)
|
||||
requested = requested_file.split("/")[-1]
|
||||
request = await hacs.session.get(f"{hacs.configuration.frontend_repo_url}/{requested}")
|
||||
if request.status == 200:
|
||||
result = await request.read()
|
||||
response = web.Response(body=result)
|
||||
response.headers["Content-Type"] = "application/javascript"
|
||||
|
||||
return response
|
||||
@@ -1,5 +0,0 @@
|
||||
"""HACS Frontend"""
|
||||
from .version import VERSION
|
||||
|
||||
def locate_dir():
|
||||
return __path__[0]
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1,23 +0,0 @@
|
||||
import{a as t,r as i,n as a}from"./main-ad130be7.js";import{L as n,s}from"./c.82eccc94.js";let r=t([a("ha-list-item")],(function(t,a){return{F:class extends a{constructor(...i){super(...i),t(this)}},d:[{kind:"get",static:!0,key:"styles",value:function(){return[s,i`
|
||||
:host {
|
||||
padding-left: var(--mdc-list-side-padding, 20px);
|
||||
padding-right: var(--mdc-list-side-padding, 20px);
|
||||
}
|
||||
:host([graphic="avatar"]:not([twoLine])),
|
||||
:host([graphic="icon"]:not([twoLine])) {
|
||||
height: 48px;
|
||||
}
|
||||
span.material-icons:first-of-type {
|
||||
margin-inline-start: 0px !important;
|
||||
margin-inline-end: var(
|
||||
--mdc-list-item-graphic-margin,
|
||||
16px
|
||||
) !important;
|
||||
direction: var(--direction);
|
||||
}
|
||||
span.material-icons:last-of-type {
|
||||
margin-inline-start: auto !important;
|
||||
margin-inline-end: 0px !important;
|
||||
direction: var(--direction);
|
||||
}
|
||||
`]}}]}}),n);const e=t=>`https://brands.home-assistant.io/${t.useFallback?"_/":""}${t.domain}/${t.darkOptimized?"dark_":""}${t.type}.png`,o=t=>t.split("/")[4],p=t=>t.startsWith("https://brands.home-assistant.io/");export{r as H,e as b,o as e,p as i};
|
||||
Binary file not shown.
@@ -1,24 +0,0 @@
|
||||
import{a as e,h as t,Y as i,e as n,i as o,$ as r,L as l,N as a,r as d,n as s}from"./main-ad130be7.js";import"./c.9b92f489.js";e([s("ha-button-menu")],(function(e,t){class s extends t{constructor(...t){super(...t),e(this)}}return{F:s,d:[{kind:"field",key:i,value:void 0},{kind:"field",decorators:[n()],key:"corner",value:()=>"TOP_START"},{kind:"field",decorators:[n()],key:"menuCorner",value:()=>"START"},{kind:"field",decorators:[n({type:Number})],key:"x",value:()=>null},{kind:"field",decorators:[n({type:Number})],key:"y",value:()=>null},{kind:"field",decorators:[n({type:Boolean})],key:"multi",value:()=>!1},{kind:"field",decorators:[n({type:Boolean})],key:"activatable",value:()=>!1},{kind:"field",decorators:[n({type:Boolean})],key:"disabled",value:()=>!1},{kind:"field",decorators:[n({type:Boolean})],key:"fixed",value:()=>!1},{kind:"field",decorators:[o("mwc-menu",!0)],key:"_menu",value:void 0},{kind:"get",key:"items",value:function(){var e;return null===(e=this._menu)||void 0===e?void 0:e.items}},{kind:"get",key:"selected",value:function(){var e;return null===(e=this._menu)||void 0===e?void 0:e.selected}},{kind:"method",key:"focus",value:function(){var e,t;null!==(e=this._menu)&&void 0!==e&&e.open?this._menu.focusItemAtIndex(0):null===(t=this._triggerButton)||void 0===t||t.focus()}},{kind:"method",key:"render",value:function(){return r`
|
||||
<div @click=${this._handleClick}>
|
||||
<slot name="trigger" @slotchange=${this._setTriggerAria}></slot>
|
||||
</div>
|
||||
<mwc-menu
|
||||
.corner=${this.corner}
|
||||
.menuCorner=${this.menuCorner}
|
||||
.fixed=${this.fixed}
|
||||
.multi=${this.multi}
|
||||
.activatable=${this.activatable}
|
||||
.y=${this.y}
|
||||
.x=${this.x}
|
||||
>
|
||||
<slot></slot>
|
||||
</mwc-menu>
|
||||
`}},{kind:"method",key:"firstUpdated",value:function(e){l(a(s.prototype),"firstUpdated",this).call(this,e),"rtl"===document.dir&&this.updateComplete.then((()=>{this.querySelectorAll("mwc-list-item").forEach((e=>{const t=document.createElement("style");t.innerHTML="span.material-icons:first-of-type { margin-left: var(--mdc-list-item-graphic-margin, 32px) !important; margin-right: 0px !important;}",e.shadowRoot.appendChild(t)}))}))}},{kind:"method",key:"_handleClick",value:function(){this.disabled||(this._menu.anchor=this,this._menu.show())}},{kind:"get",key:"_triggerButton",value:function(){return this.querySelector('ha-icon-button[slot="trigger"], mwc-button[slot="trigger"]')}},{kind:"method",key:"_setTriggerAria",value:function(){this._triggerButton&&(this._triggerButton.ariaHasPopup="menu")}},{kind:"get",static:!0,key:"styles",value:function(){return d`
|
||||
:host {
|
||||
display: inline-block;
|
||||
position: relative;
|
||||
}
|
||||
::slotted([disabled]) {
|
||||
color: var(--disabled-text-color);
|
||||
}
|
||||
`}}]}}),t);
|
||||
Binary file not shown.
@@ -1,390 +0,0 @@
|
||||
import{a as e,h as t,e as i,g as a,t as s,$ as o,j as r,R as n,w as l,r as h,n as c,m as d,L as p,N as u,o as v,b as f,aI as b,ai as m,c as k,E as g,aJ as y,aC as w,aK as x,aL as $,d as _,s as R}from"./main-ad130be7.js";import{f as z}from"./c.3243a8b0.js";import{c as j}from"./c.4a97632a.js";import"./c.f1291e50.js";import"./c.2d5ed670.js";import"./c.97b7c4b0.js";import{r as F}from"./c.4204ca09.js";import{i as P}from"./c.21c042d4.js";import{s as I}from"./c.2645c235.js";import"./c.a5f69ed4.js";import"./c.3f859915.js";import"./c.9b92f489.js";import"./c.82eccc94.js";import"./c.8e28b461.js";import"./c.4feb0cb8.js";import"./c.0ca5587f.js";import"./c.5d3ce9d6.js";import"./c.f6611997.js";import"./c.743a15a1.js";import"./c.4266acdb.js";e([c("ha-tab")],(function(e,t){return{F:class extends t{constructor(...t){super(...t),e(this)}},d:[{kind:"field",decorators:[i({type:Boolean,reflect:!0})],key:"active",value:()=>!1},{kind:"field",decorators:[i({type:Boolean,reflect:!0})],key:"narrow",value:()=>!1},{kind:"field",decorators:[i()],key:"name",value:void 0},{kind:"field",decorators:[a("mwc-ripple")],key:"_ripple",value:void 0},{kind:"field",decorators:[s()],key:"_shouldRenderRipple",value:()=>!1},{kind:"method",key:"render",value:function(){return o`
|
||||
<div
|
||||
tabindex="0"
|
||||
role="tab"
|
||||
aria-selected=${this.active}
|
||||
aria-label=${r(this.name)}
|
||||
@focus=${this.handleRippleFocus}
|
||||
@blur=${this.handleRippleBlur}
|
||||
@mousedown=${this.handleRippleActivate}
|
||||
@mouseup=${this.handleRippleDeactivate}
|
||||
@mouseenter=${this.handleRippleMouseEnter}
|
||||
@mouseleave=${this.handleRippleMouseLeave}
|
||||
@touchstart=${this.handleRippleActivate}
|
||||
@touchend=${this.handleRippleDeactivate}
|
||||
@touchcancel=${this.handleRippleDeactivate}
|
||||
@keydown=${this._handleKeyDown}
|
||||
>
|
||||
${this.narrow?o`<slot name="icon"></slot>`:""}
|
||||
<span class="name">${this.name}</span>
|
||||
${this._shouldRenderRipple?o`<mwc-ripple></mwc-ripple>`:""}
|
||||
</div>
|
||||
`}},{kind:"field",key:"_rippleHandlers",value(){return new n((()=>(this._shouldRenderRipple=!0,this._ripple)))}},{kind:"method",key:"_handleKeyDown",value:function(e){13===e.keyCode&&e.target.click()}},{kind:"method",decorators:[l({passive:!0})],key:"handleRippleActivate",value:function(e){this._rippleHandlers.startPress(e)}},{kind:"method",key:"handleRippleDeactivate",value:function(){this._rippleHandlers.endPress()}},{kind:"method",key:"handleRippleMouseEnter",value:function(){this._rippleHandlers.startHover()}},{kind:"method",key:"handleRippleMouseLeave",value:function(){this._rippleHandlers.endHover()}},{kind:"method",key:"handleRippleFocus",value:function(){this._rippleHandlers.startFocus()}},{kind:"method",key:"handleRippleBlur",value:function(){this._rippleHandlers.endFocus()}},{kind:"get",static:!0,key:"styles",value:function(){return h`
|
||||
div {
|
||||
padding: 0 32px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
text-align: center;
|
||||
box-sizing: border-box;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
width: 100%;
|
||||
height: var(--header-height);
|
||||
cursor: pointer;
|
||||
position: relative;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.name {
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
:host([active]) {
|
||||
color: var(--primary-color);
|
||||
}
|
||||
|
||||
:host(:not([narrow])[active]) div {
|
||||
border-bottom: 2px solid var(--primary-color);
|
||||
}
|
||||
|
||||
:host([narrow]) {
|
||||
min-width: 0;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
:host([narrow]) div {
|
||||
padding: 0 4px;
|
||||
}
|
||||
`}}]}}),t),e([c("hass-tabs-subpage")],(function(e,t){class a extends t{constructor(...t){super(...t),e(this)}}return{F:a,d:[{kind:"field",decorators:[i({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[i({type:Boolean})],key:"supervisor",value:()=>!1},{kind:"field",decorators:[i({attribute:!1})],key:"localizeFunc",value:void 0},{kind:"field",decorators:[i({type:String,attribute:"back-path"})],key:"backPath",value:void 0},{kind:"field",decorators:[i()],key:"backCallback",value:void 0},{kind:"field",decorators:[i({type:Boolean,attribute:"main-page"})],key:"mainPage",value:()=>!1},{kind:"field",decorators:[i({attribute:!1})],key:"route",value:void 0},{kind:"field",decorators:[i({attribute:!1})],key:"tabs",value:void 0},{kind:"field",decorators:[i({type:Boolean,reflect:!0})],key:"narrow",value:()=>!1},{kind:"field",decorators:[i({type:Boolean,reflect:!0,attribute:"is-wide"})],key:"isWide",value:()=>!1},{kind:"field",decorators:[i({type:Boolean,reflect:!0})],key:"rtl",value:()=>!1},{kind:"field",decorators:[s()],key:"_activeTab",value:void 0},{kind:"field",decorators:[F(".content")],key:"_savedScrollPos",value:void 0},{kind:"field",key:"_getTabs",value(){return d(((e,t,i,a,s,r,n)=>{const l=e.filter((e=>(!e.component||e.core||P(this.hass,e.component))&&(!e.advancedOnly||i)));if(l.length<2){if(1===l.length){const e=l[0];return[e.translationKey?n(e.translationKey):e.name]}return[""]}return l.map((e=>o`
|
||||
<a href=${e.path}>
|
||||
<ha-tab
|
||||
.hass=${this.hass}
|
||||
.active=${e.path===(null==t?void 0:t.path)}
|
||||
.narrow=${this.narrow}
|
||||
.name=${e.translationKey?n(e.translationKey):e.name}
|
||||
>
|
||||
${e.iconPath?o`<ha-svg-icon
|
||||
slot="icon"
|
||||
.path=${e.iconPath}
|
||||
></ha-svg-icon>`:""}
|
||||
</ha-tab>
|
||||
</a>
|
||||
`))}))}},{kind:"method",key:"willUpdate",value:function(e){if(e.has("route")&&(this._activeTab=this.tabs.find((e=>`${this.route.prefix}${this.route.path}`.includes(e.path)))),e.has("hass")){const t=e.get("hass");t&&t.language===this.hass.language||(this.rtl=j(this.hass))}p(u(a.prototype),"willUpdate",this).call(this,e)}},{kind:"method",key:"render",value:function(){var e,t;const i=this._getTabs(this.tabs,this._activeTab,null===(e=this.hass.userData)||void 0===e?void 0:e.showAdvanced,this.hass.config.components,this.hass.language,this.narrow,this.localizeFunc||this.hass.localize),a=i.length>1;return o`
|
||||
<div class="toolbar">
|
||||
${this.mainPage||!this.backPath&&null!==(t=history.state)&&void 0!==t&&t.root?o`
|
||||
<ha-menu-button
|
||||
.hassio=${this.supervisor}
|
||||
.hass=${this.hass}
|
||||
.narrow=${this.narrow}
|
||||
></ha-menu-button>
|
||||
`:this.backPath?o`
|
||||
<a href=${this.backPath}>
|
||||
<ha-icon-button-arrow-prev
|
||||
.hass=${this.hass}
|
||||
></ha-icon-button-arrow-prev>
|
||||
</a>
|
||||
`:o`
|
||||
<ha-icon-button-arrow-prev
|
||||
.hass=${this.hass}
|
||||
@click=${this._backTapped}
|
||||
></ha-icon-button-arrow-prev>
|
||||
`}
|
||||
${this.narrow||!a?o`<div class="main-title">
|
||||
<slot name="header">${a?"":i[0]}</slot>
|
||||
</div>`:""}
|
||||
${a?o`
|
||||
<div id="tabbar" class=${v({"bottom-bar":this.narrow})}>
|
||||
${i}
|
||||
</div>
|
||||
`:""}
|
||||
<div id="toolbar-icon">
|
||||
<slot name="toolbar-icon"></slot>
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
class="content ${v({tabs:a})}"
|
||||
@scroll=${this._saveScrollPos}
|
||||
>
|
||||
<slot></slot>
|
||||
</div>
|
||||
<div id="fab" class=${v({tabs:a})}>
|
||||
<slot name="fab"></slot>
|
||||
</div>
|
||||
`}},{kind:"method",decorators:[l({passive:!0})],key:"_saveScrollPos",value:function(e){this._savedScrollPos=e.target.scrollTop}},{kind:"method",key:"_backTapped",value:function(){this.backCallback?this.backCallback():history.back()}},{kind:"get",static:!0,key:"styles",value:function(){return h`
|
||||
:host {
|
||||
display: block;
|
||||
height: 100%;
|
||||
background-color: var(--primary-background-color);
|
||||
}
|
||||
|
||||
:host([narrow]) {
|
||||
width: 100%;
|
||||
position: fixed;
|
||||
}
|
||||
|
||||
ha-menu-button {
|
||||
margin-right: 24px;
|
||||
}
|
||||
|
||||
.toolbar {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
font-size: 20px;
|
||||
height: var(--header-height);
|
||||
background-color: var(--sidebar-background-color);
|
||||
font-weight: 400;
|
||||
border-bottom: 1px solid var(--divider-color);
|
||||
padding: 0 16px;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
.toolbar a {
|
||||
color: var(--sidebar-text-color);
|
||||
text-decoration: none;
|
||||
}
|
||||
.bottom-bar a {
|
||||
width: 25%;
|
||||
}
|
||||
|
||||
#tabbar {
|
||||
display: flex;
|
||||
font-size: 14px;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
#tabbar > a {
|
||||
overflow: hidden;
|
||||
max-width: 45%;
|
||||
}
|
||||
|
||||
#tabbar.bottom-bar {
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
padding: 0 16px;
|
||||
box-sizing: border-box;
|
||||
background-color: var(--sidebar-background-color);
|
||||
border-top: 1px solid var(--divider-color);
|
||||
justify-content: space-around;
|
||||
z-index: 2;
|
||||
font-size: 12px;
|
||||
width: 100%;
|
||||
padding-bottom: env(safe-area-inset-bottom);
|
||||
}
|
||||
|
||||
#tabbar:not(.bottom-bar) {
|
||||
flex: 1;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
:host(:not([narrow])) #toolbar-icon {
|
||||
min-width: 40px;
|
||||
}
|
||||
|
||||
ha-menu-button,
|
||||
ha-icon-button-arrow-prev,
|
||||
::slotted([slot="toolbar-icon"]) {
|
||||
display: flex;
|
||||
flex-shrink: 0;
|
||||
pointer-events: auto;
|
||||
color: var(--sidebar-icon-color);
|
||||
}
|
||||
|
||||
.main-title {
|
||||
flex: 1;
|
||||
max-height: var(--header-height);
|
||||
line-height: 20px;
|
||||
color: var(--sidebar-text-color);
|
||||
margin: var(--main-title-margin, 0 0 0 24px);
|
||||
}
|
||||
|
||||
.content {
|
||||
position: relative;
|
||||
width: calc(
|
||||
100% - env(safe-area-inset-left) - env(safe-area-inset-right)
|
||||
);
|
||||
margin-left: env(safe-area-inset-left);
|
||||
margin-right: env(safe-area-inset-right);
|
||||
height: calc(100% - 1px - var(--header-height));
|
||||
height: calc(
|
||||
100% - 1px - var(--header-height) - env(safe-area-inset-bottom)
|
||||
);
|
||||
overflow: auto;
|
||||
-webkit-overflow-scrolling: touch;
|
||||
}
|
||||
|
||||
:host([narrow]) .content.tabs {
|
||||
height: calc(100% - 2 * var(--header-height));
|
||||
height: calc(
|
||||
100% - 2 * var(--header-height) - env(safe-area-inset-bottom)
|
||||
);
|
||||
}
|
||||
|
||||
#fab {
|
||||
position: fixed;
|
||||
right: calc(16px + env(safe-area-inset-right));
|
||||
bottom: calc(16px + env(safe-area-inset-bottom));
|
||||
z-index: 1;
|
||||
}
|
||||
:host([narrow]) #fab.tabs {
|
||||
bottom: calc(84px + env(safe-area-inset-bottom));
|
||||
}
|
||||
#fab[is-wide] {
|
||||
bottom: 24px;
|
||||
right: 24px;
|
||||
}
|
||||
:host([rtl]) #fab {
|
||||
right: auto;
|
||||
left: calc(16px + env(safe-area-inset-left));
|
||||
}
|
||||
:host([rtl][is-wide]) #fab {
|
||||
bottom: 24px;
|
||||
left: 24px;
|
||||
right: auto;
|
||||
}
|
||||
`}}]}}),t);let E=e([c("hacs-store-panel")],(function(e,t){return{F:class extends t{constructor(...t){super(...t),e(this)}},d:[{kind:"field",decorators:[i({attribute:!1})],key:"filters",value:()=>({})},{kind:"field",decorators:[i({attribute:!1})],key:"hacs",value:void 0},{kind:"field",decorators:[i()],key:"_searchInput",value:()=>""},{kind:"field",decorators:[i({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[i({attribute:!1})],key:"narrow",value:void 0},{kind:"field",decorators:[i({attribute:!1})],key:"isWide",value:void 0},{kind:"field",decorators:[i({attribute:!1})],key:"route",value:void 0},{kind:"field",decorators:[i({attribute:!1})],key:"sections",value:void 0},{kind:"field",decorators:[i()],key:"section",value:void 0},{kind:"field",key:"_repositoriesInActiveSection",value(){return d(((e,t)=>[(null==e?void 0:e.filter((e=>{var i,a,s;return(null===(i=this.hacs.sections)||void 0===i||null===(a=i.find((e=>e.id===t)))||void 0===a||null===(s=a.categories)||void 0===s?void 0:s.includes(e.category))&&e.installed})))||[],(null==e?void 0:e.filter((e=>{var i,a,s;return(null===(i=this.hacs.sections)||void 0===i||null===(a=i.find((e=>e.id===t)))||void 0===a||null===(s=a.categories)||void 0===s?void 0:s.includes(e.category))&&e.new&&!e.installed})))||[]]))}},{kind:"get",key:"allRepositories",value:function(){const[e,t]=this._repositoriesInActiveSection(this.hacs.repositories,this.section);return t.concat(e)}},{kind:"field",key:"_filterRepositories",value:()=>d(z)},{kind:"get",key:"visibleRepositories",value:function(){const e=this.allRepositories.filter((e=>{var t,i;return null===(t=this.filters[this.section])||void 0===t||null===(i=t.find((t=>t.id===e.category)))||void 0===i?void 0:i.checked}));return this._filterRepositories(e,this._searchInput)}},{kind:"method",key:"firstUpdated",value:async function(){this.addEventListener("filter-change",(e=>this._updateFilters(e)))}},{kind:"method",key:"_updateFilters",value:function(e){var t;const 
i=null===(t=this.filters[this.section])||void 0===t?void 0:t.find((t=>t.id===e.detail.id));this.filters[this.section].find((e=>e.id===i.id)).checked=!i.checked,this.requestUpdate()}},{kind:"method",key:"render",value:function(){var e;if(!this.hacs)return o``;const t=this._repositoriesInActiveSection(this.hacs.repositories,this.section)[1];if(!this.filters[this.section]&&this.hacs.info.categories){var i;const e=null===(i=f(this.hacs.language,this.route))||void 0===i?void 0:i.categories;this.filters[this.section]=[],null==e||e.filter((e=>{var t;return null===(t=this.hacs.info)||void 0===t?void 0:t.categories.includes(e)})).forEach((e=>{this.filters[this.section].push({id:e,value:e,checked:!0})}))}return o`<hass-tabs-subpage
|
||||
back-path="/hacs/entry"
|
||||
.hass=${this.hass}
|
||||
.narrow=${this.narrow}
|
||||
.route=${this.route}
|
||||
.tabs=${this.hacs.sections}
|
||||
hasFab
|
||||
>
|
||||
<ha-icon-overflow-menu
|
||||
slot="toolbar-icon"
|
||||
narrow
|
||||
.hass=${this.hass}
|
||||
.items=${[{path:b,label:this.hacs.localize("menu.documentation"),action:()=>m.open("https://hacs.xyz/","_blank","noreferrer=true")},{path:k,label:"GitHub",action:()=>m.open("https://github.com/hacs","_blank","noreferrer=true")},{path:g,label:this.hacs.localize("menu.open_issue"),action:()=>m.open("https://hacs.xyz/docs/issues","_blank","noreferrer=true")},{path:y,label:this.hacs.localize("menu.custom_repositories"),disabled:this.hacs.info.disabled_reason,action:()=>this.dispatchEvent(new CustomEvent("hacs-dialog",{detail:{type:"custom-repositories",repositories:this.hacs.repositories},bubbles:!0,composed:!0}))},{path:w,label:this.hacs.localize("menu.about"),action:()=>I(this,this.hacs)}]}
|
||||
>
|
||||
</ha-icon-overflow-menu>
|
||||
${this.narrow?o`
|
||||
<search-input
|
||||
.hass=${this.hass}
|
||||
class="header"
|
||||
slot="header"
|
||||
.label=${this.hacs.localize("search.downloaded")}
|
||||
.filter=${this._searchInput||""}
|
||||
@value-changed=${this._inputValueChanged}
|
||||
></search-input>
|
||||
`:o`<div class="search">
|
||||
<search-input
|
||||
.hass=${this.hass}
|
||||
.label=${0===t.length?this.hacs.localize("search.downloaded"):this.hacs.localize("search.downloaded_new")}
|
||||
.filter=${this._searchInput||""}
|
||||
@value-changed=${this._inputValueChanged}
|
||||
></search-input>
|
||||
</div>`}
|
||||
<div class="content ${this.narrow?"narrow-content":""}">
|
||||
${(null===(e=this.filters[this.section])||void 0===e?void 0:e.length)>1?o`<div class="filters">
|
||||
<hacs-filter
|
||||
.hacs=${this.hacs}
|
||||
.filters="${this.filters[this.section]}"
|
||||
></hacs-filter>
|
||||
</div>`:""}
|
||||
${null!=t&&t.length?o`<ha-alert .rtl=${j(this.hass)}>
|
||||
${this.hacs.localize("store.new_repositories_note")}
|
||||
<mwc-button
|
||||
class="max-content"
|
||||
slot="action"
|
||||
.label=${this.hacs.localize("menu.dismiss")}
|
||||
@click=${this._clearAllNewRepositories}
|
||||
>
|
||||
</mwc-button>
|
||||
</ha-alert> `:""}
|
||||
<div class="container ${this.narrow?"narrow":""}">
|
||||
${void 0===this.hacs.repositories?"":0===this.allRepositories.length?this._renderEmpty():0===this.visibleRepositories.length?this._renderNoResultsFound():this._renderRepositories()}
|
||||
</div>
|
||||
</div>
|
||||
<ha-fab
|
||||
slot="fab"
|
||||
.label=${this.hacs.localize("store.explore")}
|
||||
.extended=${!this.narrow}
|
||||
@click=${this._addRepository}
|
||||
>
|
||||
<ha-svg-icon slot="icon" .path=${x}></ha-svg-icon>
|
||||
</ha-fab>
|
||||
</hass-tabs-subpage>`}},{kind:"method",key:"_renderRepositories",value:function(){return this.visibleRepositories.map((e=>o`<hacs-repository-card
|
||||
.hass=${this.hass}
|
||||
.hacs=${this.hacs}
|
||||
.repository=${e}
|
||||
.narrow=${this.narrow}
|
||||
?narrow=${this.narrow}
|
||||
></hacs-repository-card>`))}},{kind:"method",key:"_clearAllNewRepositories",value:async function(){var e;await $(this.hass,{categories:(null===(e=f(this.hacs.language,this.route))||void 0===e?void 0:e.categories)||[]})}},{kind:"method",key:"_renderNoResultsFound",value:function(){return o`<ha-alert
|
||||
.rtl=${j(this.hass)}
|
||||
alert-type="warning"
|
||||
.title="${this.hacs.localize("store.no_repositories")} 😕"
|
||||
>
|
||||
${this.hacs.localize("store.no_repositories_found_desc1",{searchInput:this._searchInput})}
|
||||
<br />
|
||||
${this.hacs.localize("store.no_repositories_found_desc2")}
|
||||
</ha-alert>`}},{kind:"method",key:"_renderEmpty",value:function(){return o`<ha-alert
|
||||
.title="${this.hacs.localize("store.no_repositories")} 😕"
|
||||
.rtl=${j(this.hass)}
|
||||
>
|
||||
${this.hacs.localize("store.no_repositories_desc1")}
|
||||
<br />
|
||||
${this.hacs.localize("store.no_repositories_desc2")}
|
||||
</ha-alert>`}},{kind:"method",key:"_inputValueChanged",value:function(e){this._searchInput=e.detail.value,window.localStorage.setItem("hacs-search",this._searchInput)}},{kind:"method",key:"_addRepository",value:function(){this.dispatchEvent(new CustomEvent("hacs-dialog",{detail:{type:"add-repository",repositories:this.hacs.repositories,section:this.section},bubbles:!0,composed:!0}))}},{kind:"get",static:!0,key:"styles",value:function(){return[_,R,h`
|
||||
.filter {
|
||||
border-bottom: 1px solid var(--divider-color);
|
||||
}
|
||||
.content {
|
||||
height: calc(100vh - 128px);
|
||||
overflow: auto;
|
||||
}
|
||||
.narrow-content {
|
||||
height: calc(100vh - 128px);
|
||||
}
|
||||
.container {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(480px, 1fr));
|
||||
justify-items: center;
|
||||
grid-gap: 8px 8px;
|
||||
padding: 8px 16px 16px;
|
||||
margin-bottom: 64px;
|
||||
}
|
||||
ha-svg-icon {
|
||||
color: var(--hcv-text-color-on-background);
|
||||
}
|
||||
hacs-repository-card {
|
||||
max-width: 500px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: space-between;
|
||||
}
|
||||
hacs-repository-card[narrow] {
|
||||
width: 100%;
|
||||
}
|
||||
hacs-repository-card[narrow]:last-of-type {
|
||||
margin-bottom: 64px;
|
||||
}
|
||||
ha-alert {
|
||||
color: var(--hcv-text-color-primary);
|
||||
display: block;
|
||||
margin-top: -4px;
|
||||
}
|
||||
.narrow {
|
||||
width: 100%;
|
||||
display: block;
|
||||
padding: 0px;
|
||||
margin: 0;
|
||||
}
|
||||
search-input {
|
||||
display: block;
|
||||
}
|
||||
|
||||
search-input.header {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.bottom-bar {
|
||||
position: fixed !important;
|
||||
}
|
||||
.max-content {
|
||||
width: max-content;
|
||||
}
|
||||
`]}}]}}),t);export{E as HacsStorePanel};
|
||||
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1,16 +0,0 @@
|
||||
import{a as e,e as t,i,L as a,N as d,$ as r,r as n,n as o}from"./main-ad130be7.js";import{H as s}from"./c.0a1cf8d0.js";e([o("ha-clickable-list-item")],(function(e,o){class s extends o{constructor(...t){super(...t),e(this)}}return{F:s,d:[{kind:"field",decorators:[t()],key:"href",value:void 0},{kind:"field",decorators:[t({type:Boolean})],key:"disableHref",value:()=>!1},{kind:"field",decorators:[t({type:Boolean,reflect:!0})],key:"openNewTab",value:()=>!1},{kind:"field",decorators:[i("a")],key:"_anchor",value:void 0},{kind:"method",key:"render",value:function(){const e=a(d(s.prototype),"render",this).call(this),t=this.href||"";return r`${this.disableHref?r`<a aria-role="option">${e}</a>`:r`<a
|
||||
aria-role="option"
|
||||
target=${this.openNewTab?"_blank":""}
|
||||
href=${t}
|
||||
>${e}</a
|
||||
>`}`}},{kind:"method",key:"firstUpdated",value:function(){a(d(s.prototype),"firstUpdated",this).call(this),this.addEventListener("keydown",(e=>{"Enter"!==e.key&&" "!==e.key||this._anchor.click()}))}},{kind:"get",static:!0,key:"styles",value:function(){return[a(d(s),"styles",this),n`
|
||||
a {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding-left: var(--mdc-list-side-padding, 20px);
|
||||
padding-right: var(--mdc-list-side-padding, 20px);
|
||||
overflow: hidden;
|
||||
}
|
||||
`]}}]}}),s);
|
||||
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
||||
const n=(n,o)=>n&&n.config.components.includes(o);export{n as i};
|
||||
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
||||
import{al as e,am as a,aj as s,an as r,ao as u}from"./main-ad130be7.js";async function i(i,o,n){const t=new e("updateLovelaceResources"),l=await a(i),d=`/hacsfiles/${o.full_name.split("/")[1]}`,c=s({repository:o,version:n}),p=l.find((e=>e.url.includes(d)));t.debug({namespace:d,url:c,exsisting:p}),p&&p.url!==c?(t.debug(`Updating exsusting resource for ${d}`),await r(i,{url:c,resource_id:p.id,res_type:p.type})):l.map((e=>e.url)).includes(c)||(t.debug(`Adding ${c} to Lovelace resources`),await u(i,{url:c,res_type:"module"}))}export{i as u};
|
||||
Binary file not shown.
@@ -1 +0,0 @@
|
||||
import{m as o}from"./c.f6611997.js";import{a as t}from"./c.4266acdb.js";const n=async(n,s)=>t(n,{title:"Home Assistant Community Store",confirmText:s.localize("common.close"),text:o.html(`\n **${s.localize("dialog_about.integration_version")}:** | ${s.info.version}\n --|--\n **${s.localize("dialog_about.frontend_version")}:** | 20220906112053\n **${s.localize("common.repositories")}:** | ${s.repositories.length}\n **${s.localize("dialog_about.downloaded_repositories")}:** | ${s.repositories.filter((o=>o.installed)).length}\n\n **${s.localize("dialog_about.useful_links")}:**\n\n - [General documentation](https://hacs.xyz/)\n - [Configuration](https://hacs.xyz/docs/configuration/start)\n - [FAQ](https://hacs.xyz/docs/faq/what)\n - [GitHub](https://github.com/hacs)\n - [Discord](https://discord.gg/apgchf8)\n - [Become a GitHub sponsor? ❤️](https://github.com/sponsors/ludeeus)\n - [BuyMe~~Coffee~~Beer? 🍺🙈](https://buymeacoffee.com/ludeeus)\n\n ***\n\n _Everything you find in HACS is **not** tested by Home Assistant, that includes HACS itself.\n The HACS and Home Assistant teams do not support **anything** you find here._`)});export{n as s};
|
||||
Binary file not shown.
@@ -1,61 +0,0 @@
|
||||
import{a as r,h as a,e as o,r as e,$ as d,n as t}from"./main-ad130be7.js";r([t("ha-card")],(function(r,a){return{F:class extends a{constructor(...a){super(...a),r(this)}},d:[{kind:"field",decorators:[o()],key:"header",value:void 0},{kind:"field",decorators:[o({type:Boolean,reflect:!0})],key:"outlined",value:()=>!1},{kind:"get",static:!0,key:"styles",value:function(){return e`
|
||||
:host {
|
||||
background: var(
|
||||
--ha-card-background,
|
||||
var(--card-background-color, white)
|
||||
);
|
||||
border-radius: var(--ha-card-border-radius, 4px);
|
||||
box-shadow: var(
|
||||
--ha-card-box-shadow,
|
||||
0px 2px 1px -1px rgba(0, 0, 0, 0.2),
|
||||
0px 1px 1px 0px rgba(0, 0, 0, 0.14),
|
||||
0px 1px 3px 0px rgba(0, 0, 0, 0.12)
|
||||
);
|
||||
color: var(--primary-text-color);
|
||||
display: block;
|
||||
transition: all 0.3s ease-out;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
:host([outlined]) {
|
||||
box-shadow: none;
|
||||
border-width: var(--ha-card-border-width, 1px);
|
||||
border-style: solid;
|
||||
border-color: var(
|
||||
--ha-card-border-color,
|
||||
var(--divider-color, #e0e0e0)
|
||||
);
|
||||
}
|
||||
|
||||
.card-header,
|
||||
:host ::slotted(.card-header) {
|
||||
color: var(--ha-card-header-color, --primary-text-color);
|
||||
font-family: var(--ha-card-header-font-family, inherit);
|
||||
font-size: var(--ha-card-header-font-size, 24px);
|
||||
letter-spacing: -0.012em;
|
||||
line-height: 48px;
|
||||
padding: 12px 16px 16px;
|
||||
display: block;
|
||||
margin-block-start: 0px;
|
||||
margin-block-end: 0px;
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
:host ::slotted(.card-content:not(:first-child)),
|
||||
slot:not(:first-child)::slotted(.card-content) {
|
||||
padding-top: 0px;
|
||||
margin-top: -8px;
|
||||
}
|
||||
|
||||
:host ::slotted(.card-content) {
|
||||
padding: 16px;
|
||||
}
|
||||
|
||||
:host ::slotted(.card-actions) {
|
||||
border-top: 1px solid var(--divider-color, #e8e8e8);
|
||||
padding: 5px 16px;
|
||||
}
|
||||
`}},{kind:"method",key:"render",value:function(){return d`
|
||||
${this.header?d`<h1 class="card-header">${this.header}</h1>`:d``}
|
||||
<slot></slot>
|
||||
`}}]}}),a);
|
||||
Binary file not shown.
@@ -1,121 +0,0 @@
|
||||
import{a as e,h as t,e as n,t as i,i as o,$ as a,av as d,o as s,L as r,N as l,A as h,ae as c,r as p,n as u}from"./main-ad130be7.js";e([u("ha-expansion-panel")],(function(e,t){class u extends t{constructor(...t){super(...t),e(this)}}return{F:u,d:[{kind:"field",decorators:[n({type:Boolean,reflect:!0})],key:"expanded",value:()=>!1},{kind:"field",decorators:[n({type:Boolean,reflect:!0})],key:"outlined",value:()=>!1},{kind:"field",decorators:[n({type:Boolean,reflect:!0})],key:"leftChevron",value:()=>!1},{kind:"field",decorators:[n()],key:"header",value:void 0},{kind:"field",decorators:[n()],key:"secondary",value:void 0},{kind:"field",decorators:[i()],key:"_showContent",value(){return this.expanded}},{kind:"field",decorators:[o(".container")],key:"_container",value:void 0},{kind:"method",key:"render",value:function(){return a`
|
||||
<div class="top">
|
||||
<div
|
||||
id="summary"
|
||||
@click=${this._toggleContainer}
|
||||
@keydown=${this._toggleContainer}
|
||||
@focus=${this._focusChanged}
|
||||
@blur=${this._focusChanged}
|
||||
role="button"
|
||||
tabindex="0"
|
||||
aria-expanded=${this.expanded}
|
||||
aria-controls="sect1"
|
||||
>
|
||||
${this.leftChevron?a`
|
||||
<ha-svg-icon
|
||||
.path=${d}
|
||||
class="summary-icon ${s({expanded:this.expanded})}"
|
||||
></ha-svg-icon>
|
||||
`:""}
|
||||
<slot name="header">
|
||||
<div class="header">
|
||||
${this.header}
|
||||
<slot class="secondary" name="secondary">${this.secondary}</slot>
|
||||
</div>
|
||||
</slot>
|
||||
${this.leftChevron?"":a`
|
||||
<ha-svg-icon
|
||||
.path=${d}
|
||||
class="summary-icon ${s({expanded:this.expanded})}"
|
||||
></ha-svg-icon>
|
||||
`}
|
||||
</div>
|
||||
<slot name="icons"></slot>
|
||||
</div>
|
||||
<div
|
||||
class="container ${s({expanded:this.expanded})}"
|
||||
@transitionend=${this._handleTransitionEnd}
|
||||
role="region"
|
||||
aria-labelledby="summary"
|
||||
aria-hidden=${!this.expanded}
|
||||
tabindex="-1"
|
||||
>
|
||||
${this._showContent?a`<slot></slot>`:""}
|
||||
</div>
|
||||
`}},{kind:"method",key:"willUpdate",value:function(e){r(l(u.prototype),"willUpdate",this).call(this,e),e.has("expanded")&&this.expanded&&(this._showContent=this.expanded,setTimeout((()=>{this.expanded&&(this._container.style.overflow="initial")}),300))}},{kind:"method",key:"_handleTransitionEnd",value:function(){this._container.style.removeProperty("height"),this._container.style.overflow=this.expanded?"initial":"hidden",this._showContent=this.expanded}},{kind:"method",key:"_toggleContainer",value:async function(e){if(e.defaultPrevented)return;if("keydown"===e.type&&"Enter"!==e.key&&" "!==e.key)return;e.preventDefault();const t=!this.expanded;h(this,"expanded-will-change",{expanded:t}),this._container.style.overflow="hidden",t&&(this._showContent=!0,await c());const n=this._container.scrollHeight;this._container.style.height=`${n}px`,t||setTimeout((()=>{this._container.style.height="0px"}),0),this.expanded=t,h(this,"expanded-changed",{expanded:this.expanded})}},{kind:"method",key:"_focusChanged",value:function(e){this.shadowRoot.querySelector(".top").classList.toggle("focused","focus"===e.type)}},{kind:"get",static:!0,key:"styles",value:function(){return p`
|
||||
:host {
|
||||
display: block;
|
||||
}
|
||||
|
||||
.top {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.top.focused {
|
||||
background: var(--input-fill-color);
|
||||
}
|
||||
|
||||
:host([outlined]) {
|
||||
box-shadow: none;
|
||||
border-width: 1px;
|
||||
border-style: solid;
|
||||
border-color: var(
|
||||
--ha-card-border-color,
|
||||
var(--divider-color, #e0e0e0)
|
||||
);
|
||||
border-radius: var(--ha-card-border-radius, 4px);
|
||||
}
|
||||
|
||||
.summary-icon {
|
||||
margin-left: 8px;
|
||||
}
|
||||
|
||||
:host([leftchevron]) .summary-icon {
|
||||
margin-left: 0;
|
||||
margin-right: 8px;
|
||||
}
|
||||
|
||||
#summary {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
padding: var(--expansion-panel-summary-padding, 0 8px);
|
||||
min-height: 48px;
|
||||
align-items: center;
|
||||
cursor: pointer;
|
||||
overflow: hidden;
|
||||
font-weight: 500;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.summary-icon {
|
||||
transition: transform 150ms cubic-bezier(0.4, 0, 0.2, 1);
|
||||
direction: var(--direction);
|
||||
}
|
||||
|
||||
.summary-icon.expanded {
|
||||
transform: rotate(180deg);
|
||||
}
|
||||
|
||||
.header,
|
||||
::slotted([slot="header"]) {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.container {
|
||||
padding: var(--expansion-panel-content-padding, 0 8px);
|
||||
overflow: hidden;
|
||||
transition: height 300ms cubic-bezier(0.4, 0, 0.2, 1);
|
||||
height: 0px;
|
||||
}
|
||||
|
||||
.container.expanded {
|
||||
height: auto;
|
||||
}
|
||||
|
||||
.secondary {
|
||||
display: block;
|
||||
color: var(--secondary-text-color);
|
||||
font-size: 12px;
|
||||
}
|
||||
`}}]}}),t);
|
||||
Binary file not shown.
@@ -1,50 +0,0 @@
|
||||
import{a as e,h as i,e as t,i as a,$ as n,O as l,z as o,A as s,r as c,n as r,m as d}from"./main-ad130be7.js";import"./c.3f859915.js";e([r("search-input")],(function(e,i){return{F:class extends i{constructor(...i){super(...i),e(this)}},d:[{kind:"field",decorators:[t({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[t()],key:"filter",value:void 0},{kind:"field",decorators:[t({type:Boolean})],key:"suffix",value:()=>!1},{kind:"field",decorators:[t({type:Boolean})],key:"autofocus",value:()=>!1},{kind:"field",decorators:[t({type:String})],key:"label",value:void 0},{kind:"method",key:"focus",value:function(){var e;null===(e=this._input)||void 0===e||e.focus()}},{kind:"field",decorators:[a("ha-textfield",!0)],key:"_input",value:void 0},{kind:"method",key:"render",value:function(){return n`
|
||||
<ha-textfield
|
||||
.autofocus=${this.autofocus}
|
||||
.label=${this.label||"Search"}
|
||||
.value=${this.filter||""}
|
||||
icon
|
||||
.iconTrailing=${this.filter||this.suffix}
|
||||
@input=${this._filterInputChanged}
|
||||
>
|
||||
<slot name="prefix" slot="leadingIcon">
|
||||
<ha-svg-icon
|
||||
tabindex="-1"
|
||||
class="prefix"
|
||||
.path=${l}
|
||||
></ha-svg-icon>
|
||||
</slot>
|
||||
<div class="trailing" slot="trailingIcon">
|
||||
${this.filter&&n`
|
||||
<ha-icon-button
|
||||
@click=${this._clearSearch}
|
||||
.label=${this.hass.localize("ui.common.clear")}
|
||||
.path=${o}
|
||||
class="clear-button"
|
||||
></ha-icon-button>
|
||||
`}
|
||||
<slot name="suffix"></slot>
|
||||
</div>
|
||||
</ha-textfield>
|
||||
`}},{kind:"method",key:"_filterChanged",value:async function(e){s(this,"value-changed",{value:String(e)})}},{kind:"method",key:"_filterInputChanged",value:async function(e){this._filterChanged(e.target.value)}},{kind:"method",key:"_clearSearch",value:async function(){this._filterChanged("")}},{kind:"get",static:!0,key:"styles",value:function(){return c`
|
||||
:host {
|
||||
display: inline-flex;
|
||||
}
|
||||
ha-svg-icon,
|
||||
ha-icon-button {
|
||||
color: var(--primary-text-color);
|
||||
}
|
||||
ha-svg-icon {
|
||||
outline: none;
|
||||
}
|
||||
.clear-button {
|
||||
--mdc-icon-size: 20px;
|
||||
}
|
||||
ha-textfield {
|
||||
display: inherit;
|
||||
}
|
||||
.trailing {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
}
|
||||
`}}]}}),i);const u=d(((e,i)=>e.filter((e=>h(e.name).includes(h(i))||h(e.description).includes(h(i))||h(e.category).includes(h(i))||h(e.full_name).includes(h(i))||h(e.authors).includes(h(i))||h(e.domain).includes(h(i)))))),h=d((e=>String(e||"").toLocaleLowerCase().replace(/-|_| /g,"")));export{u as f};
|
||||
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user