initial commit
This commit is contained in:
@@ -0,0 +1,53 @@
|
||||
"""The TrueNAS integration"""
|
||||
from .const import DOMAIN, PLATFORMS
|
||||
from .truenas_controller import TrueNASControllerData
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# async_setup
|
||||
# ---------------------------
|
||||
async def async_setup(hass, _config):
    """Initialize integration-wide storage for TrueNAS config entries.

    NOTE(review): the original docstring said "OMV Controller" — a
    copy/paste leftover from the OpenMediaVault integration.
    """
    # Per-entry controllers are stored under hass.data[DOMAIN] by entry_id.
    hass.data[DOMAIN] = {}
    return True
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# update_listener
|
||||
# ---------------------------
|
||||
async def update_listener(hass, config_entry) -> None:
    """Reload the config entry whenever its options are updated."""
    entry_id = config_entry.entry_id
    await hass.config_entries.async_reload(entry_id)
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# async_setup_entry
|
||||
# ---------------------------
|
||||
async def async_setup_entry(hass, config_entry):
    """Set up a TrueNAS config entry.

    Creates the controller, runs the first data update, registers it in
    hass.data, then forwards setup to all sensor platforms.
    """
    controller = TrueNASControllerData(hass, config_entry)
    # First fetch must complete before platforms read controller.data.
    await controller.async_update()
    await controller.async_init()

    hass.data[DOMAIN][config_entry.entry_id] = controller

    await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
    # Reload this entry whenever its options change.
    config_entry.async_on_unload(config_entry.add_update_listener(update_listener))

    return True
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# async_unload_entry
|
||||
# ---------------------------
|
||||
async def async_unload_entry(hass, config_entry):
    """Unload a TrueNAS config entry.

    Returns True only when every platform unloaded cleanly, so Home
    Assistant does not treat a failed unload as successful.
    (Fix: the original returned True unconditionally.)
    """
    unload_ok = await hass.config_entries.async_unload_platforms(
        config_entry, PLATFORMS
    )
    if unload_ok:
        # Tear down the controller and drop it from integration storage.
        controller = hass.data[DOMAIN][config_entry.entry_id]
        await controller.async_reset()
        hass.data[DOMAIN].pop(config_entry.entry_id)

    return unload_ok
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,352 @@
|
||||
"""API parser for JSON APIs"""
|
||||
from pytz import utc
|
||||
from logging import getLogger
|
||||
from datetime import datetime
|
||||
from voluptuous import Optional
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from .const import TO_REDACT
|
||||
|
||||
_LOGGER = getLogger(__name__)
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# utc_from_timestamp
|
||||
# ---------------------------
|
||||
def utc_from_timestamp(timestamp: float) -> datetime:
    """Return a timezone-aware UTC datetime for a POSIX timestamp.

    Fix: ``datetime.utcfromtimestamp`` is deprecated since Python 3.12.
    Building the aware datetime directly via ``fromtimestamp(..., tz=utc)``
    is exactly equivalent to ``utc.localize(datetime.utcfromtimestamp(ts))``.
    """
    return datetime.fromtimestamp(timestamp, tz=utc)
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# from_entry
|
||||
# ---------------------------
|
||||
def from_entry(entry, param, default="") -> str:
    """Validate and return a value from an API dict.

    ``param`` may be a plain key or a "/"-separated path into nested dicts.
    Returns ``default`` when any path component is missing. Strings longer
    than 255 characters are truncated (entity state length limit).

    Fix: removed the dead ``str(ret)`` / ``int(ret)`` re-casts — casting a
    value to its own type is a no-op; only the float rounding had an effect.
    """
    if "/" in param:
        # Walk the nested path; bail out with the default on the first miss.
        for tmp_param in param.split("/"):
            if isinstance(entry, dict) and tmp_param in entry:
                entry = entry[tmp_param]
            else:
                return default

        ret = entry
    elif param in entry:
        ret = entry[param]
    else:
        return default

    # Rounding is only applied when the caller supplied an explicit default
    # (same guard as the original code).
    if default != "" and isinstance(ret, float):
        ret = round(ret, 2)

    # Cap overly long strings at 255 characters.
    return ret[:255] if isinstance(ret, str) and len(ret) > 255 else ret
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# from_entry_bool
|
||||
# ---------------------------
|
||||
def from_entry_bool(entry, param, default=False, reverse=False) -> bool:
    """Validate and return a bool value from an API dict.

    ``param`` may be a plain key or a "/"-separated path. Known textual
    states (on/yes/up and off/no/down in their usual capitalizations) are
    mapped to booleans; anything else falls back to ``default``. ``reverse``
    inverts the final result.
    """
    if "/" in param:
        for part in param.split("/"):
            if not isinstance(entry, dict) or part not in entry:
                return default
            entry = entry[part]
        value = entry
    elif param in entry:
        value = entry[param]
    else:
        return default

    truthy = ("on", "On", "ON", "yes", "Yes", "YES", "up", "Up", "UP")
    falsy = ("off", "Off", "OFF", "no", "No", "NO", "down", "Down", "DOWN")
    if isinstance(value, str):
        if value in truthy:
            value = True
        elif value in falsy:
            value = False

    if not isinstance(value, bool):
        value = default

    return not value if reverse else value
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# parse_api
|
||||
# ---------------------------
|
||||
def parse_api(
    data=None,
    source=None,
    key=None,
    key_secondary=None,
    key_search=None,
    vals=None,
    val_proc=None,
    ensure_vals=None,
    only=None,
    skip=None,
) -> dict:
    """Parse a JSON API response (list of dicts, or single dict) into *data*.

    Entries can be keyed by ``key`` (with ``key_secondary`` fallback) or
    looked up via ``key_search`` + a reverse keymap; ``only``/``skip`` filter
    entries; ``vals``/``ensure_vals``/``val_proc`` describe how fields are
    extracted into *data*. Returns the (mutated) *data* dict.

    Fix: ``type(source) == dict`` replaced with ``isinstance``.
    """
    debug = _LOGGER.getEffectiveLevel() == 10  # 10 == logging.DEBUG
    if isinstance(source, dict):
        # Normalize a single-entry response to a one-element list.
        source = [source]

    if not source:
        # No payload: flat (un-keyed) data is filled with defaults,
        # keyed data is left untouched.
        if not key and not key_search:
            data = fill_defaults(data, vals)
        return data

    if debug:
        _LOGGER.debug("Processing source %s", async_redact_data(source, TO_REDACT))

    keymap = generate_keymap(data, key_search)
    for entry in source:
        if only and not matches_only(entry, only):
            continue

        if skip and can_skip(entry, skip):
            continue

        uid = None
        if key or key_search:
            uid = get_uid(entry, key, key_secondary, key_search, keymap)
            if not uid:
                continue

            if uid not in data:
                data[uid] = {}

        if debug:
            _LOGGER.debug("Processing entry %s", async_redact_data(entry, TO_REDACT))

        if vals:
            data = fill_vals(data, entry, uid, vals)

        if ensure_vals:
            data = fill_ensure_vals(data, uid, ensure_vals)

        if val_proc:
            data = fill_vals_proc(data, uid, val_proc)

    return data
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# get_uid
|
||||
# ---------------------------
|
||||
def get_uid(entry, key, key_secondary, key_search, keymap) -> "str | None":
    """Return the unique id for *entry*, or None when it cannot be derived.

    Without ``key_search``: use ``entry[key]``, falling back to
    ``entry[key_secondary]``. With ``key_search``: translate
    ``entry[key_search]`` through the precomputed *keymap*.
    Falsy uids are normalized to None.

    Fixes: removed the dead condition
    ``if key_primary_found and key not in entry and not entry[key]`` (its
    first two clauses are contradictory, so it could never trigger), and
    replaced the ``voluptuous.Optional(str)`` return annotation — a
    validation-schema marker, not a type — with a real type annotation.
    """
    uid = None
    if not key_search:
        if key in entry:
            uid = entry[key]
        elif key_secondary:
            # Secondary key must exist and be truthy.
            if not entry.get(key_secondary):
                return None
            uid = entry[key_secondary]
    elif keymap and key_search in entry and entry[key_search] in keymap:
        uid = keymap[entry[key_search]]
    else:
        return None

    return uid or None
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# generate_keymap
|
||||
# ---------------------------
|
||||
def generate_keymap(data, key_search) -> "dict | None":
    """Build a reverse map of ``data[uid][key_search] -> uid``.

    Returns None when no ``key_search`` is requested.

    Fix: the return annotation used ``voluptuous.Optional(dict)`` — a
    validation-schema marker, not a typing construct.
    """
    if not key_search:
        return None
    return {data[uid][key_search]: uid for uid in data if key_search in data[uid]}
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# matches_only
|
||||
# ---------------------------
|
||||
def matches_only(entry, only) -> bool:
    """Return True when every {key, value} rule in *only* matches *entry*.

    An empty rule list matches nothing (returns False), mirroring the
    original loop-based implementation.
    """
    return bool(only) and all(
        rule["key"] in entry and entry[rule["key"]] == rule["value"]
        for rule in only
    )
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# can_skip
|
||||
# ---------------------------
|
||||
def can_skip(entry, skip) -> bool:
    """Return True when at least one skip rule matches *entry*.

    A rule matches when ``entry[name] == value``, or when ``value`` is ""
    and ``name`` is absent from the entry entirely.
    """
    for rule in skip:
        name = rule["name"]
        value = rule["value"]
        if name in entry and entry[name] == value:
            return True
        if value == "" and name not in entry:
            return True

    return False
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# fill_defaults
|
||||
# ---------------------------
|
||||
def fill_defaults(data, vals) -> dict:
    """Populate *data* with per-field defaults when no API source arrived.

    Each ``val`` spec may carry ``type`` ("str"/"bool"), ``source``,
    ``default``, ``default_val`` and ``reverse``; existing keys in *data*
    are never overwritten.
    """
    for val in vals:
        name = val["name"]
        kind = val.get("type", "str")
        source = val.get("source", name)

        if kind == "str":
            default = val.get("default", "")
            # default_val redirects the default to another field of the spec.
            if "default_val" in val and val["default_val"] in val:
                default = val[val["default_val"]]

            if name not in data:
                data[name] = from_entry([], source, default=default)

        elif kind == "bool":
            default = val.get("default", False)
            reverse = val.get("reverse", False)
            if name not in data:
                data[name] = from_entry_bool(
                    [], source, default=default, reverse=reverse
                )

    return data
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# fill_vals
|
||||
# ---------------------------
|
||||
def fill_vals(data, entry, uid, vals) -> dict:
    """Extract the fields described by *vals* from *entry* into *data*.

    When *uid* is truthy, values land in ``data[uid]``; otherwise directly
    in ``data``. Each spec supports type "str" (default) or "bool", an
    optional ``source`` path, defaults, and an optional
    ``convert == "utc_from_timestamp"`` post-processing step.
    """
    for val in vals:
        _name = val["name"]
        _type = val["type"] if "type" in val else "str"
        _source = val["source"] if "source" in val else _name
        _convert = val["convert"] if "convert" in val else None

        if _type == "str":
            _default = val["default"] if "default" in val else ""
            # default_val redirects the default to another field of the spec.
            if "default_val" in val and val["default_val"] in val:
                _default = val[val["default_val"]]

            if uid:
                data[uid][_name] = from_entry(entry, _source, default=_default)
            else:
                data[_name] = from_entry(entry, _source, default=_default)

        elif _type == "bool":
            _default = val["default"] if "default" in val else False
            _reverse = val["reverse"] if "reverse" in val else False

            if uid:
                data[uid][_name] = from_entry_bool(
                    entry, _source, default=_default, reverse=_reverse
                )
            else:
                data[_name] = from_entry_bool(
                    entry, _source, default=_default, reverse=_reverse
                )

        # Optional conversion of positive integer timestamps to aware UTC
        # datetimes. Values above 1e11 are presumed to be in milliseconds
        # and scaled to seconds first — TODO confirm against API payloads.
        if _convert == "utc_from_timestamp":
            if uid:
                if isinstance(data[uid][_name], int) and data[uid][_name] > 0:
                    if data[uid][_name] > 100000000000:
                        data[uid][_name] = data[uid][_name] / 1000

                    data[uid][_name] = utc_from_timestamp(data[uid][_name])
            elif isinstance(data[_name], int) and data[_name] > 0:
                if data[_name] > 100000000000:
                    data[_name] = data[_name] / 1000

                data[_name] = utc_from_timestamp(data[_name])

    return data
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# fill_ensure_vals
|
||||
# ---------------------------
|
||||
def fill_ensure_vals(data, uid, ensure_vals) -> dict:
    """Guarantee that every requested key exists in the target dict.

    The target is ``data[uid]`` when *uid* is truthy, else ``data`` itself;
    missing keys are created with the spec's ``default`` (or "").
    Existing values are left untouched.
    """
    target = data[uid] if uid else data
    for val in ensure_vals:
        if val["name"] not in target:
            target[val["name"]] = val.get("default", "")

    return data
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# fill_vals_proc
|
||||
# ---------------------------
|
||||
def fill_vals_proc(data, uid, vals_proc) -> dict:
    """Add computed ("custom") keys to *data* (or ``data[uid]``).

    Each item of *vals_proc* is a list of small instruction dicts processed
    in order: a ``{"name": ...}`` dict names the output key, an
    ``{"action": ...}`` dict selects the operation, and subsequent
    ``{"key": ...}`` / ``{"text": ...}`` dicts feed it. The only action
    implemented is "combine", which concatenates data values and literal
    text into one string.
    """
    _data = data[uid] if uid else data
    for val_sub in vals_proc:
        _name = None
        _action = None
        _value = None
        for val in val_sub:
            if "name" in val:
                _name = val["name"]
                continue

            if "action" in val:
                _action = val["action"]
                continue

            # Malformed spec: operand dicts before name/action — give up
            # on this instruction list.
            if not _name and not _action:
                break

            if _action == "combine":
                # Missing source keys contribute the literal "unknown".
                if "key" in val:
                    tmp = _data[val["key"]] if val["key"] in _data else "unknown"
                    _value = f"{_value}{tmp}" if _value else tmp

                if "text" in val:
                    tmp = val["text"]
                    _value = f"{_value}{tmp}" if _value else tmp

        # Only store a result when both a name and a non-empty value exist.
        if _name and _value:
            if uid:
                data[uid][_name] = _value
            else:
                data[_name] = _value

    return data
|
||||
@@ -0,0 +1,288 @@
|
||||
"""TrueNAS binary sensor platform"""
|
||||
from logging import getLogger
|
||||
from homeassistant.components.binary_sensor import BinarySensorEntity
|
||||
from .model import model_async_setup_entry, TrueNASEntity
|
||||
from .binary_sensor_types import SENSOR_TYPES, SENSOR_SERVICES
|
||||
|
||||
_LOGGER = getLogger(__name__)
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# async_setup_entry
|
||||
# ---------------------------
|
||||
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up TrueNAS binary sensors for a config entry."""
    sensor_classes = (
        TrueNASBinarySensor,
        TrueNASJailBinarySensor,
        TrueNASVMBinarySensor,
        TrueNASServiceBinarySensor,
    )
    # Map class names (as referenced by entity descriptions) to classes.
    dispatcher = {cls.__name__: cls for cls in sensor_classes}
    await model_async_setup_entry(
        hass,
        config_entry,
        async_add_entities,
        SENSOR_SERVICES,
        SENSOR_TYPES,
        dispatcher,
    )
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# TrueNASBinarySensor
|
||||
# ---------------------------
|
||||
class TrueNASBinarySensor(TrueNASEntity, BinarySensorEntity):
    """Generic TrueNAS binary sensor driven by one boolean data field."""

    @property
    def is_on(self) -> bool:
        """Return True when the monitored data field is truthy."""
        return self._data[self.entity_description.data_is_on]

    @property
    def icon(self) -> str:
        """Return the state-dependent icon (None when none configured)."""
        description = self.entity_description
        if not description.icon_enabled:
            return None
        return (
            description.icon_enabled
            if self._data[description.data_is_on]
            else description.icon_disabled
        )
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# TrueNASJailBinarySensor
|
||||
# ---------------------------
|
||||
class TrueNASJailBinarySensor(TrueNASBinarySensor):
    """Define a TrueNAS Jail Binary Sensor.

    Fix: start/stop/restart were three near-identical copies; the shared
    fetch-check-post sequence is consolidated into one helper. Rendered log
    messages and API calls are unchanged.
    """

    async def _jail_action(self, action, required_state, payload):
        """Run ``jail/<action>`` if the jail currently is in *required_state*."""
        tmp_jail = await self.hass.async_add_executor_job(
            self._ctrl.api.query, f"jail/id/{self._data['id']}"
        )

        if "state" not in tmp_jail:
            _LOGGER.error(
                "Jail %s (%s) invalid", self._data["comment"], self._data["id"]
            )
            return

        if tmp_jail["state"] != required_state:
            _LOGGER.warning(
                "Jail %s (%s) is not %s",
                self._data["comment"],
                self._data["id"],
                required_state,
            )
            return

        await self.hass.async_add_executor_job(
            self._ctrl.api.query, f"jail/{action}", "post", payload
        )

    async def start(self):
        """Start a Jail (only when it is currently down)."""
        await self._jail_action("start", "down", self._data["id"])

    async def stop(self):
        """Stop a Jail (only when it is currently up)."""
        # The stop endpoint expects a {"jail": id} payload, unlike start/restart.
        await self._jail_action("stop", "up", {"jail": self._data["id"]})

    async def restart(self):
        """Restart a Jail (only when it is currently up)."""
        await self._jail_action("restart", "up", self._data["id"])
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# TrueNASVMBinarySensor
|
||||
# ---------------------------
|
||||
class TrueNASVMBinarySensor(TrueNASBinarySensor):
    """Define a TrueNAS VM Binary Sensor.

    Fix: start/stop duplicated the same fetch-check-post sequence; it is
    consolidated into one helper. Rendered log messages and API calls are
    unchanged.
    """

    async def _vm_action(self, action, required_state, state_word):
        """Run ``vm/id/<id>/<action>`` if the VM state equals *required_state*."""
        tmp_vm = await self.hass.async_add_executor_job(
            self._ctrl.api.query, f"vm/id/{self._data['id']}"
        )

        if "status" not in tmp_vm:
            _LOGGER.error("VM %s (%s) invalid", self._data["name"], self._data["id"])
            return

        if tmp_vm["status"]["state"] != required_state:
            # state_word keeps the original "is not down"/"is not up" wording.
            _LOGGER.warning(
                "VM %s (%s) is not %s",
                self._data["name"],
                self._data["id"],
                state_word,
            )
            return

        await self.hass.async_add_executor_job(
            self._ctrl.api.query, f"vm/id/{self._data['id']}/{action}", "post"
        )

    async def start(self):
        """Start a VM (only when it is currently STOPPED)."""
        await self._vm_action("start", "STOPPED", "down")

    async def stop(self):
        """Stop a VM (only when it is currently RUNNING)."""
        await self._vm_action("stop", "RUNNING", "up")
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# TrueNASServiceBinarySensor
|
||||
# ---------------------------
|
||||
class TrueNASServiceBinarySensor(TrueNASBinarySensor):
    """Define a TrueNAS Service Binary Sensor.

    Fix: start/stop/restart/reload were four near-identical copies; the
    shared fetch-check-post-refresh sequence is consolidated into one
    helper. Rendered log messages and API calls are unchanged.
    """

    async def _service_action(self, action, require_stopped):
        """Run ``service/<action>`` when the service's stopped-ness matches.

        *require_stopped* is True for "start" (service must be STOPPED) and
        False for stop/restart/reload (service must be running).
        """
        tmp_service = await self.hass.async_add_executor_job(
            self._ctrl.api.query, f"service/id/{self._data['id']}"
        )

        if "state" not in tmp_service:
            _LOGGER.error(
                "Service %s (%s) invalid", self._data["service"], self._data["id"]
            )
            return

        stopped = tmp_service["state"] == "STOPPED"
        if stopped != require_stopped:
            _LOGGER.warning(
                "Service %s (%s) is not %s",
                self._data["service"],
                self._data["id"],
                "stopped" if require_stopped else "running",
            )
            return

        await self.hass.async_add_executor_job(
            self._ctrl.api.query,
            f"service/{action}",
            "post",
            {"service": self._data["service"]},
        )
        # Refresh controller data so the sensor reflects the new state.
        await self._ctrl.async_update()

    async def start(self):
        """Start a Service (only when it is currently STOPPED)."""
        await self._service_action("start", True)

    async def stop(self):
        """Stop a Service (only when it is currently running)."""
        await self._service_action("stop", False)

    async def restart(self):
        """Restart a Service (only when it is currently running)."""
        await self._service_action("restart", False)

    async def reload(self):
        """Reload a Service (only when it is currently running)."""
        await self._service_action("reload", False)
|
||||
@@ -0,0 +1,159 @@
|
||||
"""Definitions for TrueNAS binary sensor entities"""
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List
|
||||
from homeassistant.components.binary_sensor import BinarySensorEntityDescription
|
||||
from .const import (
|
||||
SERVICE_JAIL_START,
|
||||
SCHEMA_SERVICE_JAIL_START,
|
||||
SERVICE_JAIL_STOP,
|
||||
SCHEMA_SERVICE_JAIL_STOP,
|
||||
SERVICE_JAIL_RESTART,
|
||||
SCHEMA_SERVICE_JAIL_RESTART,
|
||||
SERVICE_SERVICE_START,
|
||||
SCHEMA_SERVICE_SERVICE_START,
|
||||
SERVICE_SERVICE_STOP,
|
||||
SCHEMA_SERVICE_SERVICE_STOP,
|
||||
SERVICE_SERVICE_RESTART,
|
||||
SCHEMA_SERVICE_SERVICE_RESTART,
|
||||
SERVICE_SERVICE_RELOAD,
|
||||
SCHEMA_SERVICE_SERVICE_RELOAD,
|
||||
SERVICE_VM_START,
|
||||
SCHEMA_SERVICE_VM_START,
|
||||
SERVICE_VM_STOP,
|
||||
SCHEMA_SERVICE_VM_STOP,
|
||||
)
|
||||
|
||||
# Extra state attributes copied from controller data onto pool sensors.
DEVICE_ATTRIBUTES_POOL = [
    "path",
    "status",
    "healthy",
    "is_decrypted",
    "autotrim",
    "scrub_state",
    "scrub_start",
    "scrub_end",
    "scrub_secs_left",
    "available_gib",
]

# Extra state attributes for jail sensors.
DEVICE_ATTRIBUTES_JAIL = [
    "comment",
    "jail_zfs_dataset",
    "last_started",
    "ip4_addr",
    "ip6_addr",
    "release",
    "type",
    "plugin_name",
]

# Extra state attributes for VM sensors.
DEVICE_ATTRIBUTES_VM = [
    "description",
    "vcpus",
    "memory",
    "autostart",
    "cores",
    "threads",
]

# Extra state attributes for service sensors.
DEVICE_ATTRIBUTES_SERVICE = [
    "enable",
    "state",
]
|
||||
|
||||
|
||||
@dataclass
class TrueNASBinarySensorEntityDescription(BinarySensorEntityDescription):
    """Class describing TrueNAS binary sensor entities.

    Fixes: the docstring said "mikrotik entities" (copy/paste from the
    Mikrotik integration), and the mutable-default used
    ``default_factory=lambda: []`` where the idiomatic
    ``default_factory=list`` is equivalent.
    """

    # Icons used for the on/off states ("" means no custom icon).
    icon_enabled: str = ""
    icon_disabled: str = ""
    # Device grouping / connection hints for the entity registry.
    ha_group: str = ""
    ha_connection: str = ""
    ha_connection_value: str = ""
    # Where in controller data this sensor reads from.
    data_path: str = ""
    data_is_on: str = "available"
    data_name: str = ""
    data_uid: str = ""
    data_reference: str = ""
    data_attributes_list: List = field(default_factory=list)
    # Name of the entity class used to instantiate this sensor.
    func: str = "TrueNASBinarySensor"
|
||||
|
||||
|
||||
# Binary sensor entity descriptions, keyed by sensor type. ``data_path``
# selects the controller-data section, ``data_reference`` the per-item key,
# and ``func`` the entity class name resolved via the dispatcher in
# async_setup_entry.
SENSOR_TYPES = {
    "pool_healthy": TrueNASBinarySensorEntityDescription(
        key="pool_healthy",
        name="healthy",
        icon_enabled="mdi:database",
        icon_disabled="mdi:database-off",
        device_class=None,
        entity_category=None,
        ha_group="System",
        data_path="pool",
        data_is_on="healthy",
        data_name="name",
        data_uid="",
        data_reference="guid",
        data_attributes_list=DEVICE_ATTRIBUTES_POOL,
    ),
    "jail": TrueNASBinarySensorEntityDescription(
        key="jail",
        name="",
        icon_enabled="mdi:layers",
        icon_disabled="mdi:layers-off",
        device_class=None,
        entity_category=None,
        ha_group="Jails",
        data_path="jail",
        data_is_on="state",
        data_name="host_hostname",
        data_uid="",
        data_reference="id",
        data_attributes_list=DEVICE_ATTRIBUTES_JAIL,
        func="TrueNASJailBinarySensor",
    ),
    "vm": TrueNASBinarySensorEntityDescription(
        key="vm",
        name="",
        icon_enabled="mdi:server",
        icon_disabled="mdi:server-off",
        device_class=None,
        entity_category=None,
        ha_group="VMs",
        data_path="vm",
        data_is_on="running",
        data_name="name",
        data_uid="",
        data_reference="id",
        data_attributes_list=DEVICE_ATTRIBUTES_VM,
        func="TrueNASVMBinarySensor",
    ),
    "service": TrueNASBinarySensorEntityDescription(
        key="service",
        name="",
        icon_enabled="mdi:cog",
        icon_disabled="mdi:cog-off",
        device_class=None,
        entity_category=None,
        ha_group="Services",
        data_path="service",
        data_is_on="running",
        data_name="service",
        data_uid="",
        data_reference="id",
        data_attributes_list=DEVICE_ATTRIBUTES_SERVICE,
        func="TrueNASServiceBinarySensor",
    ),
}
|
||||
|
||||
# Entity services registered for these sensors:
# [service name, voluptuous schema, entity method to invoke].
SENSOR_SERVICES = [
    [SERVICE_JAIL_START, SCHEMA_SERVICE_JAIL_START, "start"],
    [SERVICE_JAIL_STOP, SCHEMA_SERVICE_JAIL_STOP, "stop"],
    [SERVICE_JAIL_RESTART, SCHEMA_SERVICE_JAIL_RESTART, "restart"],
    [SERVICE_VM_START, SCHEMA_SERVICE_VM_START, "start"],
    [SERVICE_VM_STOP, SCHEMA_SERVICE_VM_STOP, "stop"],
    [SERVICE_SERVICE_START, SCHEMA_SERVICE_SERVICE_START, "start"],
    [SERVICE_SERVICE_STOP, SCHEMA_SERVICE_SERVICE_STOP, "stop"],
    [SERVICE_SERVICE_RESTART, SCHEMA_SERVICE_SERVICE_RESTART, "restart"],
    [SERVICE_SERVICE_RELOAD, SCHEMA_SERVICE_SERVICE_RELOAD, "reload"],
]
|
||||
@@ -0,0 +1,113 @@
|
||||
"""Config flow to configure TrueNAS"""
|
||||
|
||||
import voluptuous as vol
|
||||
from logging import getLogger
|
||||
from homeassistant.config_entries import CONN_CLASS_LOCAL_POLL, ConfigFlow
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_NAME,
|
||||
CONF_API_KEY,
|
||||
CONF_SSL,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from .const import (
|
||||
DEFAULT_DEVICE_NAME,
|
||||
DEFAULT_HOST,
|
||||
DEFAULT_SSL,
|
||||
DEFAULT_SSL_VERIFY,
|
||||
DOMAIN,
|
||||
)
|
||||
from .truenas_api import TrueNASAPI
|
||||
|
||||
_LOGGER = getLogger(__name__)
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# configured_instances
|
||||
# ---------------------------
|
||||
@callback
def configured_instances(hass):
    """Return the set of entry names already configured for this domain."""
    entries = hass.config_entries.async_entries(DOMAIN)
    return {entry.data[CONF_NAME] for entry in entries}
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# TrueNASConfigFlow
|
||||
# ---------------------------
|
||||
class TrueNASConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle the TrueNAS config flow (user + import steps)."""

    VERSION = 1
    CONNECTION_CLASS = CONN_CLASS_LOCAL_POLL

    def __init__(self):
        """Initialize TrueNASConfigFlow."""
        # Intentionally empty: no per-flow state is kept.

    async def async_step_import(self, user_input=None):
        """Occurs when a previous entry setup fails and is re-initiated."""
        return await self.async_step_user(user_input)

    async def async_step_user(self, user_input=None):
        """Handle a flow initialized by the user.

        Validates name uniqueness and tests the API connection before
        creating the entry; otherwise re-shows the form with errors.
        """
        errors = {}
        if user_input is not None:
            # Check if instance with this name already exists
            if user_input[CONF_NAME] in configured_instances(self.hass):
                errors["base"] = "name_exists"

            # Test connection. API construction may do blocking I/O, hence
            # the executor job.
            api = await self.hass.async_add_executor_job(
                TrueNASAPI,
                self.hass,
                user_input[CONF_HOST],
                user_input[CONF_API_KEY],
                user_input[CONF_SSL],
                user_input[CONF_VERIFY_SSL],
            )

            conn, errorcode = await self.hass.async_add_executor_job(
                api.connection_test
            )
            if not conn:
                errors[CONF_HOST] = errorcode
                _LOGGER.error("TrueNAS connection error (%s)", errorcode)

            # Save instance
            if not errors:
                return self.async_create_entry(
                    title=user_input[CONF_NAME], data=user_input
                )

            return self._show_config_form(user_input=user_input, errors=errors)

        # First visit: pre-fill the form with defaults.
        return self._show_config_form(
            user_input={
                CONF_NAME: DEFAULT_DEVICE_NAME,
                CONF_HOST: DEFAULT_HOST,
                CONF_API_KEY: "",
                CONF_SSL: DEFAULT_SSL,
                CONF_VERIFY_SSL: DEFAULT_SSL_VERIFY,
            },
            errors=errors,
        )

    def _show_config_form(self, user_input, errors=None):
        """Show the configuration form, pre-filled from *user_input*."""
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_NAME, default=user_input[CONF_NAME]): str,
                    vol.Required(CONF_HOST, default=user_input[CONF_HOST]): str,
                    vol.Required(CONF_API_KEY, default=user_input[CONF_API_KEY]): str,
                    vol.Optional(CONF_SSL, default=user_input[CONF_SSL]): bool,
                    vol.Optional(
                        CONF_VERIFY_SSL, default=user_input[CONF_VERIFY_SSL]
                    ): bool,
                }
            ),
            errors=errors,
        )
|
||||
@@ -0,0 +1,65 @@
|
||||
"""Constants used by the TrueNAS integration"""
|
||||
from homeassistant.const import Platform
|
||||
|
||||
# Entity platforms this integration sets up.
PLATFORMS = [
    Platform.SENSOR,
    Platform.BINARY_SENSOR,
    Platform.UPDATE,
]

DOMAIN = "truenas"
DEFAULT_NAME = "root"
ATTRIBUTION = "Data provided by TrueNAS integration"

# Config-flow defaults.
DEFAULT_HOST = "10.0.0.1"
DEFAULT_USERNAME = "admin"

DEFAULT_DEVICE_NAME = "TrueNAS"
DEFAULT_SSL = False
DEFAULT_SSL_VERIFY = True

# Keys stripped from diagnostics output and debug logs.
TO_REDACT = {
    "username",
    "password",
    "encryption_password",
    "encryption_salt",
    "host",
    "api_key",
    "serial",
    "system_serial",
    "ip4_addr",
    "ip6_addr",
}

# Entity service names and their (currently empty) service schemas.
SERVICE_CLOUDSYNC_RUN = "cloudsync_run"
SCHEMA_SERVICE_CLOUDSYNC_RUN = {}

SERVICE_DATASET_SNAPSHOT = "dataset_snapshot"
SCHEMA_SERVICE_DATASET_SNAPSHOT = {}

SERVICE_SYSTEM_REBOOT = "system_reboot"
SCHEMA_SERVICE_SYSTEM_REBOOT = {}

SERVICE_SYSTEM_SHUTDOWN = "system_shutdown"
SCHEMA_SERVICE_SYSTEM_SHUTDOWN = {}

SERVICE_SERVICE_START = "service_start"
SCHEMA_SERVICE_SERVICE_START = {}
SERVICE_SERVICE_STOP = "service_stop"
SCHEMA_SERVICE_SERVICE_STOP = {}
SERVICE_SERVICE_RESTART = "service_restart"
SCHEMA_SERVICE_SERVICE_RESTART = {}
SERVICE_SERVICE_RELOAD = "service_reload"
SCHEMA_SERVICE_SERVICE_RELOAD = {}

SERVICE_JAIL_START = "jail_start"
SCHEMA_SERVICE_JAIL_START = {}
SERVICE_JAIL_STOP = "jail_stop"
SCHEMA_SERVICE_JAIL_STOP = {}
SERVICE_JAIL_RESTART = "jail_restart"
SCHEMA_SERVICE_JAIL_RESTART = {}

SERVICE_VM_START = "vm_start"
SCHEMA_SERVICE_VM_START = {}
SERVICE_VM_STOP = "vm_stop"
SCHEMA_SERVICE_VM_STOP = {}
|
||||
@@ -0,0 +1,22 @@
|
||||
"""Diagnostics support for TrueNAS"""
|
||||
from __future__ import annotations
|
||||
from typing import Any
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from .const import DOMAIN, TO_REDACT
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, config_entry: ConfigEntry
) -> dict[str, Any]:
    """Return redacted diagnostics for a TrueNAS config entry"""
    controller = hass.data[DOMAIN][config_entry.entry_id]
    # Redact credentials/host identifiers (TO_REDACT) from both the
    # stored entry and the controller's live data.
    entry_info = {
        "data": async_redact_data(config_entry.data, TO_REDACT),
        "options": async_redact_data(config_entry.options, TO_REDACT),
    }
    return {
        "entry": entry_info,
        "data": async_redact_data(controller.data, TO_REDACT),
    }
|
||||
@@ -0,0 +1,42 @@
|
||||
"""Helper functions"""
|
||||
from pytz import utc
|
||||
from datetime import datetime
|
||||
|
||||
DEFAULT_TIME_ZONE = None
|
||||
|
||||
|
||||
# ---------------------------
# format_attribute
# ---------------------------
def format_attribute(attr):
    """Turn a raw attribute key into a human-readable label"""
    label = attr.replace("_", " ").replace("-", " ").capitalize()
    # capitalize() lowercases everything after the first character, so
    # restore the casing of known units/acronyms/brand names afterwards.
    for old, new in (
        ("zfs", "ZFS"),
        (" gib", " GiB"),
        ("Cpu ", "CPU "),
        ("Vcpu ", "vCPU "),
        ("Vmware ", "VMware "),
        ("Ip4 ", "IP4 "),
        ("Ip6 ", "IP6 "),
    ):
        label = label.replace(old, new)
    return label
|
||||
|
||||
|
||||
# ---------------------------
# as_local
# ---------------------------
def as_local(dattim: datetime) -> datetime:
    """Convert a UTC datetime object to local time zone"""
    # Already in the target zone (or both are None/naive): nothing to do.
    if dattim.tzinfo == DEFAULT_TIME_ZONE:
        return dattim
    # Naive timestamps are assumed to be UTC before conversion.
    localized = utc.localize(dattim) if dattim.tzinfo is None else dattim
    return localized.astimezone(DEFAULT_TIME_ZONE)
|
||||
|
||||
|
||||
# ---------------------------
# b2gib
# ---------------------------
def b2gib(b: int) -> float:
    """Convert a byte count to GiB, rounded to two decimal places"""
    gib = b / (1 << 30)
    return round(gib, 2)
|
||||
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"domain": "truenas",
|
||||
"name": "TrueNAS",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling",
|
||||
"documentation": "https://github.com/tomaae/homeassistant-truenas",
|
||||
"issue_tracker": "https://github.com/tomaae/homeassistant-truenas/issues",
|
||||
"dependencies": [],
|
||||
"requirements": [],
|
||||
"codeowners": [
|
||||
"@tomaae"
|
||||
],
|
||||
"version": "0.0.0"
|
||||
}
|
||||
@@ -0,0 +1,220 @@
|
||||
"""TrueNAS HA shared entity model"""
|
||||
from logging import getLogger
|
||||
from typing import Any
|
||||
from collections.abc import Mapping
|
||||
from homeassistant.helpers import entity_platform
|
||||
from homeassistant.helpers.entity import DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME, CONF_HOST
|
||||
from .helper import format_attribute
|
||||
from .const import DOMAIN, ATTRIBUTION
|
||||
|
||||
_LOGGER = getLogger(__name__)
|
||||
|
||||
|
||||
# ---------------------------
# model_async_setup_entry
# ---------------------------
async def model_async_setup_entry(
    hass, config_entry, async_add_entities, sensor_services, sensor_types, dispatcher
):
    """Shared platform setup: register entity services and create entities.

    ``sensor_services`` is a list of (service name, schema, entity method)
    triples registered on the current platform; ``dispatcher`` maps the
    ``func`` name of an entity description to the entity class used to
    build it; ``sensor_types`` maps sensor keys to their descriptions.
    """
    inst = config_entry.data[CONF_NAME]
    truenas_controller = hass.data[DOMAIN][config_entry.entry_id]
    # item_id -> entity, shared with every update_controller() call so
    # entities are created only once and refreshed afterwards.
    sensors = {}

    platform = entity_platform.async_get_current_platform()
    for tmp in sensor_services:
        platform.async_register_entity_service(tmp[0], tmp[1], tmp[2])

    @callback
    def update_controller():
        """Update the values of the controller"""
        model_update_items(
            inst,
            truenas_controller,
            async_add_entities,
            sensors,
            dispatcher,
            sensor_types,
        )

    # Re-run entity creation/refresh whenever the controller signals new
    # data; keep the unsubscribe handle so it can be released on unload.
    truenas_controller.listeners.append(
        async_dispatcher_connect(
            hass, truenas_controller.signal_update, update_controller
        )
    )
    # Run once immediately for the data the controller already holds.
    update_controller()
|
||||
|
||||
|
||||
# ---------------------------
# model_update_items
# ---------------------------
def model_update_items(
    inst, truenas_controller, async_add_entities, sensors, dispatcher, sensor_types
):
    """Create entities for current controller data and refresh existing ones."""

    def _register_entity(_sensors, _item_id, _uid, _uid_sensor):
        # Existing entity: schedule a state refresh (if enabled) and
        # return None so the caller does not add it again.
        _LOGGER.debug("Updating entity %s", _item_id)
        if _item_id in _sensors:
            if _sensors[_item_id].enabled:
                _sensors[_item_id].async_schedule_update_ha_state()
            return None

        # New entity: instantiate the class named by the description's
        # ``func`` field via the dispatcher mapping.
        return dispatcher[_uid_sensor.func](
            inst=inst,
            uid=_uid,
            truenas_controller=truenas_controller,
            entity_description=_uid_sensor,
        )

    new_sensors = []
    for sensor in sensor_types:
        uid_sensor = sensor_types[sensor]
        if not uid_sensor.data_reference:
            # Singleton sensor (no per-item reference key).
            uid_sensor = sensor_types[sensor]
            # Skip when the backing attribute is missing or unresolved.
            if (
                uid_sensor.data_attribute
                not in truenas_controller.data[uid_sensor.data_path]
                or truenas_controller.data[uid_sensor.data_path][
                    uid_sensor.data_attribute
                ]
                == "unknown"
            ):
                continue

            item_id = f"{inst}-{sensor}"
            if tmp := _register_entity(sensors, item_id, "", uid_sensor):
                sensors[item_id] = tmp
                new_sensors.append(sensors[item_id])
        else:
            # One entity per item found under data_path, keyed by the
            # (lower-cased) value of the reference attribute.
            for uid in truenas_controller.data[uid_sensor.data_path]:
                uid_data = truenas_controller.data[uid_sensor.data_path]
                item_id = f"{inst}-{sensor}-{str(uid_data[uid][uid_sensor.data_reference]).lower()}"
                if tmp := _register_entity(sensors, item_id, uid, uid_sensor):
                    sensors[item_id] = tmp
                    new_sensors.append(sensors[item_id])

    if new_sensors:
        # True: request an initial update for the newly added entities.
        async_add_entities(new_sensors, True)
|
||||
|
||||
|
||||
# ---------------------------
# TrueNASEntity
# ---------------------------
class TrueNASEntity:
    """Define entity.

    Mixin base for TrueNAS entities; it is always combined with a Home
    Assistant entity class (e.g. ``class TrueNASSensor(TrueNASEntity,
    SensorEntity)``), so ``super()`` calls resolve into that sibling
    class via the MRO.
    """

    # Let HA compose the entity name from the device name plus self.name.
    _attr_has_entity_name = True

    def __init__(
        self,
        inst,
        uid: str,
        truenas_controller,
        entity_description,
    ):
        """Initialize entity.

        ``uid`` is the item key under the description's data_path for
        per-item entities, or "" for singleton entities.
        """
        self.entity_description = entity_description
        self._inst = inst
        self._ctrl = truenas_controller
        self._attr_extra_state_attributes = {ATTR_ATTRIBUTION: ATTRIBUTION}
        self._uid = uid
        # _data is a live reference into the controller's data dict;
        # controller updates are therefore visible without reassignment.
        if self._uid:
            self._data = truenas_controller.data[self.entity_description.data_path][
                self._uid
            ]
        else:
            self._data = truenas_controller.data[self.entity_description.data_path]

    @property
    def name(self) -> str:
        """Return the name for this entity"""
        # Singleton entities use the static description name; per-item
        # entities prefix it with the item's own display name.
        if not self._uid:
            return f"{self.entity_description.name}"

        if self.entity_description.name:
            return f"{self._data[self.entity_description.data_name]} {self.entity_description.name}"

        return f"{self._data[self.entity_description.data_name]}"

    @property
    def unique_id(self) -> str:
        """Return a unique id for this entity"""
        # instance + description key (+ per-item reference when present).
        if self._uid:
            return f"{self._inst.lower()}-{self.entity_description.key}-{str(self._data[self.entity_description.data_reference]).lower()}"
        else:
            return f"{self._inst.lower()}-{self.entity_description.key}"

    @property
    def available(self) -> bool:
        """Return if controller is available"""
        return self._ctrl.connected()

    @property
    def device_info(self) -> DeviceInfo:
        """Return a description for device registry"""
        dev_connection = DOMAIN
        dev_connection_value = f"{self._ctrl.name}_{self.entity_description.ha_group}"
        dev_group = self.entity_description.ha_group
        # "System" entities attach to the device named after the host.
        if self.entity_description.ha_group == "System":
            dev_connection_value = self._ctrl.data["system_info"]["hostname"]

        # A "data__<key>" group resolves the group name dynamically from
        # the item's data.
        if self.entity_description.ha_group.startswith("data__"):
            dev_group = self.entity_description.ha_group[6:]
            if dev_group in self._data:
                dev_group = self._data[dev_group]
                dev_connection_value = dev_group

        if self.entity_description.ha_connection:
            dev_connection = self.entity_description.ha_connection

        # Same "data__<key>" indirection for the connection value.
        if self.entity_description.ha_connection_value:
            dev_connection_value = self.entity_description.ha_connection_value
            if dev_connection_value.startswith("data__"):
                dev_connection_value = dev_connection_value[6:]
                dev_connection_value = self._data[dev_connection_value]

        return DeviceInfo(
            connections={(dev_connection, f"{dev_connection_value}")},
            identifiers={(dev_connection, f"{dev_connection_value}")},
            default_name=f"{self._inst} {dev_group}",
            default_manufacturer=f"{self._ctrl.data['system_info']['system_manufacturer']}",
            default_model=f"{self._ctrl.data['system_info']['system_product']}",
            sw_version=f"{self._ctrl.data['system_info']['version']}",
            configuration_url=f"http://{self._ctrl.config_entry.data[CONF_HOST]}",
            via_device=(DOMAIN, f"{self._ctrl.data['system_info']['hostname']}"),
        )

    @property
    def extra_state_attributes(self) -> Mapping[str, Any]:
        """Return the state attributes"""
        # Start from the HA entity's attributes (includes the attribution
        # set in __init__) and merge in the description's data attributes.
        attributes = super().extra_state_attributes
        for variable in self.entity_description.data_attributes_list:
            if variable in self._data:
                attributes[format_attribute(variable)] = self._data[variable]

        return attributes

    async def async_added_to_hass(self):
        """Run when entity about to be added to hass"""
        _LOGGER.debug("New binary sensor %s (%s)", self._inst, self.unique_id)

    # The methods below are default no-op targets for the registered
    # entity services; subclasses override the ones they support.

    async def start(self):
        """Dummy run function"""
        _LOGGER.error("Start functionality does not exist for %s", self.entity_id)

    async def stop(self):
        """Dummy stop function"""
        _LOGGER.error("Stop functionality does not exist for %s", self.entity_id)

    async def restart(self):
        """Dummy restart function"""
        _LOGGER.error("Restart functionality does not exist for %s", self.entity_id)

    async def reload(self):
        """Dummy reload function"""
        _LOGGER.error("Reload functionality does not exist for %s", self.entity_id)

    async def snapshot(self):
        """Dummy snapshot function"""
        _LOGGER.error("Snapshot functionality does not exist for %s", self.entity_id)
|
||||
@@ -0,0 +1,131 @@
|
||||
"""TrueNAS sensor platform"""
|
||||
from logging import getLogger
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
from homeassistant.components.sensor import SensorEntity
|
||||
from .model import model_async_setup_entry, TrueNASEntity
|
||||
from .sensor_types import SENSOR_TYPES, SENSOR_SERVICES
|
||||
|
||||
_LOGGER = getLogger(__name__)
|
||||
|
||||
|
||||
# ---------------------------
# async_setup_entry
# ---------------------------
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up entry for TrueNAS component"""
    # Map the ``func`` field of a sensor description to its entity class.
    entity_classes = (
        TrueNASSensor,
        TrueNASUptimeSensor,
        TrueNASClousyncSensor,
        TrueNASDatasetSensor,
    )
    dispatcher = {cls.__name__: cls for cls in entity_classes}
    await model_async_setup_entry(
        hass,
        config_entry,
        async_add_entities,
        SENSOR_SERVICES,
        SENSOR_TYPES,
        dispatcher,
    )
|
||||
|
||||
|
||||
# ---------------------------
# TrueNASSensor
# ---------------------------
class TrueNASSensor(TrueNASEntity, SensorEntity):
    """Define an TrueNAS sensor"""

    @property
    def state(self) -> Optional[str]:
        """Return the state"""
        attribute = self.entity_description.data_attribute
        if not attribute:
            return "unknown"
        return self._data[attribute]

    @property
    def native_unit_of_measurement(self):
        """Return the unit the value is expressed in"""
        unit = self.entity_description.native_unit_of_measurement
        if not unit:
            return None
        if unit.startswith("data__"):
            # Dynamic unit: resolve "data__<key>" through the item's data,
            # falling back to the bare key when it is absent.
            key = unit[6:]
            return self._data[key] if key in self._data else key
        return unit
|
||||
|
||||
|
||||
# ---------------------------
# TrueNASUptimeSensor
# ---------------------------
class TrueNASUptimeSensor(TrueNASSensor):
    """Define an TrueNAS Uptime sensor"""

    async def restart(self):
        """Reboot the TrueNAS system"""
        await self._post_system("reboot")

    async def stop(self):
        """Shut down the TrueNAS system"""
        await self._post_system("shutdown")

    async def _post_system(self, action):
        """POST a system/<action> API call off the event loop"""
        await self.hass.async_add_executor_job(
            self._ctrl.api.query,
            f"system/{action}",
            "post",
        )
|
||||
|
||||
|
||||
# ---------------------------
# TrueNASDatasetSensor
# ---------------------------
class TrueNASDatasetSensor(TrueNASSensor):
    """Define an TrueNAS Dataset sensor"""

    async def snapshot(self):
        """Create dataset snapshot"""
        # Timestamped name keeps manually triggered snapshots unique.
        stamp = datetime.now().isoformat(sep="_", timespec="microseconds")
        payload = {"dataset": f"{self._data['name']}", "name": f"custom-{stamp}"}
        await self.hass.async_add_executor_job(
            self._ctrl.api.query,
            "zfs/snapshot",
            "post",
            payload,
        )
|
||||
|
||||
|
||||
# ---------------------------
# TrueNASClousyncSensor
# ---------------------------
class TrueNASClousyncSensor(TrueNASSensor):
    """Define an TrueNAS Cloudsync sensor"""

    async def start(self):
        """Run cloudsync job unless it is invalid or already active"""
        # Fetch current job info to avoid double-starting a running sync.
        tmp_job = await self.hass.async_add_executor_job(
            self._ctrl.api.query, f"cloudsync/id/{self._data['id']}"
        )

        if "job" not in tmp_job:
            # Fixed message text: was misspelled "Clousync".
            _LOGGER.error(
                "Cloudsync job %s (%s) invalid",
                self._data["description"],
                self._data["id"],
            )
            return
        if tmp_job["job"]["state"] in ["WAITING", "RUNNING"]:
            _LOGGER.warning(
                "Cloudsync job %s (%s) is already running",
                self._data["description"],
                self._data["id"],
            )
            return

        await self.hass.async_add_executor_job(
            self._ctrl.api.query, f"cloudsync/id/{self._data['id']}/sync", "post"
        )
|
||||
@@ -0,0 +1,457 @@
|
||||
"""Definitions for TrueNAS sensor entities"""
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List
|
||||
from homeassistant.helpers.entity import EntityCategory
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorStateClass,
|
||||
SensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, TEMP_CELSIUS, DATA_GIBIBYTES, DATA_KIBIBYTES
|
||||
from .const import (
|
||||
SERVICE_CLOUDSYNC_RUN,
|
||||
SCHEMA_SERVICE_CLOUDSYNC_RUN,
|
||||
SERVICE_DATASET_SNAPSHOT,
|
||||
SCHEMA_SERVICE_DATASET_SNAPSHOT,
|
||||
SERVICE_SYSTEM_REBOOT,
|
||||
SCHEMA_SERVICE_SYSTEM_REBOOT,
|
||||
SERVICE_SYSTEM_SHUTDOWN,
|
||||
SCHEMA_SERVICE_SYSTEM_SHUTDOWN,
|
||||
)
|
||||
|
||||
# Extra state attributes exposed per entity type; each key is looked up
# in the item's data dict by TrueNASEntity.extra_state_attributes.
DEVICE_ATTRIBUTES_NETWORK = [
    "description",
    "mtu",
    "link_state",
    "active_media_type",
    "active_media_subtype",
    "link_address",
]

DEVICE_ATTRIBUTES_POOL = [
    "path",
    "status",
    "healthy",
    "is_decrypted",
    "autotrim",
    "scrub_state",
    "scrub_start",
    "scrub_end",
    "scrub_secs_left",
]

DEVICE_ATTRIBUTES_DATASET = [
    "type",
    "pool",
    "mountpoint",
    "deduplication",
    "atime",
    "casesensitivity",
    "checksum",
    "exec",
    "sync",
    "compression",
    "compressratio",
    "quota",
    "copies",
    "readonly",
    "recordsize",
    "encryption_algorithm",
    "used",
    "available",
]

DEVICE_ATTRIBUTES_DISK = [
    "serial",
    "size",
    "hddstandby",
    "hddstandby_force",
    "advpowermgmt",
    "acousticlevel",
    "togglesmart",
    "model",
    "rotationrate",
    "type",
]

DEVICE_ATTRIBUTES_CPU = [
    "cpu_interrupt",
    "cpu_system",
    "cpu_user",
    "cpu_nice",
    "cpu_idle",
]

DEVICE_ATTRIBUTES_MEMORY = [
    "memory-used_value",
    "memory-free_value",
    "memory-cached_value",
    "memory-buffered_value",
    "memory-total_value",
]

DEVICE_ATTRIBUTES_CLOUDSYNC = [
    "direction",
    "path",
    "enabled",
    "transfer_mode",
    "snapshot",
    "time_started",
    "time_finished",
    "job_percent",
    "job_description",
]

DEVICE_ATTRIBUTES_REPLICATION = [
    "source_datasets",
    "target_dataset",
    "recursive",
    "enabled",
    "direction",
    "transport",
    "auto",
    "retention_policy",
    "state",
    "time_started",
    "time_finished",
    "job_percent",
    "job_description",
]

DEVICE_ATTRIBUTES_SNAPSHOTTASK = [
    "recursive",
    "lifetime_value",
    "lifetime_unit",
    "enabled",
    "naming_schema",
    "allow_empty",
    "vmware_sync",
    "state",
    "datetime",
]
|
||||
|
||||
|
||||
@dataclass
class TrueNASSensorEntityDescription(SensorEntityDescription):
    """Class describing TrueNAS sensor entities"""

    # Device-registry group the entity is attached to; a "data__<key>"
    # value resolves the group name from the item's data at runtime.
    ha_group: str = ""
    ha_connection: str = ""
    ha_connection_value: str = ""
    # Key into the controller's data dict for this entity type.
    data_path: str = ""
    # Attribute within the data dict that holds the sensor state.
    data_attribute: str = ""
    # Attribute used for the display name of per-item entities.
    data_name: str = ""
    data_uid: str = ""
    # Attribute identifying individual items; empty => singleton entity.
    data_reference: str = ""
    # Attributes copied into extra_state_attributes.
    # (default_factory=list replaces the needless lambda wrapper.)
    data_attributes_list: List = field(default_factory=list)
    # Name of the entity class resolved via the platform dispatcher.
    func: str = "TrueNASSensor"
|
||||
|
||||
|
||||
# Sensor entity descriptions. Arguments equal to the dataclass defaults
# (None for unit/device_class/state_class/entity_category, "" for the
# data_* fields) are omitted; see TrueNASSensorEntityDescription.
SENSOR_TYPES = {
    "system_uptime": TrueNASSensorEntityDescription(
        key="system_uptime",
        name="Uptime",
        icon="mdi:clock-outline",
        device_class=SensorDeviceClass.TIMESTAMP,
        entity_category=EntityCategory.DIAGNOSTIC,
        ha_group="System",
        data_path="system_info",
        data_attribute="uptimeEpoch",
        func="TrueNASUptimeSensor",
    ),
    "system_cpu_temperature": TrueNASSensorEntityDescription(
        key="system_cpu_temperature",
        name="Temperature",
        icon="mdi:thermometer",
        native_unit_of_measurement=TEMP_CELSIUS,
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
        ha_group="System",
        data_path="system_info",
        data_attribute="cpu_temperature",
    ),
    "system_load_shortterm": TrueNASSensorEntityDescription(
        key="system_load_shortterm",
        name="CPU load shortterm",
        icon="mdi:gauge",
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
        ha_group="System",
        data_path="system_info",
        data_attribute="load_shortterm",
    ),
    "system_load_midterm": TrueNASSensorEntityDescription(
        key="system_load_midterm",
        name="CPU load midterm",
        icon="mdi:gauge",
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
        ha_group="System",
        data_path="system_info",
        data_attribute="load_midterm",
    ),
    "system_load_longterm": TrueNASSensorEntityDescription(
        key="system_load_longterm",
        name="CPU load longterm",
        icon="mdi:gauge",
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
        ha_group="System",
        data_path="system_info",
        data_attribute="load_longterm",
    ),
    "system_cpu_usage": TrueNASSensorEntityDescription(
        key="system_cpu_usage",
        name="CPU usage",
        icon="mdi:cpu-64-bit",
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
        ha_group="System",
        data_path="system_info",
        data_attribute="cpu_usage",
        data_attributes_list=DEVICE_ATTRIBUTES_CPU,
    ),
    "system_memory_usage": TrueNASSensorEntityDescription(
        key="system_memory_usage",
        name="Memory usage",
        icon="mdi:memory",
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
        ha_group="System",
        data_path="system_info",
        data_attribute="memory-usage_percent",
        data_attributes_list=DEVICE_ATTRIBUTES_MEMORY,
    ),
    "system_cache_size-arc_value": TrueNASSensorEntityDescription(
        key="system_cache_size-arc_value",
        name="ARC size",
        icon="mdi:memory",
        native_unit_of_measurement=DATA_GIBIBYTES,
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
        ha_group="System",
        data_path="system_info",
        data_attribute="cache_size-arc_value",
    ),
    "system_cache_size-L2_value": TrueNASSensorEntityDescription(
        key="system_cache_size-L2_value",
        name="L2ARC size",
        icon="mdi:memory",
        native_unit_of_measurement=DATA_GIBIBYTES,
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
        ha_group="System",
        data_path="system_info",
        data_attribute="cache_size-L2_value",
    ),
    "system_cache_ratio-arc_value": TrueNASSensorEntityDescription(
        key="system_cache_ratio-arc_value",
        name="ARC ratio",
        icon="mdi:aspect-ratio",
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
        ha_group="System",
        data_path="system_info",
        data_attribute="cache_ratio-arc_value",
    ),
    "system_cache_ratio-L2_value": TrueNASSensorEntityDescription(
        key="system_cache_ratio-L2_value",
        name="L2ARC ratio",
        icon="mdi:aspect-ratio",
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
        ha_group="System",
        data_path="system_info",
        data_attribute="cache_ratio-L2_value",
    ),
    "dataset": TrueNASSensorEntityDescription(
        key="dataset",
        name="",
        icon="mdi:database",
        native_unit_of_measurement=DATA_GIBIBYTES,
        state_class=SensorStateClass.MEASUREMENT,
        ha_group="Datasets",
        data_path="dataset",
        data_attribute="used_gb",
        data_name="name",
        data_reference="id",
        data_attributes_list=DEVICE_ATTRIBUTES_DATASET,
        func="TrueNASDatasetSensor",
    ),
    "disk": TrueNASSensorEntityDescription(
        key="disk",
        name="",
        icon="mdi:harddisk",
        native_unit_of_measurement=TEMP_CELSIUS,
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        ha_group="Disks",
        data_path="disk",
        data_attribute="temperature",
        data_name="name",
        data_reference="devname",
        data_attributes_list=DEVICE_ATTRIBUTES_DISK,
    ),
    "pool_free": TrueNASSensorEntityDescription(
        key="pool_free",
        name="free",
        icon="mdi:database-settings",
        native_unit_of_measurement=DATA_GIBIBYTES,
        state_class=SensorStateClass.MEASUREMENT,
        ha_group="System",
        data_path="pool",
        data_attribute="available_gib",
        data_name="name",
        data_reference="guid",
        data_attributes_list=DEVICE_ATTRIBUTES_POOL,
    ),
    "cloudsync": TrueNASSensorEntityDescription(
        key="cloudsync",
        name="",
        icon="mdi:cloud-upload",
        ha_group="Cloudsync",
        data_path="cloudsync",
        data_attribute="state",
        data_name="description",
        data_reference="id",
        data_attributes_list=DEVICE_ATTRIBUTES_CLOUDSYNC,
        func="TrueNASClousyncSensor",
    ),
    "replication": TrueNASSensorEntityDescription(
        key="replication",
        name="",
        icon="mdi:transfer",
        ha_group="Replication",
        data_path="replication",
        data_attribute="state",
        data_name="name",
        data_reference="id",
        data_attributes_list=DEVICE_ATTRIBUTES_REPLICATION,
    ),
    "snapshottask": TrueNASSensorEntityDescription(
        key="snapshottask",
        name="",
        icon="mdi:checkbox-marked-circle-plus-outline",
        ha_group="Snapshot tasks",
        data_path="snapshottask",
        data_attribute="state",
        data_name="dataset",
        data_reference="id",
        data_attributes_list=DEVICE_ATTRIBUTES_SNAPSHOTTASK,
    ),
    "traffic_rx": TrueNASSensorEntityDescription(
        key="traffic_rx",
        name="RX",
        icon="mdi:download-network-outline",
        native_unit_of_measurement=DATA_KIBIBYTES,
        state_class=SensorStateClass.MEASUREMENT,
        ha_group="System",
        data_path="interface",
        data_attribute="rx",
        data_name="name",
        data_reference="id",
        data_attributes_list=DEVICE_ATTRIBUTES_NETWORK,
    ),
    "traffic_tx": TrueNASSensorEntityDescription(
        key="traffic_tx",
        name="TX",
        icon="mdi:upload-network-outline",
        native_unit_of_measurement=DATA_KIBIBYTES,
        state_class=SensorStateClass.MEASUREMENT,
        ha_group="System",
        data_path="interface",
        data_attribute="tx",
        data_name="name",
        data_reference="id",
        data_attributes_list=DEVICE_ATTRIBUTES_NETWORK,
    ),
}
|
||||
|
||||
# Entity services registered by the sensor platform:
# (service name, validation schema, entity method to invoke).
SENSOR_SERVICES = [
    [SERVICE_CLOUDSYNC_RUN, SCHEMA_SERVICE_CLOUDSYNC_RUN, "start"],
    [SERVICE_DATASET_SNAPSHOT, SCHEMA_SERVICE_DATASET_SNAPSHOT, "snapshot"],
    [SERVICE_SYSTEM_REBOOT, SCHEMA_SERVICE_SYSTEM_REBOOT, "restart"],
    [SERVICE_SYSTEM_SHUTDOWN, SCHEMA_SERVICE_SYSTEM_SHUTDOWN, "stop"],
]
|
||||
@@ -0,0 +1,91 @@
|
||||
---
|
||||
cloudsync_run:
|
||||
name: Cloudsync Run
|
||||
description: Start a Cloudsync Job
|
||||
target:
|
||||
entity:
|
||||
domain: sensor
|
||||
|
||||
dataset_snapshot:
|
||||
name: Dataset Snapshot
|
||||
description: Create a Dataset Snapshot
|
||||
target:
|
||||
entity:
|
||||
domain: sensor
|
||||
|
||||
system_reboot:
|
||||
name: Reboot TrueNAS
|
||||
description: Reboot TrueNAS System (Target Uptime Sensor)
|
||||
target:
|
||||
entity:
|
||||
domain: sensor
|
||||
|
||||
system_shutdown:
|
||||
name: Shutdown TrueNAS
|
||||
description: Shutdown TrueNAS System (Target Uptime Sensor)
|
||||
target:
|
||||
entity:
|
||||
domain: sensor
|
||||
|
||||
service_start:
|
||||
name: Service Start
|
||||
description: Start a Service
|
||||
target:
|
||||
entity:
|
||||
domain: binary_sensor
|
||||
|
||||
service_stop:
|
||||
name: Service Stop
|
||||
description: Stop a Service
|
||||
target:
|
||||
entity:
|
||||
domain: binary_sensor
|
||||
|
||||
service_restart:
|
||||
name: Service Restart
|
||||
description: Restart a Service
|
||||
target:
|
||||
entity:
|
||||
domain: binary_sensor
|
||||
|
||||
service_reload:
|
||||
name: Service Reload
|
||||
description: Reload a Service
|
||||
target:
|
||||
entity:
|
||||
domain: binary_sensor
|
||||
|
||||
jail_start:
|
||||
name: Jail Start
|
||||
description: Start a Jail
|
||||
target:
|
||||
entity:
|
||||
domain: binary_sensor
|
||||
|
||||
jail_stop:
|
||||
name: Jail Stop
|
||||
description: Stop a Jail
|
||||
target:
|
||||
entity:
|
||||
domain: binary_sensor
|
||||
|
||||
jail_restart:
|
||||
name: Jail Restart
|
||||
description: Restart a Jail
|
||||
target:
|
||||
entity:
|
||||
domain: binary_sensor
|
||||
|
||||
vm_start:
|
||||
name: VM Start
|
||||
description: Start a VM
|
||||
target:
|
||||
entity:
|
||||
domain: binary_sensor
|
||||
|
||||
vm_stop:
|
||||
name: VM Stop
|
||||
description: Stop a VM
|
||||
target:
|
||||
entity:
|
||||
domain: binary_sensor
|
||||
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Set up TrueNAS integration.",
|
||||
"data": {
|
||||
"name": "Name of the integration",
|
||||
"host": "Host",
|
||||
"api_key": "API key",
|
||||
"ssl": "Use SSL",
|
||||
"verify_ssl": "Verify SSL certificate"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"name_exists": "Name already exists.",
|
||||
"no_response": "No response from host.",
|
||||
"401": "No authorization for this endpoint.",
|
||||
"404": "API not found on this host.",
|
||||
"500": "Internal error."
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Set up TrueNAS integration.",
|
||||
"data": {
|
||||
"name": "Name of the integration",
|
||||
"host": "Host",
|
||||
"api_key": "API key",
|
||||
"ssl": "Use SSL",
|
||||
"verify_ssl": "Verify SSL certificate"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"name_exists": "Name already exists.",
|
||||
"401": "No authorization for this endpoint.",
|
||||
"404": "API not found on this host.",
|
||||
"500": "Internal error.",
|
||||
"no_response": "No response from host."
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Configure a integração TrueNAS.",
|
||||
"data": {
|
||||
"name": "Nome da integração",
|
||||
"host": "Host",
|
||||
"api_key": "Chave API",
|
||||
"ssl": "Usar SSL",
|
||||
"verify_ssl": "Verificar certificado SSL"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"name_exists": "O nome já existe.",
|
||||
"401": "Nenhuma autorização para este endpoint.",
|
||||
"404": "API não encontrada neste host.",
|
||||
"500": "Erro interno.",
|
||||
"no_response": "Sem resposta do host."
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,121 @@
|
||||
"""TrueNAS API"""
|
||||
from requests import get as requests_get, post as requests_post
|
||||
from logging import getLogger
|
||||
from threading import Lock
|
||||
from voluptuous import Optional
|
||||
|
||||
_LOGGER = getLogger(__name__)
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# TrueNASAPI
|
||||
# ---------------------------
|
||||
class TrueNASAPI(object):
    """Handle all communication with TrueNAS.

    Wraps the TrueNAS REST API v2.0. All calls are synchronous (requests)
    and serialized through a threading lock; connection state and the last
    error code are tracked on the instance.
    """

    def __init__(self, hass, host, api_key, use_ssl=False, verify_ssl=True):
        """Initialize the TrueNAS API.

        Args:
            hass: Home Assistant instance (kept for reference, not used here).
            host: Hostname or IP of the TrueNAS box.
            api_key: Bearer token for the REST API.
            use_ssl: Use https when True, plain http otherwise.
            verify_ssl: Verify the TLS certificate (only meaningful with SSL).
        """
        self._hass = hass
        self._host = host
        self._use_ssl = use_ssl
        self._api_key = api_key
        self._protocol = "https" if self._use_ssl else "http"
        self._ssl_verify = verify_ssl
        if not self._use_ssl:
            # Certificate verification is irrelevant over plain HTTP;
            # force it on so the flag has a consistent value.
            self._ssl_verify = True
        self._url = f"{self._protocol}://{self._host}/api/v2.0/"

        self.lock = Lock()
        self._connected = False
        self._error = ""

    # ---------------------------
    #   connected
    # ---------------------------
    def connected(self) -> bool:
        """Return connected boolean"""
        return self._connected

    # ---------------------------
    #   connection_test
    # ---------------------------
    def connection_test(self):
        """TrueNAS connection test.

        Issues a cheap query and returns (connected, error) afterwards.
        """
        self.query("pool")

        return self._connected, self._error

    # ---------------------------
    #   query
    # ---------------------------
    def query(self, service, method="get", params=None) -> "list | None":
        """Retrieve data from TrueNAS.

        Args:
            service: REST endpoint path relative to /api/v2.0/.
            method: "get" or "post".
            params: Query parameters (GET) or JSON body (POST).

        Returns:
            The decoded JSON payload on success, None on any error. On error
            self._error holds the HTTP status code or "no_response".
        """
        # BUG FIX: the default was a shared mutable dict ({}); use a None
        # sentinel so callers can never corrupt each other's parameters.
        if params is None:
            params = {}

        # Lock held as a context manager so it is always released, even if
        # something unexpected raises inside the request handling.
        with self.lock:
            response = None
            error = False
            try:
                _LOGGER.debug(
                    "TrueNAS %s query: %s, %s, %s",
                    self._host,
                    service,
                    method,
                    params,
                )

                headers = {
                    "Content-Type": "application/json",
                    "Authorization": f"Bearer {self._api_key}",
                }
                if method == "get":
                    response = requests_get(
                        f"{self._url}{service}",
                        headers=headers,
                        params=params,
                        verify=self._ssl_verify,
                    )

                elif method == "post":
                    response = requests_post(
                        f"{self._url}{service}",
                        headers=headers,
                        json=params,
                        verify=self._ssl_verify,
                    )

                if response is not None and response.status_code == 200:
                    data = response.json()
                    _LOGGER.debug("TrueNAS %s query response: %s", self._host, data)
                else:
                    # Unknown method or non-200 status.
                    error = True
            except Exception:
                # Network failure, timeout, or JSON decode error.
                error = True

            if error:
                # BUG FIX: previously relied on catching NameError when
                # "response" was never assigned; test explicitly instead.
                errorcode = (
                    response.status_code if response is not None else "no_response"
                )

                _LOGGER.warning(
                    'TrueNAS %s unable to fetch data "%s" (%s)',
                    self._host,
                    service,
                    errorcode,
                )

                # A 500 on reporting/get_data is handled by the caller and
                # must not flip the global connected flag.
                if errorcode != 500 and service != "reporting/get_data":
                    self._connected = False

                self._error = errorcode
                return None

            self._connected = True
            self._error = ""

            return data

    @property
    def error(self):
        """Last error code ("" when the previous query succeeded)."""
        return self._error
|
||||
@@ -0,0 +1,944 @@
|
||||
"""TrueNAS Controller"""
|
||||
from asyncio import wait_for as asyncio_wait_for, Lock as Asyncio_lock
|
||||
from datetime import datetime, timedelta
|
||||
from logging import getLogger
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_NAME,
|
||||
CONF_API_KEY,
|
||||
CONF_SSL,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers import entity_registry as er, device_registry as dr
|
||||
from homeassistant.helpers.entity_registry import async_entries_for_config_entry
|
||||
from .const import DOMAIN
|
||||
from .apiparser import parse_api, utc_from_timestamp
|
||||
from .truenas_api import TrueNASAPI
|
||||
from .helper import as_local, b2gib
|
||||
|
||||
_LOGGER = getLogger(__name__)
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# TrueNASControllerData
|
||||
# ---------------------------
|
||||
class TrueNASControllerData(object):
|
||||
"""TrueNASControllerData Class"""
|
||||
|
||||
def __init__(self, hass, config_entry):
|
||||
"""Initialize TrueNASController"""
|
||||
self.hass = hass
|
||||
self.config_entry = config_entry
|
||||
self.name = config_entry.data[CONF_NAME]
|
||||
self.host = config_entry.data[CONF_HOST]
|
||||
|
||||
self.data = {
|
||||
"interface": {},
|
||||
"disk": {},
|
||||
"pool": {},
|
||||
"dataset": {},
|
||||
"system_info": {},
|
||||
"service": {},
|
||||
"jail": {},
|
||||
"vm": {},
|
||||
"cloudsync": {},
|
||||
"replication": {},
|
||||
"snapshottask": {},
|
||||
}
|
||||
|
||||
self.listeners = []
|
||||
self.lock = Asyncio_lock()
|
||||
|
||||
self.api = TrueNASAPI(
|
||||
hass,
|
||||
config_entry.data[CONF_HOST],
|
||||
config_entry.data[CONF_API_KEY],
|
||||
config_entry.data[CONF_SSL],
|
||||
config_entry.data[CONF_VERIFY_SSL],
|
||||
)
|
||||
|
||||
self._systemstats_errored = []
|
||||
self.datasets_hass_device_id = None
|
||||
|
||||
self._force_update_callback = None
|
||||
self._is_scale = False
|
||||
self._is_virtual = False
|
||||
|
||||
# ---------------------------
|
||||
# async_init
|
||||
# ---------------------------
|
||||
    async def async_init(self):
        """Start the periodic update timer (every 60 seconds)."""
        # async_track_time_interval returns the unsubscribe callable; kept so
        # the timer could be cancelled later.
        self._force_update_callback = async_track_time_interval(
            self.hass, self.force_update, timedelta(seconds=60)
        )
|
||||
|
||||
# ---------------------------
|
||||
# signal_update
|
||||
# ---------------------------
|
||||
@property
|
||||
def signal_update(self):
|
||||
"""Event to signal new data"""
|
||||
return f"{DOMAIN}-update-{self.name}"
|
||||
|
||||
# ---------------------------
|
||||
# async_reset
|
||||
# ---------------------------
|
||||
async def async_reset(self):
|
||||
"""Reset dispatchers"""
|
||||
for unsub_dispatcher in self.listeners:
|
||||
unsub_dispatcher()
|
||||
|
||||
self.listeners = []
|
||||
return True
|
||||
|
||||
# ---------------------------
|
||||
# connected
|
||||
# ---------------------------
|
||||
    def connected(self):
        """Return connected state"""
        # Thin delegate to the API client's connection flag.
        return self.api.connected()
|
||||
|
||||
# ---------------------------
|
||||
# force_update
|
||||
# ---------------------------
|
||||
    @callback
    async def force_update(self, _now=None):
        """Trigger update by timer"""
        # _now is the datetime supplied by async_track_time_interval; unused.
        # NOTE(review): @callback on a coroutine function is unusual in Home
        # Assistant (the decorator marks synchronous callbacks) — confirm it
        # is intentional.
        await self.async_update()
|
||||
|
||||
# ---------------------------
|
||||
# async_update
|
||||
# ---------------------------
|
||||
async def async_update(self):
|
||||
"""Update TrueNAS data"""
|
||||
try:
|
||||
await asyncio_wait_for(self.lock.acquire(), timeout=10)
|
||||
except Exception:
|
||||
return
|
||||
|
||||
await self.hass.async_add_executor_job(self.get_systeminfo)
|
||||
if self.api.connected():
|
||||
await self.hass.async_add_executor_job(self.get_systemstats)
|
||||
if self.api.connected():
|
||||
await self.hass.async_add_executor_job(self.get_service)
|
||||
if self.api.connected():
|
||||
await self.hass.async_add_executor_job(self.get_disk)
|
||||
if self.api.connected():
|
||||
await self.hass.async_add_executor_job(self.get_dataset)
|
||||
if self.api.connected():
|
||||
await self.hass.async_add_executor_job(self.get_pool)
|
||||
if self.api.connected():
|
||||
await self.hass.async_add_executor_job(self.get_jail)
|
||||
if self.api.connected():
|
||||
await self.hass.async_add_executor_job(self.get_vm)
|
||||
if self.api.connected():
|
||||
await self.hass.async_add_executor_job(self.get_cloudsync)
|
||||
if self.api.connected():
|
||||
await self.hass.async_add_executor_job(self.get_replication)
|
||||
if self.api.connected():
|
||||
await self.hass.async_add_executor_job(self.get_snapshottask)
|
||||
|
||||
async_dispatcher_send(self.hass, self.signal_update)
|
||||
self.lock.release()
|
||||
|
||||
# ---------------------------
|
||||
# get_systeminfo
|
||||
# ---------------------------
|
||||
    def get_systeminfo(self):
        """Get system info from TrueNAS.

        Populates data["system_info"] (version, uptime, update status) and
        data["interface"], and derives the _is_scale / _is_virtual flags.
        Returns early whenever a query drops the connection.
        """
        self.data["system_info"] = parse_api(
            data=self.data["system_info"],
            source=self.api.query("system/info"),
            vals=[
                {"name": "version", "default": "unknown"},
                {"name": "hostname", "default": "unknown"},
                {"name": "uptime_seconds", "default": 0},
                {"name": "system_serial", "default": "unknown"},
                {"name": "system_product", "default": "unknown"},
                {"name": "system_manufacturer", "default": "unknown"},
            ],
            # ensure_vals pre-seeds keys filled later by get_systemstats and
            # the update-job handling below.
            ensure_vals=[
                {"name": "uptimeEpoch", "default": 0},
                {"name": "cpu_temperature", "default": 0.0},
                {"name": "load_shortterm", "default": 0.0},
                {"name": "load_midterm", "default": 0.0},
                {"name": "load_longterm", "default": 0.0},
                {"name": "cpu_interrupt", "default": 0.0},
                {"name": "cpu_system", "default": 0.0},
                {"name": "cpu_user", "default": 0.0},
                {"name": "cpu_nice", "default": 0.0},
                {"name": "cpu_idle", "default": 0.0},
                {"name": "cpu_usage", "default": 0.0},
                {"name": "cache_size-arc_value", "default": 0.0},
                {"name": "cache_size-L2_value", "default": 0.0},
                {"name": "cache_ratio-arc_value", "default": 0},
                {"name": "cache_ratio-L2_value", "default": 0},
                {"name": "memory-used_value", "default": 0.0},
                {"name": "memory-free_value", "default": 0.0},
                {"name": "memory-cached_value", "default": 0.0},
                {"name": "memory-buffered_value", "default": 0.0},
                {"name": "memory-total_value", "default": 0.0},
                {"name": "memory-usage_percent", "default": 0},
                {"name": "update_available", "type": "bool", "default": False},
                {"name": "update_progress", "default": 0},
                {"name": "update_jobid", "default": 0},
                {"name": "update_state", "default": "unknown"},
            ],
        )
        if not self.api.connected():
            return

        # Query the updater for available OS upgrades.
        self.data["system_info"] = parse_api(
            data=self.data["system_info"],
            source=self.api.query("update/check_available", method="post"),
            vals=[
                {
                    "name": "update_status",
                    "source": "status",
                    "default": "unknown",
                },
                {
                    "name": "update_version",
                    "source": "version",
                    "default": "unknown",
                },
            ],
        )

        if not self.api.connected():
            return

        self.data["system_info"]["update_available"] = (
            self.data["system_info"]["update_status"] == "AVAILABLE"
        )

        # No pending update: report the running version as the target.
        if not self.data["system_info"]["update_available"]:
            self.data["system_info"]["update_version"] = self.data["system_info"][
                "version"
            ]

        # A non-zero jobid means an update job was started; poll its progress.
        if self.data["system_info"]["update_jobid"]:
            self.data["system_info"] = parse_api(
                data=self.data["system_info"],
                source=self.api.query(
                    "core/get_jobs",
                    method="get",
                    params={"id": self.data["system_info"]["update_jobid"]},
                ),
                vals=[
                    {
                        "name": "update_progress",
                        "source": "progress/percent",
                        "default": 0,
                    },
                    {
                        "name": "update_state",
                        "source": "state",
                        "default": "unknown",
                    },
                ],
            )
            if not self.api.connected():
                return

            # Reset the tracking fields once the job stopped running or the
            # update disappeared.
            if (
                self.data["system_info"]["update_state"] != "RUNNING"
                or not self.data["system_info"]["update_available"]
            ):
                self.data["system_info"]["update_progress"] = 0
                self.data["system_info"]["update_jobid"] = 0
                self.data["system_info"]["update_state"] = "unknown"

        # SCALE is detected from the version string prefix.
        self._is_scale = bool(
            self.data["system_info"]["version"].startswith("TrueNAS-SCALE-")
        )

        # Virtualized systems expose no CPU temperature sensors.
        self._is_virtual = self.data["system_info"]["system_manufacturer"] in [
            "QEMU",
            "VMware, Inc.",
        ] or self.data["system_info"]["system_product"] in [
            "VirtualBox",
        ]

        # Convert the uptime delta into a local-time ISO boot timestamp.
        if self.data["system_info"]["uptime_seconds"] > 0:
            now = datetime.now().replace(microsecond=0)
            uptime_tm = datetime.timestamp(
                now - timedelta(seconds=int(self.data["system_info"]["uptime_seconds"]))
            )
            self.data["system_info"]["uptimeEpoch"] = str(
                as_local(utc_from_timestamp(uptime_tm)).isoformat()
            )

        self.data["interface"] = parse_api(
            data=self.data["interface"],
            source=self.api.query("interface"),
            key="id",
            vals=[
                {"name": "id", "default": "unknown"},
                {"name": "name", "default": "unknown"},
                {"name": "description", "default": "unknown"},
                {"name": "mtu", "default": "unknown"},
                {
                    "name": "link_state",
                    "source": "state/link_state",
                    "default": "unknown",
                },
                {
                    "name": "active_media_type",
                    "source": "state/active_media_type",
                    "default": "unknown",
                },
                {
                    "name": "active_media_subtype",
                    "source": "state/active_media_subtype",
                    "default": "unknown",
                },
                {
                    "name": "link_address",
                    "source": "state/link_address",
                    "default": "unknown",
                },
            ],
            # rx/tx are filled by get_systemstats from the reporting graphs.
            ensure_vals=[
                {"name": "rx", "default": 0},
                {"name": "tx", "default": 0},
            ],
        )
|
||||
|
||||
# ---------------------------
|
||||
# get_systemstats
|
||||
# ---------------------------
|
||||
def get_systemstats(self):
|
||||
# Get graphs
|
||||
tmp_params = {
|
||||
"graphs": [
|
||||
{"name": "load"},
|
||||
{"name": "cputemp"},
|
||||
{"name": "cpu"},
|
||||
{"name": "arcsize"},
|
||||
{"name": "arcratio"},
|
||||
{"name": "memory"},
|
||||
],
|
||||
"reporting_query": {
|
||||
"start": "now-90s",
|
||||
"end": "now-30s",
|
||||
"aggregate": True,
|
||||
},
|
||||
}
|
||||
|
||||
for uid, vals in self.data["interface"].items():
|
||||
tmp_params["graphs"].append({"name": "interface", "identifier": uid})
|
||||
|
||||
if self._is_virtual:
|
||||
tmp_params["graphs"].remove({"name": "cputemp"})
|
||||
|
||||
for tmp in tmp_params["graphs"]:
|
||||
if tmp["name"] in self._systemstats_errored:
|
||||
tmp_params["graphs"].remove(tmp)
|
||||
|
||||
if not tmp_params["graphs"]:
|
||||
return
|
||||
|
||||
tmp_graph = self.api.query(
|
||||
"reporting/get_data",
|
||||
method="post",
|
||||
params=tmp_params,
|
||||
)
|
||||
|
||||
if not isinstance(tmp_graph, list):
|
||||
if self.api.error == 500:
|
||||
for tmp in tmp_params["graphs"]:
|
||||
tmp2 = self.api.query(
|
||||
"reporting/get_data",
|
||||
method="post",
|
||||
params={
|
||||
"graphs": [
|
||||
tmp,
|
||||
],
|
||||
"reporting_query": {
|
||||
"start": "now-90s",
|
||||
"end": "now-30s",
|
||||
"aggregate": True,
|
||||
},
|
||||
},
|
||||
)
|
||||
if not isinstance(tmp2, list) and self.api.error == 500:
|
||||
self._systemstats_errored.append(tmp["name"])
|
||||
|
||||
_LOGGER.warning(
|
||||
"TrueNAS %s fetching following graphs failed, check your NAS: %s",
|
||||
self.host,
|
||||
self._systemstats_errored,
|
||||
)
|
||||
self.get_systemstats()
|
||||
|
||||
return
|
||||
|
||||
for i in range(len(tmp_graph)):
|
||||
if "name" not in tmp_graph[i]:
|
||||
continue
|
||||
|
||||
# CPU temperature
|
||||
if tmp_graph[i]["name"] == "cputemp":
|
||||
if "aggregations" in tmp_graph[i]:
|
||||
self.data["system_info"]["cpu_temperature"] = round(
|
||||
max(list(filter(None, tmp_graph[i]["aggregations"]["mean"]))), 1
|
||||
)
|
||||
else:
|
||||
self.data["system_info"]["cpu_temperature"] = 0.0
|
||||
|
||||
# CPU load
|
||||
if tmp_graph[i]["name"] == "load":
|
||||
tmp_arr = ("load_shortterm", "load_midterm", "load_longterm")
|
||||
self._systemstats_process(tmp_arr, tmp_graph[i], "")
|
||||
|
||||
# CPU usage
|
||||
if tmp_graph[i]["name"] == "cpu":
|
||||
tmp_arr = ("interrupt", "system", "user", "nice", "idle")
|
||||
self._systemstats_process(tmp_arr, tmp_graph[i], "cpu")
|
||||
self.data["system_info"]["cpu_usage"] = round(
|
||||
self.data["system_info"]["cpu_system"]
|
||||
+ self.data["system_info"]["cpu_user"],
|
||||
2,
|
||||
)
|
||||
|
||||
# Interface
|
||||
if tmp_graph[i]["name"] == "interface":
|
||||
tmp_etc = tmp_graph[i]["identifier"]
|
||||
if tmp_etc in self.data["interface"]:
|
||||
# 12->13 API change
|
||||
tmp_graph[i]["legend"] = [
|
||||
tmp.replace("if_octets_", "") for tmp in tmp_graph[i]["legend"]
|
||||
]
|
||||
tmp_arr = ("rx", "tx")
|
||||
if "aggregations" in tmp_graph[i]:
|
||||
for e in range(len(tmp_graph[i]["legend"])):
|
||||
tmp_var = tmp_graph[i]["legend"][e]
|
||||
if tmp_var in tmp_arr:
|
||||
tmp_val = tmp_graph[i]["aggregations"]["mean"][e] or 0.0
|
||||
self.data["interface"][tmp_etc][tmp_var] = round(
|
||||
(tmp_val / 1024), 2
|
||||
)
|
||||
else:
|
||||
for tmp_load in tmp_arr:
|
||||
self.data["interface"][tmp_etc][tmp_load] = 0.0
|
||||
|
||||
# arcratio
|
||||
if tmp_graph[i]["name"] == "memory":
|
||||
tmp_arr = (
|
||||
"memory-used_value",
|
||||
"memory-free_value",
|
||||
"memory-cached_value",
|
||||
"memory-buffered_value",
|
||||
)
|
||||
self._systemstats_process(tmp_arr, tmp_graph[i], "memory")
|
||||
self.data["system_info"]["memory-total_value"] = round(
|
||||
self.data["system_info"]["memory-used_value"]
|
||||
+ self.data["system_info"]["memory-free_value"]
|
||||
+ self.data["system_info"]["cache_size-arc_value"],
|
||||
2,
|
||||
)
|
||||
if self.data["system_info"]["memory-total_value"] > 0:
|
||||
self.data["system_info"]["memory-usage_percent"] = round(
|
||||
100
|
||||
* (
|
||||
float(self.data["system_info"]["memory-total_value"])
|
||||
- float(self.data["system_info"]["memory-free_value"])
|
||||
)
|
||||
/ float(self.data["system_info"]["memory-total_value"]),
|
||||
0,
|
||||
)
|
||||
|
||||
# arcsize
|
||||
if tmp_graph[i]["name"] == "arcsize":
|
||||
tmp_arr = ("cache_size-arc_value", "cache_size-L2_value")
|
||||
self._systemstats_process(tmp_arr, tmp_graph[i], "memory")
|
||||
|
||||
# arcratio
|
||||
if tmp_graph[i]["name"] == "arcratio":
|
||||
tmp_arr = ("cache_ratio-arc_value", "cache_ratio-L2_value")
|
||||
self._systemstats_process(tmp_arr, tmp_graph[i], "")
|
||||
|
||||
# ---------------------------
|
||||
# _systemstats_process
|
||||
# ---------------------------
|
||||
def _systemstats_process(self, arr, graph, t):
|
||||
if "aggregations" in graph:
|
||||
for e in range(len(graph["legend"])):
|
||||
tmp_var = graph["legend"][e]
|
||||
if tmp_var in arr:
|
||||
tmp_val = graph["aggregations"]["mean"][e] or 0.0
|
||||
if t == "memory":
|
||||
self.data["system_info"][tmp_var] = b2gib(tmp_val)
|
||||
elif t == "cpu":
|
||||
self.data["system_info"][f"cpu_{tmp_var}"] = round(tmp_val, 2)
|
||||
else:
|
||||
self.data["system_info"][tmp_var] = round(tmp_val, 2)
|
||||
else:
|
||||
for tmp_load in arr:
|
||||
if t == "cpu":
|
||||
self.data["system_info"][f"cpu_{tmp_load}"] = 0.0
|
||||
else:
|
||||
self.data["system_info"][tmp_load] = 0.0
|
||||
|
||||
# ---------------------------
|
||||
# get_service
|
||||
# ---------------------------
|
||||
def get_service(self):
|
||||
"""Get service info from TrueNAS"""
|
||||
self.data["service"] = parse_api(
|
||||
data=self.data["service"],
|
||||
source=self.api.query("service"),
|
||||
key="id",
|
||||
vals=[
|
||||
{"name": "id", "default": 0},
|
||||
{"name": "service", "default": "unknown"},
|
||||
{"name": "enable", "type": "bool", "default": False},
|
||||
{"name": "state", "default": "unknown"},
|
||||
],
|
||||
ensure_vals=[
|
||||
{"name": "running", "type": "bool", "default": False},
|
||||
],
|
||||
)
|
||||
|
||||
for uid, vals in self.data["service"].items():
|
||||
self.data["service"][uid]["running"] = vals["state"] == "RUNNING"
|
||||
|
||||
# ---------------------------
|
||||
# get_pool
|
||||
# ---------------------------
|
||||
    def get_pool(self):
        """Get pools from TrueNAS.

        Merges storage pools (endpoint "pool") and the boot pool
        ("boot/get_state") into data["pool"], then computes free space in GiB
        from the dataset data collected earlier by get_dataset().
        """
        self.data["pool"] = parse_api(
            data=self.data["pool"],
            source=self.api.query("pool"),
            key="guid",
            vals=[
                {"name": "guid", "default": 0},
                {"name": "id", "default": 0},
                {"name": "name", "default": "unknown"},
                {"name": "path", "default": "unknown"},
                {"name": "status", "default": "unknown"},
                {"name": "healthy", "type": "bool", "default": False},
                {"name": "is_decrypted", "type": "bool", "default": False},
                {
                    "name": "autotrim",
                    "source": "autotrim/parsed",
                    "type": "bool",
                    "default": False,
                },
                {
                    "name": "scan_function",
                    "source": "scan/function",
                    "default": "unknown",
                },
                {"name": "scrub_state", "source": "scan/state", "default": "unknown"},
                {
                    "name": "scrub_start",
                    "source": "scan/start_time/$date",
                    "default": 0,
                    "convert": "utc_from_timestamp",
                },
                {
                    "name": "scrub_end",
                    "source": "scan/end_time/$date",
                    "default": 0,
                    "convert": "utc_from_timestamp",
                },
                {
                    "name": "scrub_secs_left",
                    "source": "scan/total_secs_left",
                    "default": 0,
                },
            ],
            ensure_vals=[
                {"name": "available_gib", "default": 0.0},
            ],
        )

        # The boot pool is reported by a different endpoint and additionally
        # carries root_dataset, used below to compute its free space.
        self.data["pool"] = parse_api(
            data=self.data["pool"],
            source=self.api.query("boot/get_state"),
            key="guid",
            vals=[
                {"name": "guid", "default": 0},
                {"name": "id", "default": 0},
                {"name": "name", "default": "unknown"},
                {"name": "path", "default": "unknown"},
                {"name": "status", "default": "unknown"},
                {"name": "healthy", "type": "bool", "default": False},
                {"name": "is_decrypted", "type": "bool", "default": False},
                {
                    "name": "autotrim",
                    "source": "autotrim/parsed",
                    "type": "bool",
                    "default": False,
                },
                {"name": "root_dataset"},
                {
                    "name": "root_dataset_available",
                    "source": "root_dataset/properties/available/parsed",
                    "default": 0,
                },
                {
                    "name": "scan_function",
                    "source": "scan/function",
                    "default": "unknown",
                },
                {"name": "scrub_state", "source": "scan/state", "default": "unknown"},
                {
                    "name": "scrub_start",
                    "source": "scan/start_time/$date",
                    "default": 0,
                    "convert": "utc_from_timestamp",
                },
                {
                    "name": "scrub_end",
                    "source": "scan/end_time/$date",
                    "default": 0,
                    "convert": "utc_from_timestamp",
                },
                {
                    "name": "scrub_secs_left",
                    "source": "scan/total_secs_left",
                    "default": 0,
                },
            ],
            ensure_vals=[
                {"name": "available_gib", "default": 0.0},
            ],
        )

        # Process pools
        # Map dataset mountpoints to their free space in GiB.
        tmp_dataset = {
            self.data["dataset"][uid]["mountpoint"]: b2gib(vals["available"])
            for uid, vals in self.data["dataset"].items()
        }

        for uid, vals in self.data["pool"].items():
            if vals["path"] in tmp_dataset:
                self.data["pool"][uid]["available_gib"] = tmp_dataset[vals["path"]]

            # Boot pools have no regular dataset; use root_dataset instead,
            # then drop the raw (bulky) root_dataset payload.
            if vals["name"] in ["boot-pool", "freenas-boot"]:
                self.data["pool"][uid]["available_gib"] = b2gib(
                    vals["root_dataset_available"]
                )
                self.data["pool"][uid].pop("root_dataset")
|
||||
|
||||
# ---------------------------
|
||||
# get_dataset
|
||||
# ---------------------------
|
||||
    def get_dataset(self):
        """Get datasets from TrueNAS.

        Rebuilds data["dataset"] from scratch (data={}) on every update, then
        removes Home Assistant entities belonging to datasets that no longer
        exist on the NAS.
        """
        self.data["dataset"] = parse_api(
            data={},
            source=self.api.query("pool/dataset"),
            key="id",
            vals=[
                {"name": "id", "default": "unknown"},
                {"name": "type", "default": "unknown"},
                {"name": "name", "default": "unknown"},
                {"name": "pool", "default": "unknown"},
                {"name": "mountpoint", "default": "unknown"},
                {"name": "comments", "source": "comments/parsed", "default": ""},
                {
                    "name": "deduplication",
                    "source": "deduplication/parsed",
                    "type": "bool",
                    "default": False,
                },
                {
                    "name": "atime",
                    "source": "atime/parsed",
                    "type": "bool",
                    "default": False,
                },
                {
                    "name": "casesensitivity",
                    "source": "casesensitivity/parsed",
                    "default": "unknown",
                },
                {"name": "checksum", "source": "checksum/parsed", "default": "unknown"},
                {
                    "name": "exec",
                    "source": "exec/parsed",
                    "type": "bool",
                    "default": False,
                },
                {"name": "sync", "source": "sync/parsed", "default": "unknown"},
                {
                    "name": "compression",
                    "source": "compression/parsed",
                    "default": "unknown",
                },
                {
                    "name": "compressratio",
                    "source": "compressratio/parsed",
                    "default": "unknown",
                },
                {"name": "quota", "source": "quota/parsed", "default": "unknown"},
                {"name": "copies", "source": "copies/parsed", "default": 0},
                {
                    "name": "readonly",
                    "source": "readonly/parsed",
                    "type": "bool",
                    "default": False,
                },
                {"name": "recordsize", "source": "recordsize/parsed", "default": 0},
                {
                    "name": "encryption_algorithm",
                    "source": "encryption_algorithm/parsed",
                    "default": "unknown",
                },
                {"name": "used", "source": "used/parsed", "default": 0},
                {"name": "available", "source": "available/parsed", "default": 0},
            ],
            ensure_vals=[
                # NOTE(review): value is computed with b2gib (GiB), despite
                # the _gb name — confirm naming is intentional.
                {"name": "used_gb", "default": 0},
            ],
        )

        for uid, vals in self.data["dataset"].items():
            self.data["dataset"][uid]["used_gb"] = b2gib(vals["used"])

        if len(self.data["dataset"]) == 0:
            return

        # Stale-entity cleanup: locate the "<name> Datasets" device once,
        # then drop entities whose dataset id no longer exists on the NAS.
        entities_to_be_removed = []
        if not self.datasets_hass_device_id:
            device_registry = dr.async_get(self.hass)
            for device in device_registry.devices.values():
                if (
                    self.config_entry.entry_id in device.config_entries
                    and device.name.endswith(" Datasets")
                ):
                    self.datasets_hass_device_id = device.id
                    _LOGGER.debug(f"datasets device: {device.name}")

            if not self.datasets_hass_device_id:
                # Device not registered yet (first run); try again next cycle.
                return

        _LOGGER.debug(f"datasets_hass_device_id: {self.datasets_hass_device_id}")
        entity_registry = er.async_get(self.hass)
        entity_entries = async_entries_for_config_entry(
            entity_registry, self.config_entry.entry_id
        )
        for entity in entity_entries:
            # unique_id is "<name>-dataset-<dataset id>"; compare the suffix
            # (lowercased) against current dataset ids.
            if (
                entity.device_id == self.datasets_hass_device_id
                and entity.unique_id.removeprefix(f"{self.name.lower()}-dataset-")
                not in map(str.lower, self.data["dataset"].keys())
            ):
                _LOGGER.debug(f"dataset to be removed: {entity.unique_id}")
                entities_to_be_removed.append(entity.entity_id)

        for entity_id in entities_to_be_removed:
            entity_registry.async_remove(entity_id)
|
||||
|
||||
# ---------------------------
|
||||
# get_disk
|
||||
# ---------------------------
|
||||
def get_disk(self):
|
||||
"""Get disks from TrueNAS"""
|
||||
self.data["disk"] = parse_api(
|
||||
data=self.data["disk"],
|
||||
source=self.api.query("disk"),
|
||||
key="devname",
|
||||
vals=[
|
||||
{"name": "name", "default": "unknown"},
|
||||
{"name": "devname", "default": "unknown"},
|
||||
{"name": "serial", "default": "unknown"},
|
||||
{"name": "size", "default": "unknown"},
|
||||
{"name": "hddstandby", "default": "unknown"},
|
||||
{"name": "hddstandby_force", "type": "bool", "default": False},
|
||||
{"name": "advpowermgmt", "default": "unknown"},
|
||||
{"name": "acousticlevel", "default": "unknown"},
|
||||
{"name": "togglesmart", "type": "bool", "default": False},
|
||||
{"name": "model", "default": "unknown"},
|
||||
{"name": "rotationrate", "default": "unknown"},
|
||||
{"name": "type", "default": "unknown"},
|
||||
],
|
||||
ensure_vals=[
|
||||
{"name": "temperature", "default": 0},
|
||||
],
|
||||
)
|
||||
|
||||
# Get disk temperatures
|
||||
temps = self.api.query(
|
||||
"disk/temperatures",
|
||||
method="post",
|
||||
params={"names": []},
|
||||
)
|
||||
|
||||
if temps:
|
||||
for uid in self.data["disk"]:
|
||||
if uid in temps:
|
||||
self.data["disk"][uid]["temperature"] = temps[uid]
|
||||
|
||||
# ---------------------------
|
||||
# get_jail
|
||||
# ---------------------------
|
||||
    def get_jail(self):
        """Get jails from TrueNAS"""
        # Jails only exist on TrueNAS CORE (FreeBSD); SCALE has none.
        if self._is_scale:
            return

        self.data["jail"] = parse_api(
            data=self.data["jail"],
            source=self.api.query("jail"),
            key="id",
            vals=[
                {"name": "id", "default": "unknown"},
                {"name": "comment", "default": "unknown"},
                {"name": "host_hostname", "default": "unknown"},
                {"name": "jail_zfs_dataset", "default": "unknown"},
                {"name": "last_started", "default": "unknown"},
                {"name": "ip4_addr", "default": "unknown"},
                {"name": "ip6_addr", "default": "unknown"},
                {"name": "release", "default": "unknown"},
                {"name": "state", "type": "bool", "default": False},
                {"name": "type", "default": "unknown"},
                {"name": "plugin_name", "default": "unknown"},
            ],
        )
|
||||
|
||||
# ---------------------------
|
||||
# get_vm
|
||||
# ---------------------------
|
||||
def get_vm(self):
|
||||
"""Get VMs from TrueNAS"""
|
||||
self.data["vm"] = parse_api(
|
||||
data=self.data["vm"],
|
||||
source=self.api.query("vm"),
|
||||
key="id",
|
||||
vals=[
|
||||
{"name": "id", "default": 0},
|
||||
{"name": "name", "default": "unknown"},
|
||||
{"name": "description", "default": "unknown"},
|
||||
{"name": "vcpus", "default": 0},
|
||||
{"name": "memory", "default": 0},
|
||||
{"name": "autostart", "type": "bool", "default": False},
|
||||
{"name": "cores", "default": 0},
|
||||
{"name": "threads", "default": 0},
|
||||
{"name": "state", "source": "status/state", "default": "unknown"},
|
||||
],
|
||||
ensure_vals=[
|
||||
{"name": "running", "type": "bool", "default": False},
|
||||
],
|
||||
)
|
||||
|
||||
for uid, vals in self.data["vm"].items():
|
||||
self.data["vm"][uid]["running"] = vals["state"] == "RUNNING"
|
||||
|
||||
# ---------------------------
|
||||
# get_cloudsync
|
||||
# ---------------------------
|
||||
    def get_cloudsync(self):
        """Get cloudsync from TrueNAS"""
        self.data["cloudsync"] = parse_api(
            data=self.data["cloudsync"],
            source=self.api.query("cloudsync"),
            key="id",
            vals=[
                {"name": "id", "default": "unknown"},
                {"name": "description", "default": "unknown"},
                {"name": "direction", "default": "unknown"},
                {"name": "path", "default": "unknown"},
                {"name": "enabled", "type": "bool", "default": False},
                {"name": "transfer_mode", "default": "unknown"},
                {"name": "snapshot", "type": "bool", "default": False},
                # job/* fields describe the most recent sync run.
                {"name": "state", "source": "job/state", "default": "unknown"},
                {
                    "name": "time_started",
                    "source": "job/time_started/$date",
                    "default": 0,
                    "convert": "utc_from_timestamp",
                },
                {
                    "name": "time_finished",
                    "source": "job/time_finished/$date",
                    "default": 0,
                    "convert": "utc_from_timestamp",
                },
                {"name": "job_percent", "source": "job/progress/percent", "default": 0},
                {
                    "name": "job_description",
                    "source": "job/progress/description",
                    "default": "unknown",
                },
            ],
        )
|
||||
|
||||
# ---------------------------
# get_replication
# ---------------------------
def get_replication(self):
    """Fetch replication task state from TrueNAS into self.data["replication"]."""
    repl_vals = [
        {"name": "id", "default": 0},
        {"name": "name", "default": "unknown"},
        {"name": "source_datasets", "default": "unknown"},
        {"name": "target_dataset", "default": "unknown"},
        {"name": "recursive", "type": "bool", "default": False},
        {"name": "enabled", "type": "bool", "default": False},
        {"name": "direction", "default": "unknown"},
        {"name": "transport", "default": "unknown"},
        {"name": "auto", "type": "bool", "default": False},
        {"name": "retention_policy", "default": "unknown"},
        {"name": "state", "source": "job/state", "default": "unknown"},
    ]
    # Job timestamps: nested "$date" values converted to UTC datetimes.
    for stamp in ("time_started", "time_finished"):
        repl_vals.append(
            {
                "name": stamp,
                "source": "job/" + stamp + "/$date",
                "default": 0,
                "convert": "utc_from_timestamp",
            }
        )
    # Progress of the most recent replication job run.
    repl_vals += [
        {"name": "job_percent", "source": "job/progress/percent", "default": 0},
        {
            "name": "job_description",
            "source": "job/progress/description",
            "default": "unknown",
        },
    ]
    self.data["replication"] = parse_api(
        data=self.data["replication"],
        source=self.api.query("replication"),
        key="id",
        vals=repl_vals,
    )
||||
# ---------------------------
# get_snapshottask
# ---------------------------
def get_snapshottask(self):
    """Get periodic snapshot tasks from TrueNAS.

    Populates self.data["snapshottask"] keyed by task id; the last-run
    state and timestamp come from the nested "state" object.
    (Docstring previously said "Get replication" — copy-paste error.)
    """
    self.data["snapshottask"] = parse_api(
        data=self.data["snapshottask"],
        source=self.api.query("pool/snapshottask"),
        key="id",
        vals=[
            {"name": "id", "default": 0},
            {"name": "dataset", "default": "unknown"},
            {"name": "recursive", "type": "bool", "default": False},
            {"name": "lifetime_value", "default": 0},
            {"name": "lifetime_unit", "default": "unknown"},
            {"name": "enabled", "type": "bool", "default": False},
            {"name": "naming_schema", "default": "unknown"},
            {"name": "allow_empty", "type": "bool", "default": False},
            {"name": "vmware_sync", "type": "bool", "default": False},
            {"name": "state", "source": "state/state", "default": "unknown"},
            # Last-run timestamp, converted from the nested "$date" to UTC.
            {
                "name": "datetime",
                "source": "state/datetime/$date",
                "default": 0,
                "convert": "utc_from_timestamp",
            },
        ],
    )
@@ -0,0 +1,90 @@
|
||||
"""TrueNAS binary sensor platform"""
|
||||
from logging import getLogger
|
||||
from typing import Any
|
||||
from homeassistant.components.update import (
|
||||
UpdateEntity,
|
||||
UpdateDeviceClass,
|
||||
UpdateEntityFeature,
|
||||
)
|
||||
|
||||
from .model import model_async_setup_entry, TrueNASEntity
|
||||
from .update_types import SENSOR_TYPES, SENSOR_SERVICES
|
||||
|
||||
_LOGGER = getLogger(__name__)
|
||||
DEVICE_UPDATE = "device_update"
|
||||
|
||||
|
||||
# ---------------------------
# async_setup_entry
# ---------------------------
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up update entities for the TrueNAS component.

    (Previous docstring referred to an OpenMediaVault device tracker —
    copy-paste from another integration.)
    """
    # Maps each entity description's "func" name to the entity class to build.
    dispatcher = {
        "TrueNASUpdate": TrueNASUpdate,
    }
    await model_async_setup_entry(
        hass,
        config_entry,
        async_add_entities,
        SENSOR_SERVICES,
        SENSOR_TYPES,
        dispatcher,
    )
|
||||
|
||||
|
||||
# ---------------------------
# TrueNASUpdate
# ---------------------------
class TrueNASUpdate(TrueNASEntity, UpdateEntity):
    """Define a TrueNAS update entity (system firmware/OS update)."""

    TYPE = DEVICE_UPDATE
    _attr_device_class = UpdateDeviceClass.FIRMWARE

    def __init__(
        self,
        inst,
        uid: str,  # was annotated `uid: ""`, a meaningless string-literal annotation
        truenas_controller,
        entity_description,
    ):
        """Set up device update entity."""
        super().__init__(inst, uid, truenas_controller, entity_description)

        self._attr_supported_features = (
            UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS
        )
        self._attr_title = self.entity_description.title

    @property
    def installed_version(self) -> str:
        """Version installed and in use."""
        return self._data["version"]

    @property
    def latest_version(self) -> str:
        """Latest version available for install."""
        return self._data["update_version"]

    async def options_updated(self) -> None:
        """No action needed."""

    async def async_install(self, version: str, backup: bool, **kwargs: Any) -> None:
        """Install an update.

        `version` and `backup` are accepted for UpdateEntity API
        compatibility but ignored: TrueNAS installs the latest available
        update and reboots afterwards.
        """
        # api.query is blocking, so run it in the executor.
        self._data["update_jobid"] = await self.hass.async_add_executor_job(
            self._ctrl.api.query,
            "update/update",
            "post",
            {"reboot": True},
        )
        await self._ctrl.async_update()

    @property
    def in_progress(self) -> int:
        """Update installation progress: False when idle, 1-100 while running."""
        if self._data["update_state"] != "RUNNING":
            return False

        # Report at least 1% so HA renders a determinate progress bar.
        if self._data["update_progress"] == 0:
            self._data["update_progress"] = 1

        return self._data["update_progress"]
@@ -0,0 +1,38 @@
|
||||
"""Definitions for TrueNAS update entities"""
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List
|
||||
from homeassistant.components.update import UpdateEntityDescription
|
||||
|
||||
|
||||
@dataclass
class TrueNASUpdateEntityDescription(UpdateEntityDescription):
    """Class describing TrueNAS update entities.

    (Previous docstring said "mikrotik entities" — copy-paste error.)
    """

    # Device grouping / connection metadata consumed by the model helpers.
    ha_group: str = ""
    ha_connection: str = ""
    ha_connection_value: str = ""
    title: str = ""
    # Where in controller.data this entity's values live.
    data_path: str = ""
    data_attribute: str = "available"
    data_name: str = ""
    data_uid: str = ""
    data_reference: str = ""
    # default_factory=list replaces the redundant `lambda: []`.
    data_attributes_list: List = field(default_factory=list)
    # Name of the entity class to instantiate for this description.
    func: str = "TrueNASUpdate"
|
||||
|
||||
|
||||
# Update entity descriptions, keyed by entity type.
SENSOR_TYPES = {
    "system_update": TrueNASUpdateEntityDescription(
        key="system_update",
        name="Update",
        ha_group="System",
        title="TrueNAS",
        # Entity state is read from controller.data["system_info"]["update_available"].
        data_path="system_info",
        data_attribute="update_available",
        data_name="",
        data_uid="",
        data_reference="",
    ),
}

# No entity-level services are registered for the update platform.
SENSOR_SERVICES = []
|
||||
Reference in New Issue
Block a user