diff --git a/database/__init__.py b/database/__init__.py index 581e336c2..1a02a6475 100755 --- a/database/__init__.py +++ b/database/__init__.py @@ -398,16 +398,16 @@ def copy_databasefile(self): database_name = '' # copy the database file - self.logger.warning( f"Starting to copy SQLite3 database file from {database_name} to {self._copy_database_name}") + self.logger.info( f"Starting to copy SQLite3 database file from {database_name} to {self._copy_database_name}") import shutil try: shutil.copy2(database_name, self._copy_database_name) - self.logger.warning("Finished copying SQLite3 database file") + self.logger.info("Finished copying SQLite3 database file") except Exception as e: self.logger.error( f"Error copying SQLite3 database file: {e}") - param_dict = {"copy_database": False} - self.update_config_section(param_dict) + #param_dict = {"copy_database": False} + #self.update_config_section(param_dict) return diff --git a/db_addon/__init__.py b/db_addon/__init__.py index c5e85b60f..ebdb2d76e 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -31,8 +31,6 @@ import time import re import queue -import threading -import logging import pickle import operator from dateutil.relativedelta import relativedelta @@ -62,7 +60,7 @@ class DatabaseAddOn(SmartPlugin): Main class of the Plugin. Does all plugin specific stuff and provides the update functions for the items """ - PLUGIN_VERSION = '1.2.8' + PLUGIN_VERSION = '1.2.9' def __init__(self, sh): """ @@ -80,17 +78,15 @@ def __init__(self, sh): self.plugins = Plugins.get_instance() # define cache dicts - self.pickle_data_validity_time = 600 # seconds after which the data saved in pickle are not valid anymore - self.current_values = {} # Dict to hold min and max value of current day / week / month / year for items - self.previous_values = {} # Dict to hold value of end of last day / week / month / year for items - self.item_cache = {} # Dict to hold item_id, oldest_log_ts and oldest_entry for items - self.value_list_raw_data = {} + self.pickle_data_validity_time = 600 # seconds after which the data saved in pickle are not valid anymore + self.current_values = {} # Dict to hold min and max value of current day / week / month / year for items + self.previous_values = {} # Dict to hold value of end of last day / week / month / year for items + self.item_cache = {} # Dict to hold item_id, oldest_log_ts and oldest_entry for items + self.value_list_raw_data = {} # List to hold raw data # define variables for database, database connection, working queue and status self.item_queue = queue.Queue() # Queue containing all to be executed items self.update_item_delay_deque = deque() # Deque for delay working of updated item values - # ToDo: Check if still needed - self.queue_consumer_thread = None # Queue consumer thread self._db_plugin = None # object if database plugin self._db = None # object of database self.connection_data = None # connection data list of database @@ -99,9 +95,9 @@ def __init__(self, sh): self.item_attribute_search_str = 'database' # attribute, on which an item configured for database can be identified self.last_connect_time = 0 # mechanism for limiting db connection requests self.alive = None # Is plugin alive? 
- self.suspended = False # Is plugin activity suspended self.active_queue_item: str = '-' # String holding item path of currently executed item - self.onchange_delay_time = 30 + self.onchange_delay_time = 30 # delay time in seconds between change of database item and start of reevaluation of db_addon item + self.database_item_list = [] # list of needed database items # define default mysql settings self.default_connect_timeout = 60 @@ -139,19 +135,19 @@ def run(self): # check existence of db-plugin, get parameters, and init connection to db if not self._check_db_existence(): self.logger.error(f"Check of existence of database plugin incl connection check failed. Plugin not loaded") - return self.deinit() + return # create db object self._db = lib.db.Database("DatabaseAddOn", self.db_driver, self.connection_data) if not self._db.api_initialized: self.logger.error("Initialization of database API failed") - return self.deinit() + return self.logger.debug("Initialization of database API successful") # check initialization of db if not self._initialize_db(): self.logger.error("Connection to database failed") - return self.deinit() + return self._db.close() # check db connection settings @@ -169,25 +165,15 @@ def run(self): # update database_items in item config, where path was given self._update_database_items() + # create list of all relevant database items + self._create_list_of_relevant_database_items() + # set plugin to alive self.alive = True # work item queue self.work_item_queue() - # ToDo: Check if still needed - """ - try: - self._queue_consumer_thread_startup() - except Exception as e: - self.logger.warning(f"During working item queue Exception '{e}' occurred.") - self.logger.debug(e, exc_info=True) - # self.logger.error("Thread for working item queue died. De-init plugin.") - # self.deinit() - self.logger.error("Suspend Plugin and clear Item-Queue.") - self.suspend(True) - """ - def stop(self): """ Stop method for the plugin @@ -201,9 +187,6 @@ def stop(self): self._db.close() self.save_cache_data() - # ToDo: Check if still needed - # self._queue_consumer_thread_shutdown() - def parse_item(self, item: Item): """ Default plugin parse_item method. Is called when the plugin is initialized.
@@ -526,23 +509,6 @@ def get_database_item() -> Item: return None, None - def has_db_addon_item() -> bool: - """Returns item from shNG config which is item with db_addon attribut valid for database item""" - - for child in item.return_children(): - if check_db_addon_fct(child): - return True - - for child_child in child.return_children(): - if check_db_addon_fct(child_child): - return True - - for child_child_child in child_child.return_children(): - if check_db_addon_fct(child_child_child): - return True - - return False - def check_db_addon_fct(check_item) -> bool: """ Check if item has db_addon_fct and is onchange @@ -626,7 +592,6 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte # read item_attribute_dict aus item_attributes_master item_attribute_dict = ITEM_ATTRIBUTES['db_addon_fct'].get(db_addon_fct) - self.logger.debug(f"{db_addon_fct}: {item_attribute_dict=}") # get query parameters from db_addon_fct or db_addon_params if item_attribute_dict['params']: @@ -683,11 +648,9 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte if self.debug_log.parse: self.logger.debug(f"Item={item.property.path} added with db_addon_fct={db_addon_fct} and database_item={database_item}") - # add type (onchange or ondemand) to item dict - item_config_data_dict.update({'on': item_attribute_dict['on']}) - # add cycle for item groups - cycle = item_attribute_dict['calc'] + cycle = item_attribute_dict['cycle'] + on = 'demand' if cycle == 'group': cycle = item_config_data_dict['query_params'].get('group') if not cycle: @@ -696,13 +659,19 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte elif cycle == 'timeframe': cycle = item_config_data_dict['query_params'].get('timeframe') cycle = f"{timeframe_to_updatecyle(cycle)}" - elif cycle == 'None': - cycle = None - item_config_data_dict.update({'cycle': cycle}) + elif not cycle: + on = 'change' + item_config_data_dict.update({'cycle': cycle, 'on': on}) # do logging if self.debug_log.parse: - self.logger.debug(f"Item '{item.property.path}' added to be run {item_config_data_dict['cycle']}.") + if cycle: + self.logger.debug(f"Item '{item.property.path}' added to be run {item_config_data_dict['cycle']}.") + else: + self.logger.debug(f"Item '{item.property.path}' added but will not be run cyclic.") + + if on == 'change': + self.logger.debug(f"Item '{item.property.path}' added and will be run on-change of {database_item}.") # create item config for item to be run on startup if db_addon_startup or item_attribute_dict['cat'] == 'gen': @@ -711,6 +680,8 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte item_config_data_dict.update({'startup': False}) # add item to plugin item dict + if self.debug_log.parse: + self.logger.debug(f"Item '{item.property.path}' completely parsed: {item_config_data_dict=}.") self.add_item(item, config_data_dict=item_config_data_dict) # handle all items with db_addon_info @@ -726,11 +697,8 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte self.add_item(item, config_data_dict={'db_addon': 'admin', 'db_addon_fct': f"admin_{self.get_iattr_value(item.conf, 'db_addon_admin').lower()}", 'database_item': None}) return self.update_item - # Reference to 'update_item' für alle Items mit Attribut 'database', um die on_change Items zu berechnen - elif self.has_iattr(item.conf, self.item_attribute_search_str) and has_db_addon_item(): - if self.debug_log.parse: - self.logger.debug(f"reference to 
update_item for item={item.property.path} will be set due to onchange") - self.add_item(item, config_data_dict={'db_addon': 'database'}) + # Reference to 'update_item' for all database items to trigger calculation of on-change items + elif self.has_iattr(item.conf, self.item_attribute_search_str): return self.update_item def update_item(self, item, caller=None, source=None, dest=None): @@ -748,13 +716,8 @@ def update_item(self, item, caller=None, source=None, dest=None): if self.alive and caller != self.get_shortname(): # handle database items if item in self._database_items(): - # if not self.startup_finished: - # self.logger.info(f"Handling of 'onchange' is paused for startup. No updated will be processed.") - if self.suspended: - self.logger.info(f"Plugin is suspended. No updated will be processed.") - else: - self.logger.debug(f" Updated Item {item.property.path} with value {item()} will be put to queue in approx. {self.onchange_delay_time}s resp. after startup.") - self.update_item_delay_deque.append([item, item(), int(time.time() + self.onchange_delay_time)]) + self.logger.debug(f" Updated Item {item.property.path} with value {item()} will be put to queue in approx. {self.onchange_delay_time}s resp. after startup.") + self.update_item_delay_deque.append([item, item(), int(time.time() + self.onchange_delay_time)]) # handle admin items elif self.has_iattr(item.conf, 'db_addon_admin'): @@ -956,10 +919,6 @@ def _create_due_items() -> list: if self.debug_log.execute: self.logger.debug(f"execute_items called with {option=}") - if self.suspended: - self.logger.info(f"Plugin is suspended. No items will be calculated.") - return - suspended_items = self._suspended_items() if len(suspended_items) > 0: self.logger.info(f"{len(suspended_items)} are suspended and will not be calculated.") @@ -1282,7 +1241,7 @@ def _update_database_items(self) -> None: item_config.update({'startup': True}) def _suspend_item_calculation(self, item: Union[str, Item], suspended: bool = False) -> Union[bool, None]: - """suspend calculation od decicated item""" + """suspend calculation of dedicated item""" if isinstance(item, str): item = self.items.return_item(item) @@ -1293,6 +1252,16 @@ def _suspend_item_calculation(self, item: Union[str, Item], suspended: bool = Fa item_config['suspended'] = suspended return suspended + def _create_list_of_relevant_database_items(self): + """creates list of all relevant database items for further reference""" + _database_items = set() + for item in self.get_item_list('database_item'): + item_config = self.get_item_config(item) + database_item = item_config.get('database_item') + if database_item is not None: + _database_items.add(database_item) + self.database_item_list = list(_database_items) + @property def log_level(self) -> int: return self.logger.getEffectiveLevel() @@ -1367,7 +1336,7 @@ def _info_items(self) -> list: return self.get_item_list('db_addon', 'info') def _database_items(self) -> list: - return self.get_item_list('db_addon', 'database') + return self.database_item_list def _database_item_path_items(self) -> list: return self.get_item_list('database_item_path', True) @@ -1537,28 +1506,6 @@ def fetch_raw(self, query: str, params: dict = None) -> Union[list, None]: return self._fetchall(query, params) - def suspend(self, state: bool = False) -> bool: - """ - Will pause value evaluation of plugin - - """ - - if state: - self.logger.info("Plugin is set to 'suspended'.
Queries to database will not be made until suspension is cleared.") - self.suspended = True - self._clear_queue() - else: - self.logger.info("Plugin suspension cleared. Queries to database will be resumed.") - self.suspended = False - - # write back value to item, if one exists - for item in self.get_item_list('db_addon', 'admin'): - item_config = self.get_item_config(item) - if item_config['db_addon_fct'] == 'suspend': - item(self.suspended, self.get_shortname()) - - return self.suspended - ############################################## # Calculation methods / Using Item Object ############################################## @@ -2459,33 +2406,6 @@ def _clear_queue(self) -> None: self.logger.info(f"Working queue will be cleared. Calculation run will end.") self.item_queue.queue.clear() - # ToDo: Check if still needed - def _queue_consumer_thread_startup(self): - """Start a thread to work item queue""" - - self.logger = logging.getLogger(__name__) - _name = 'plugins.' + self.get_fullname() + '.work_item_queue' - - try: - self.queue_consumer_thread = threading.Thread(target=self.work_item_queue, name=_name, daemon=False) - self.queue_consumer_thread.start() - self.logger.debug("Thread for 'queue_consumer_thread' has been started") - except threading.ThreadError: - self.logger.error("Unable to launch thread for 'queue_consumer_thread'.") - self.queue_consumer_thread = None - - # ToDo: Check if still needed - def _queue_consumer_thread_shutdown(self): - """Shut down the thread to work item queue""" - - if self.queue_consumer_thread: - self.queue_consumer_thread.join() - if self.queue_consumer_thread.is_alive(): - self.logger.error("Unable to shut down 'queue_consumer_thread' thread") - else: - self.logger.info("Thread 'queue_consumer_thread' has been shut down.") - self.queue_consumer_thread = None - def _get_start_end_as_timestamp(self, timeframe: str, start: Union[int, str, None], end: Union[int, str, None]) -> tuple: """ Provides start and end as timestamp in microseconds from timeframe with start and end @@ -2635,7 +2555,7 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i 'last': 'LIMIT 1 ', } - _where = "item_id = :item_id AND time < :ts_end " if func == "next" else "item_id = :item_id AND time BETWEEN :ts_start AND :ts_end " + _where = "item_id = :item_id AND time <= :ts_start " if func == "next" else "item_id = :item_id AND time BETWEEN :ts_start AND :ts_end " _db_table = 'log ' @@ -2687,7 +2607,7 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i # set params params = {'item_id': item_id, 'ts_start': ts_start, 'ts_end': ts_end} if func == "next": - params.pop('ts_start', None) + params.pop('ts_end', None) # assemble query query = f"SELECT {_select[func]}FROM {_db_table}WHERE {_where}{_group_by.get(group, '')}{_order.get(func, '')}{_limit.get(func, '')}{_table_alias.get(func, '')}{_group_by.get(group2, '')}".strip() diff --git a/db_addon/item_attributes_master.py b/db_addon/item_attributes_master.py index 012e0ca2f..2351f2812 100644 --- a/db_addon/item_attributes_master.py +++ b/db_addon/item_attributes_master.py @@ -31,172 +31,172 @@ ITEM_ATTRIBUTES = { 'db_addon_fct': { - 'verbrauch_heute': {'cat': 'verbrauch', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)'}, - 'verbrauch_tag': {'cat': 'verbrauch', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 
'calc': 'daily', 'params': False, 'description': 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)'}, - 'verbrauch_woche': {'cat': 'verbrauch', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch in der aktuellen Woche'}, - 'verbrauch_monat': {'cat': 'verbrauch', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch im aktuellen Monat'}, - 'verbrauch_jahr': {'cat': 'verbrauch', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch im aktuellen Jahr'}, - 'verbrauch_last_24h': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'hourly', 'params': False, 'description': 'Verbrauch innerhalb letzten 24h'}, - 'verbrauch_last_7d': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'hourly', 'params': False, 'description': 'Verbrauch innerhalb letzten 7 Tage'}, - 'verbrauch_heute_minus1': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages davor)'}, - 'verbrauch_heute_minus2': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch vorgestern (heute -2 Tage)'}, - 'verbrauch_heute_minus3': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -3 Tage'}, - 'verbrauch_heute_minus4': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -4 Tage'}, - 'verbrauch_heute_minus5': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -5 Tage'}, - 'verbrauch_heute_minus6': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -6 Tage'}, - 'verbrauch_heute_minus7': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -7 Tage'}, - 'verbrauch_heute_minus8': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -8 Tage'}, - 'verbrauch_tag_minus1': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages davor)'}, - 'verbrauch_tag_minus2': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch vorgestern (heute -2 Tage)'}, - 'verbrauch_tag_minus3': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -3 Tage'}, - 'verbrauch_tag_minus4': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -4 Tage'}, - 
'verbrauch_tag_minus5': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -5 Tage'}, - 'verbrauch_tag_minus6': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -6 Tage'}, - 'verbrauch_tag_minus7': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -7 Tage'}, - 'verbrauch_tag_minus8': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -8 Tage'}, - 'verbrauch_woche_minus1': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch Vorwoche (aktuelle Woche -1)'}, - 'verbrauch_woche_minus2': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -2 Wochen'}, - 'verbrauch_woche_minus3': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -3 Wochen'}, - 'verbrauch_woche_minus4': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -4 Wochen'}, - 'verbrauch_monat_minus1': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch Vormonat (aktueller Monat -1)'}, - 'verbrauch_monat_minus2': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -2 Monate'}, - 'verbrauch_monat_minus3': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -3 Monate'}, - 'verbrauch_monat_minus4': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -4 Monate'}, - 'verbrauch_monat_minus12': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -12 Monate'}, - 'verbrauch_jahr_minus1': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch Vorjahr (aktuelles Jahr -1 Jahr)'}, - 'verbrauch_jahr_minus2': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch aktuelles Jahr -2 Jahre'}, - 'verbrauch_jahr_minus3': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch aktuelles Jahr -3 Jahre'}, - 'verbrauch_rolling_12m_heute_minus1': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages'}, - 'verbrauch_rolling_12m_tag_minus1': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch der 
letzten 12 Monate ausgehend im Ende des letzten Tages'}, - 'verbrauch_rolling_12m_woche_minus1': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche'}, - 'verbrauch_rolling_12m_monat_minus1': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats'}, - 'verbrauch_rolling_12m_jahr_minus1': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres'}, - 'verbrauch_jahreszeitraum_minus1': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres'}, - 'verbrauch_jahreszeitraum_minus2': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 2 Jahren'}, - 'verbrauch_jahreszeitraum_minus3': {'cat': 'verbrauch', 'on': 'demand', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 3 Jahren'}, - 'zaehlerstand_heute_minus1': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag)'}, - 'zaehlerstand_heute_minus2': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tag)'}, - 'zaehlerstand_heute_minus3': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tag)'}, - 'zaehlerstand_tag_minus1': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag)'}, - 'zaehlerstand_tag_minus2': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tag)'}, - 'zaehlerstand_tag_minus3': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tag)'}, - 'zaehlerstand_woche_minus1': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der vorvorletzten Woche (aktuelle Woche -1 Woche)'}, - 'zaehlerstand_woche_minus2': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Wochen)'}, - 'zaehlerstand_woche_minus3': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der aktuellen Woche -3 Wochen'}, - 'zaehlerstand_monat_minus1': 
{'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat)'}, - 'zaehlerstand_monat_minus2': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate)'}, - 'zaehlerstand_monat_minus3': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Monats -3 Monate'}, - 'zaehlerstand_jahr_minus1': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr)'}, - 'zaehlerstand_jahr_minus2': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre)'}, - 'zaehlerstand_jahr_minus3': {'cat': 'zaehler', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Jahres -3 Jahre'}, - 'minmax_last_24h_min': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 24h'}, - 'minmax_last_24h_max': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'maximaler Wert der letzten 24h'}, - 'minmax_last_24h_avg': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 24h'}, - 'minmax_last_7d_min': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 7 Tage'}, - 'minmax_last_7d_max': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'maximaler Wert der letzten 7 Tage'}, - 'minmax_last_7d_avg': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 7 Tage'}, - 'minmax_heute_min': {'cat': 'wertehistorie', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert seit Tagesbeginn'}, - 'minmax_heute_max': {'cat': 'wertehistorie', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert seit Tagesbeginn'}, - 'minmax_heute_avg': {'cat': 'wertehistorie', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durschnittswert seit Tagesbeginn'}, - 'minmax_heute_minus1_min': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert gestern (heute -1 Tag)'}, - 'minmax_heute_minus1_max': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert gestern (heute -1 Tag)'}, - 'minmax_heute_minus1_avg': {'cat': 'wertehistorie', 
'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert gestern (heute -1 Tag)'}, - 'minmax_heute_minus2_min': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert vorgestern (heute -2 Tage)'}, - 'minmax_heute_minus2_max': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert vorgestern (heute -2 Tage)'}, - 'minmax_heute_minus2_avg': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert vorgestern (heute -2 Tage)'}, - 'minmax_heute_minus3_min': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert heute vor 3 Tagen'}, - 'minmax_heute_minus3_max': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert heute vor 3 Tagen'}, - 'minmax_heute_minus3_avg': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert heute vor 3 Tagen'}, - 'minmax_tag_min': {'cat': 'wertehistorie', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert seit Tagesbeginn'}, - 'minmax_tag_max': {'cat': 'wertehistorie', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert seit Tagesbeginn'}, - 'minmax_tag_avg': {'cat': 'wertehistorie', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durschnittswert seit Tagesbeginn'}, - 'minmax_tag_minus1_min': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert gestern (heute -1 Tag)'}, - 'minmax_tag_minus1_max': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert gestern (heute -1 Tag)'}, - 'minmax_tag_minus1_avg': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert gestern (heute -1 Tag)'}, - 'minmax_tag_minus2_min': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert vorgestern (heute -2 Tage)'}, - 'minmax_tag_minus2_max': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert vorgestern (heute -2 Tage)'}, - 'minmax_tag_minus2_avg': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert vorgestern (heute -2 Tage)'}, - 'minmax_tag_minus3_min': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert heute vor 3 Tagen'}, - 'minmax_tag_minus3_max': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert heute vor 3 Tagen'}, - 
'minmax_tag_minus3_avg': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert heute vor 3 Tagen'}, - 'minmax_woche_min': {'cat': 'wertehistorie', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Minimalwert seit Wochenbeginn'}, - 'minmax_woche_max': {'cat': 'wertehistorie', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Maximalwert seit Wochenbeginn'}, - 'minmax_woche_minus1_min': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Minimalwert Vorwoche (aktuelle Woche -1)'}, - 'minmax_woche_minus1_max': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Maximalwert Vorwoche (aktuelle Woche -1)'}, - 'minmax_woche_minus1_avg': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Durchschnittswert Vorwoche (aktuelle Woche -1)'}, - 'minmax_woche_minus2_min': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Minimalwert aktuelle Woche -2 Wochen'}, - 'minmax_woche_minus2_max': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Maximalwert aktuelle Woche -2 Wochen'}, - 'minmax_woche_minus2_avg': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Durchschnittswert aktuelle Woche -2 Wochen'}, - 'minmax_monat_min': {'cat': 'wertehistorie', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Minimalwert seit Monatsbeginn'}, - 'minmax_monat_max': {'cat': 'wertehistorie', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Maximalwert seit Monatsbeginn'}, - 'minmax_monat_minus1_min': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Minimalwert Vormonat (aktueller Monat -1)'}, - 'minmax_monat_minus1_max': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Maximalwert Vormonat (aktueller Monat -1)'}, - 'minmax_monat_minus1_avg': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Durchschnittswert Vormonat (aktueller Monat -1)'}, - 'minmax_monat_minus2_min': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Minimalwert aktueller Monat -2 Monate'}, - 'minmax_monat_minus2_max': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Maximalwert aktueller Monat -2 Monate'}, - 'minmax_monat_minus2_avg': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Durchschnittswert aktueller Monat -2 Monate'}, - 'minmax_jahr_min': {'cat': 'wertehistorie', 'on': 'change', 'sub_cat': 
'onchange', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Minimalwert seit Jahresbeginn'}, - 'minmax_jahr_max': {'cat': 'wertehistorie', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Maximalwert seit Jahresbeginn'}, - 'minmax_jahr_minus1_min': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Minimalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, - 'minmax_jahr_minus1_max': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Maximalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, - 'minmax_jahr_minus1_avg': {'cat': 'wertehistorie', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr)'}, - 'tagesmitteltemperatur_heute': {'cat': 'tagesmittel', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur heute'}, - 'tagesmitteltemperatur_heute_minus1': {'cat': 'tagesmittel', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des letzten Tages (heute -1 Tag)'}, - 'tagesmitteltemperatur_heute_minus2': {'cat': 'tagesmittel', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag)'}, - 'tagesmitteltemperatur_heute_minus3': {'cat': 'tagesmittel', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag)'}, - 'tagesmitteltemperatur_tag': {'cat': 'tagesmittel', 'on': 'change', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur heute'}, - 'tagesmitteltemperatur_tag_minus1': {'cat': 'tagesmittel', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des letzten Tages (heute -1 Tag)'}, - 'tagesmitteltemperatur_tag_minus2': {'cat': 'tagesmittel', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag)'}, - 'tagesmitteltemperatur_tag_minus3': {'cat': 'tagesmittel', 'on': 'demand', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag)'}, - 'serie_minmax_monat_min_15m': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Minimalwert der letzten 15 Monate (gleitend)'}, - 'serie_minmax_monat_max_15m': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Maximalwert der letzten 15 Monate (gleitend)'}, - 'serie_minmax_monat_avg_15m': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Mittelwert der letzten 15 Monate (gleitend)'}, - 'serie_minmax_woche_min_30w': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 
'wöchentlicher Minimalwert der letzten 30 Wochen (gleitend)'}, - 'serie_minmax_woche_max_30w': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Maximalwert der letzten 30 Wochen (gleitend)'}, - 'serie_minmax_woche_avg_30w': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Mittelwert der letzten 30 Wochen (gleitend)'}, - 'serie_minmax_tag_min_30d': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Minimalwert der letzten 30 Tage (gleitend)'}, - 'serie_minmax_tag_max_30d': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Maximalwert der letzten 30 Tage (gleitend)'}, - 'serie_minmax_tag_avg_30d': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Mittelwert der letzten 30 Tage (gleitend)'}, - 'serie_verbrauch_tag_30d': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'verbrauch', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Verbrauch pro Tag der letzten 30 Tage'}, - 'serie_verbrauch_woche_30w': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'verbrauch', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch pro Woche der letzten 30 Wochen'}, - 'serie_verbrauch_monat_18m': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'verbrauch', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch pro Monat der letzten 18 Monate'}, - 'serie_zaehlerstand_tag_30d': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'zaehler', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Zählerstand am Tagesende der letzten 30 Tage'}, - 'serie_zaehlerstand_woche_30w': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'zaehler', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand am Wochenende der letzten 30 Wochen'}, - 'serie_zaehlerstand_monat_18m': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'zaehler', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand am Monatsende der letzten 18 Monate'}, - 'serie_waermesumme_monat_24m': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'summe', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatliche Wärmesumme der letzten 24 Monate'}, - 'serie_kaeltesumme_monat_24m': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'summe', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatliche Kältesumme der letzten 24 Monate'}, - 'serie_tagesmittelwert_0d': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'mittel_d', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Tagesmittelwert für den aktuellen Tag'}, - 'serie_tagesmittelwert_stunde_0d': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'mittel_h', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert für den aktuellen Tag'}, - 'serie_tagesmittelwert_stunde_30_0d': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'mittel_h1', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert für den aktuellen Tag'}, - 'serie_tagesmittelwert_tag_stunde_30d': {'cat': 'serie', 'on': 'demand', 'sub_cat': 'mittel_d_h', 'item_type': 'list', 'calc': 'daily', 'params': 
False, 'description': 'Stundenmittelwert pro Tag der letzten 30 Tage (bspw. zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde'}, - 'general_oldest_value': {'cat': 'gen', 'on': 'demand', 'sub_cat': None, 'item_type': 'num', 'calc': 'None', 'params': False, 'description': 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut'}, - 'general_oldest_log': {'cat': 'gen', 'on': 'demand', 'sub_cat': None, 'item_type': 'list', 'calc': 'None', 'params': False, 'description': 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut'}, - 'kaeltesumme': {'cat': 'summe', 'on': 'demand', 'sub_cat': None, 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=optional, month=optional)'}, - 'waermesumme': {'cat': 'summe', 'on': 'demand', 'sub_cat': None, 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=optional, month=optional)'}, - 'gruenlandtempsumme': {'cat': 'summe', 'on': 'demand', 'sub_cat': None, 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params: (year=optional)'}, - 'wachstumsgradtage': {'cat': 'summe', 'on': 'demand', 'sub_cat': None, 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit an Angabe des Temperaturschwellenwertes (threshold=Schwellentemperatur)'}, - 'wuestentage': {'cat': 'summe', 'on': 'demand', 'sub_cat': 'kenntage', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Wüstentage des Jahres, db_addon_params: (year=optional)'}, - 'heisse_tage': {'cat': 'summe', 'on': 'demand', 'sub_cat': 'kenntage', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der heissen Tage des Jahres, db_addon_params: (year=optional)'}, - 'tropennaechte': {'cat': 'summe', 'on': 'demand', 'sub_cat': 'kenntage', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Tropennächte des Jahres, db_addon_params: (year=optional)'}, - 'sommertage': {'cat': 'summe', 'on': 'demand', 'sub_cat': 'kenntage', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Sommertage des Jahres, db_addon_params: (year=optional)'}, - 'heiztage': {'cat': 'summe', 'on': 'demand', 'sub_cat': 'kenntage', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Heiztage des Jahres, db_addon_params: (year=optional)'}, - 'vegetationstage': {'cat': 'summe', 'on': 'demand', 'sub_cat': 'kenntage', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Vegatationstage des Jahres, db_addon_params: (year=optional)'}, - 'frosttage': {'cat': 'summe', 'on': 'demand', 'sub_cat': 'kenntage', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Frosttage des Jahres, db_addon_params: (year=optional)'}, - 'eistage': {'cat': 'summe', 'on': 'demand', 'sub_cat': 'kenntage', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Eistage des Jahres, db_addon_params: (year=optional)'}, - 'tagesmitteltemperatur': {'cat': 'complex', 'on': 'demand', 
'sub_cat': None, 'item_type': 'list', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Tagesmitteltemperatur auf Basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (timeframe=day, count=integer)'}, - 'db_request': {'cat': 'complex', 'on': 'demand', 'sub_cat': None, 'item_type': 'list', 'calc': 'group', 'params': True, 'description': 'Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional)'}, - 'minmax': {'cat': 'complex', 'on': 'demand', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen min/max/avg Wert für einen bestimmen Zeitraum: db_addon_params: (func=mandatory, timeframe=mandatory, start=mandatory)'}, - 'minmax_last': {'cat': 'complex', 'on': 'demand', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen min/max/avg Wert für ein bestimmtes Zeitfenster von jetzt zurück: db_addon_params: (func=mandatory, timeframe=mandatory, start=mandatory, end=mandatory)'}, - 'verbrauch': {'cat': 'complex', 'on': 'demand', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen Verbrauchswert für einen bestimmen Zeitraum: db_addon_params: (timeframe=mandatory, start=mandatory end=mandatory)'}, - 'zaehlerstand': {'cat': 'complex', 'on': 'demand', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen Zählerstand für einen bestimmen Zeitpunkt: db_addon_params: (timeframe=mandatory, start=mandatory)'}, + 'verbrauch_heute': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)'}, + 'verbrauch_tag': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)'}, + 'verbrauch_woche': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Verbrauch in der aktuellen Woche'}, + 'verbrauch_monat': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Verbrauch im aktuellen Monat'}, + 'verbrauch_jahr': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Verbrauch im aktuellen Jahr'}, + 'verbrauch_last_24h': {'cat': 'verbrauch', 'sub_cat': 'last', 'item_type': 'num', 'cycle': 'hourly', 'params': False, 'description': 'Verbrauch innerhalb letzten 24h'}, + 'verbrauch_last_7d': {'cat': 'verbrauch', 'sub_cat': 'last', 'item_type': 'num', 'cycle': 'hourly', 'params': False, 'description': 'Verbrauch innerhalb letzten 7 Tage'}, + 'verbrauch_heute_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages davor)'}, + 'verbrauch_heute_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch vorgestern (heute -2 Tage)'}, + 'verbrauch_heute_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 
'cycle': 'daily', 'params': False, 'description': 'Verbrauch heute -3 Tage'}, + 'verbrauch_heute_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch heute -4 Tage'}, + 'verbrauch_heute_minus5': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch heute -5 Tage'}, + 'verbrauch_heute_minus6': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch heute -6 Tage'}, + 'verbrauch_heute_minus7': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch heute -7 Tage'}, + 'verbrauch_heute_minus8': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch heute -8 Tage'}, + 'verbrauch_tag_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages davor)'}, + 'verbrauch_tag_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch vorgestern (heute -2 Tage)'}, + 'verbrauch_tag_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch heute -3 Tage'}, + 'verbrauch_tag_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch heute -4 Tage'}, + 'verbrauch_tag_minus5': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch heute -5 Tage'}, + 'verbrauch_tag_minus6': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch heute -6 Tage'}, + 'verbrauch_tag_minus7': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch heute -7 Tage'}, + 'verbrauch_tag_minus8': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch heute -8 Tage'}, + 'verbrauch_woche_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Verbrauch Vorwoche (aktuelle Woche -1)'}, + 'verbrauch_woche_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -2 Wochen'}, + 'verbrauch_woche_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -3 Wochen'}, + 'verbrauch_woche_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -4 Wochen'}, + 'verbrauch_monat_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Verbrauch Vormonat (aktueller Monat -1)'}, + 'verbrauch_monat_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -2 Monate'}, + 'verbrauch_monat_minus3': {'cat': 
'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -3 Monate'}, + 'verbrauch_monat_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -4 Monate'}, + 'verbrauch_monat_minus12': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -12 Monate'}, + 'verbrauch_jahr_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'yearly', 'params': False, 'description': 'Verbrauch Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'verbrauch_jahr_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'yearly', 'params': False, 'description': 'Verbrauch aktuelles Jahr -2 Jahre'}, + 'verbrauch_jahr_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'yearly', 'params': False, 'description': 'Verbrauch aktuelles Jahr -3 Jahre'}, + 'verbrauch_rolling_12m_heute_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages'}, + 'verbrauch_rolling_12m_tag_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages'}, + 'verbrauch_rolling_12m_woche_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche'}, + 'verbrauch_rolling_12m_monat_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats'}, + 'verbrauch_rolling_12m_jahr_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'cycle': 'yearly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres'}, + 'verbrauch_jahreszeitraum_minus1': {'cat': 'verbrauch', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres'}, + 'verbrauch_jahreszeitraum_minus2': {'cat': 'verbrauch', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 2 Jahren'}, + 'verbrauch_jahreszeitraum_minus3': {'cat': 'verbrauch', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. 
bis zum heutigen Tag vor 3 Jahren'}, + 'zaehlerstand_heute_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag)'}, + 'zaehlerstand_heute_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tage)'}, + 'zaehlerstand_heute_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tage)'}, + 'zaehlerstand_tag_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag)'}, + 'zaehlerstand_tag_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tage)'}, + 'zaehlerstand_tag_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tage)'}, + 'zaehlerstand_woche_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der letzten Woche (aktuelle Woche -1 Woche)'}, + 'zaehlerstand_woche_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Wochen)'}, + 'zaehlerstand_woche_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der aktuellen Woche -3 Wochen'}, + 'zaehlerstand_monat_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat)'}, + 'zaehlerstand_monat_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate)'}, + 'zaehlerstand_monat_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Monats -3 Monate'}, + 'zaehlerstand_jahr_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr)'}, + 'zaehlerstand_jahr_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre)'}, + 'zaehlerstand_jahr_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Jahres -3 Jahre'}, + 'minmax_last_24h_min': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 24h'}, + 'minmax_last_24h_max': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'cycle': 'daily', 'params': 
False, 'description': 'maximaler Wert der letzten 24h'}, + 'minmax_last_24h_avg': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 24h'}, + 'minmax_last_7d_min': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 7 Tage'}, + 'minmax_last_7d_max': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'maximaler Wert der letzten 7 Tage'}, + 'minmax_last_7d_avg': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 7 Tage'}, + 'minmax_heute_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Minimalwert seit Tagesbeginn'}, + 'minmax_heute_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Maximalwert seit Tagesbeginn'}, + 'minmax_heute_avg': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Durchschnittswert seit Tagesbeginn'}, + 'minmax_heute_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Minimalwert gestern (heute -1 Tag)'}, + 'minmax_heute_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Maximalwert gestern (heute -1 Tag)'}, + 'minmax_heute_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Durchschnittswert gestern (heute -1 Tag)'}, + 'minmax_heute_minus2_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Minimalwert vorgestern (heute -2 Tage)'}, + 'minmax_heute_minus2_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Maximalwert vorgestern (heute -2 Tage)'}, + 'minmax_heute_minus2_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Durchschnittswert vorgestern (heute -2 Tage)'}, + 'minmax_heute_minus3_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Minimalwert heute vor 3 Tagen'}, + 'minmax_heute_minus3_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Maximalwert heute vor 3 Tagen'}, + 'minmax_heute_minus3_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Durchschnittswert heute vor 3 Tagen'}, + 'minmax_tag_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Minimalwert seit Tagesbeginn'}, + 'minmax_tag_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Maximalwert seit Tagesbeginn'}, + 'minmax_tag_avg': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Durchschnittswert seit Tagesbeginn'}, + 'minmax_tag_minus1_min': {'cat': 'wertehistorie', 
'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Minimalwert gestern (heute -1 Tag)'}, + 'minmax_tag_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Maximalwert gestern (heute -1 Tag)'}, + 'minmax_tag_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Durchschnittswert gestern (heute -1 Tag)'}, + 'minmax_tag_minus2_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Minimalwert vorgestern (heute -2 Tage)'}, + 'minmax_tag_minus2_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Maximalwert vorgestern (heute -2 Tage)'}, + 'minmax_tag_minus2_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Durchschnittswert vorgestern (heute -2 Tage)'}, + 'minmax_tag_minus3_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Minimalwert heute vor 3 Tagen'}, + 'minmax_tag_minus3_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Maximalwert heute vor 3 Tagen'}, + 'minmax_tag_minus3_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Durchschnittswert heute vor 3 Tagen'}, + 'minmax_woche_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Minimalwert seit Wochenbeginn'}, + 'minmax_woche_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Maximalwert seit Wochenbeginn'}, + 'minmax_woche_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Minimalwert Vorwoche (aktuelle Woche -1)'}, + 'minmax_woche_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Maximalwert Vorwoche (aktuelle Woche -1)'}, + 'minmax_woche_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Durchschnittswert Vorwoche (aktuelle Woche -1)'}, + 'minmax_woche_minus2_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Minimalwert aktuelle Woche -2 Wochen'}, + 'minmax_woche_minus2_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Maximalwert aktuelle Woche -2 Wochen'}, + 'minmax_woche_minus2_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'weekly', 'params': False, 'description': 'Durchschnittswert aktuelle Woche -2 Wochen'}, + 'minmax_monat_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Minimalwert seit Monatsbeginn'}, + 'minmax_monat_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Maximalwert seit Monatsbeginn'}, + 'minmax_monat_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 
'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Minimalwert Vormonat (aktueller Monat -1)'}, + 'minmax_monat_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Maximalwert Vormonat (aktueller Monat -1)'}, + 'minmax_monat_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Durchschnittswert Vormonat (aktueller Monat -1)'}, + 'minmax_monat_minus2_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Minimalwert aktueller Monat -2 Monate'}, + 'minmax_monat_minus2_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Maximalwert aktueller Monat -2 Monate'}, + 'minmax_monat_minus2_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'monthly', 'params': False, 'description': 'Durchschnittswert aktueller Monat -2 Monate'}, + 'minmax_jahr_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Minimalwert seit Jahresbeginn'}, + 'minmax_jahr_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Maximalwert seit Jahresbeginn'}, + 'minmax_jahr_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'yearly', 'params': False, 'description': 'Minimalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'minmax_jahr_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'yearly', 'params': False, 'description': 'Maximalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'minmax_jahr_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'yearly', 'params': False, 'description': 'Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'tagesmitteltemperatur_heute': {'cat': 'tagesmittel', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Tagesmitteltemperatur heute'}, + 'tagesmitteltemperatur_heute_minus1': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des letzten Tages (heute -1 Tag)'}, + 'tagesmitteltemperatur_heute_minus2': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag)'}, + 'tagesmitteltemperatur_heute_minus3': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag)'}, + 'tagesmitteltemperatur_tag': {'cat': 'tagesmittel', 'sub_cat': 'onchange', 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Tagesmitteltemperatur heute'}, + 'tagesmitteltemperatur_tag_minus1': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des letzten Tages (heute -1 Tag)'}, + 'tagesmitteltemperatur_tag_minus2': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag)'}, + 'tagesmitteltemperatur_tag_minus3': {'cat': 'tagesmittel', 'sub_cat': 
'timeframe', 'item_type': 'num', 'cycle': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag)'}, + 'serie_minmax_monat_min_15m': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'cycle': 'monthly', 'params': False, 'description': 'monatlicher Minimalwert der letzten 15 Monate (gleitend)'}, + 'serie_minmax_monat_max_15m': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'cycle': 'monthly', 'params': False, 'description': 'monatlicher Maximalwert der letzten 15 Monate (gleitend)'}, + 'serie_minmax_monat_avg_15m': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'cycle': 'monthly', 'params': False, 'description': 'monatlicher Mittelwert der letzten 15 Monate (gleitend)'}, + 'serie_minmax_woche_min_30w': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'cycle': 'weekly', 'params': False, 'description': 'wöchentlicher Minimalwert der letzten 30 Wochen (gleitend)'}, + 'serie_minmax_woche_max_30w': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'cycle': 'weekly', 'params': False, 'description': 'wöchentlicher Maximalwert der letzten 30 Wochen (gleitend)'}, + 'serie_minmax_woche_avg_30w': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'cycle': 'weekly', 'params': False, 'description': 'wöchentlicher Mittelwert der letzten 30 Wochen (gleitend)'}, + 'serie_minmax_tag_min_30d': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'cycle': 'daily', 'params': False, 'description': 'täglicher Minimalwert der letzten 30 Tage (gleitend)'}, + 'serie_minmax_tag_max_30d': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'cycle': 'daily', 'params': False, 'description': 'täglicher Maximalwert der letzten 30 Tage (gleitend)'}, + 'serie_minmax_tag_avg_30d': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'cycle': 'daily', 'params': False, 'description': 'täglicher Mittelwert der letzten 30 Tage (gleitend)'}, + 'serie_verbrauch_tag_30d': {'cat': 'serie', 'sub_cat': 'verbrauch', 'item_type': 'list', 'cycle': 'daily', 'params': False, 'description': 'Verbrauch pro Tag der letzten 30 Tage'}, + 'serie_verbrauch_woche_30w': {'cat': 'serie', 'sub_cat': 'verbrauch', 'item_type': 'list', 'cycle': 'weekly', 'params': False, 'description': 'Verbrauch pro Woche der letzten 30 Wochen'}, + 'serie_verbrauch_monat_18m': {'cat': 'serie', 'sub_cat': 'verbrauch', 'item_type': 'list', 'cycle': 'monthly', 'params': False, 'description': 'Verbrauch pro Monat der letzten 18 Monate'}, + 'serie_zaehlerstand_tag_30d': {'cat': 'serie', 'sub_cat': 'zaehler', 'item_type': 'list', 'cycle': 'daily', 'params': False, 'description': 'Zählerstand am Tagesende der letzten 30 Tage'}, + 'serie_zaehlerstand_woche_30w': {'cat': 'serie', 'sub_cat': 'zaehler', 'item_type': 'list', 'cycle': 'weekly', 'params': False, 'description': 'Zählerstand am Wochenende der letzten 30 Wochen'}, + 'serie_zaehlerstand_monat_18m': {'cat': 'serie', 'sub_cat': 'zaehler', 'item_type': 'list', 'cycle': 'monthly', 'params': False, 'description': 'Zählerstand am Monatsende der letzten 18 Monate'}, + 'serie_waermesumme_monat_24m': {'cat': 'serie', 'sub_cat': 'summe', 'item_type': 'list', 'cycle': 'monthly', 'params': False, 'description': 'monatliche Wärmesumme der letzten 24 Monate'}, + 'serie_kaeltesumme_monat_24m': {'cat': 'serie', 'sub_cat': 'summe', 'item_type': 'list', 'cycle': 'monthly', 'params': False, 'description': 'monatliche Kältesumme der letzten 24 Monate'}, + 'serie_tagesmittelwert_0d': {'cat': 'serie', 'sub_cat': 
'mittel_d', 'item_type': 'list', 'cycle': 'daily', 'params': False, 'description': 'Tagesmittelwert für den aktuellen Tag'}, + 'serie_tagesmittelwert_stunde_0d': {'cat': 'serie', 'sub_cat': 'mittel_h', 'item_type': 'list', 'cycle': 'daily', 'params': False, 'description': 'Stundenmittelwert für den aktuellen Tag'}, + 'serie_tagesmittelwert_stunde_30_0d': {'cat': 'serie', 'sub_cat': 'mittel_h1', 'item_type': 'list', 'cycle': 'daily', 'params': False, 'description': 'Stundenmittelwert für den aktuellen Tag'}, + 'serie_tagesmittelwert_tag_stunde_30d': {'cat': 'serie', 'sub_cat': 'mittel_d_h', 'item_type': 'list', 'cycle': 'daily', 'params': False, 'description': 'Stundenmittelwert pro Tag der letzten 30 Tage (bspw. zur Berechnung der Tagesmitteltemperatur basierend auf dem Mittelwert der Temperatur pro Stunde)'}, + 'general_oldest_value': {'cat': 'gen', 'sub_cat': None, 'item_type': 'num', 'cycle': None, 'params': False, 'description': 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut'}, + 'general_oldest_log': {'cat': 'gen', 'sub_cat': None, 'item_type': 'list', 'cycle': None, 'params': False, 'description': 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut'}, + 'kaeltesumme': {'cat': 'summe', 'sub_cat': None, 'item_type': 'num', 'cycle': 'daily', 'params': True, 'description': 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=optional, month=optional)'}, + 'waermesumme': {'cat': 'summe', 'sub_cat': None, 'item_type': 'num', 'cycle': 'daily', 'params': True, 'description': 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=optional, month=optional)'}, + 'gruenlandtempsumme': {'cat': 'summe', 'sub_cat': None, 'item_type': 'num', 'cycle': 'daily', 'params': True, 'description': 'Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params: (year=optional)'}, + 'wachstumsgradtage': {'cat': 'summe', 'sub_cat': None, 'item_type': 'num', 'cycle': 'daily', 'params': True, 'description': 'Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit Angabe des Temperaturschwellenwertes (threshold=Schwellentemperatur)'}, + 'wuestentage': {'cat': 'summe', 'sub_cat': 'kenntage', 'item_type': 'num', 'cycle': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Wüstentage des Jahres, db_addon_params: (year=optional)'}, + 'heisse_tage': {'cat': 'summe', 'sub_cat': 'kenntage', 'item_type': 'num', 'cycle': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der heissen Tage des Jahres, db_addon_params: (year=optional)'}, + 'tropennaechte': {'cat': 'summe', 'sub_cat': 'kenntage', 'item_type': 'num', 'cycle': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Tropennächte des Jahres, db_addon_params: (year=optional)'}, + 'sommertage': {'cat': 'summe', 'sub_cat': 'kenntage', 'item_type': 'num', 'cycle': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Sommertage des Jahres, db_addon_params: (year=optional)'}, + 'heiztage': {'cat': 'summe', 'sub_cat': 'kenntage', 'item_type': 'num', 'cycle': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Heiztage des Jahres, db_addon_params: (year=optional)'}, + 'vegetationstage': {'cat': 'summe', 'sub_cat': 'kenntage', 'item_type': 'num', 'cycle': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Vegetationstage des Jahres, db_addon_params: (year=optional)'}, + 'frosttage': {'cat': 'summe', 
'sub_cat': 'kenntage', 'item_type': 'num', 'cycle': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Frosttage des Jahres, db_addon_params: (year=optional)'}, + 'eistage': {'cat': 'summe', 'sub_cat': 'kenntage', 'item_type': 'num', 'cycle': 'daily', 'params': True, 'description': 'Berechnet die Anzahl der Eistage des Jahres, db_addon_params: (year=optional)'}, + 'tagesmitteltemperatur': {'cat': 'complex', 'sub_cat': None, 'item_type': 'list', 'cycle': 'daily', 'params': True, 'description': 'Berechnet die Tagesmitteltemperatur auf Basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (timeframe=day, count=integer)'}, + 'db_request': {'cat': 'complex', 'sub_cat': None, 'item_type': 'list', 'cycle': 'group', 'params': True, 'description': 'Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional)'}, + 'minmax': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'cycle': 'timeframe', 'params': True, 'description': 'Berechnet einen min/max/avg Wert für einen bestimmen Zeitraum: db_addon_params: (func=mandatory, timeframe=mandatory, start=mandatory)'}, + 'minmax_last': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'cycle': 'timeframe', 'params': True, 'description': 'Berechnet einen min/max/avg Wert für ein bestimmtes Zeitfenster von jetzt zurück: db_addon_params: (func=mandatory, timeframe=mandatory, start=mandatory, end=mandatory)'}, + 'verbrauch': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'cycle': 'timeframe', 'params': True, 'description': 'Berechnet einen Verbrauchswert für einen bestimmen Zeitraum: db_addon_params: (timeframe=mandatory, start=mandatory end=mandatory)'}, + 'zaehlerstand': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'cycle': 'timeframe', 'params': True, 'description': 'Berechnet einen Zählerstand für einen bestimmen Zeitpunkt: db_addon_params: (timeframe=mandatory, start=mandatory)'}, }, 'db_addon_info': { - 'db_version': {'cat': 'info', 'item_type': 'str', 'calc': 'no', 'params': False, 'description': 'Version der verbundenen Datenbank'}, - }, - 'db_addon_admin': { - 'suspend': {'cat': 'admin', 'item_type': 'bool', 'calc': 'no', 'params': False, 'description': 'Unterbricht die Aktivitäten des Plugin'}, - 'recalc_all': {'cat': 'admin', 'item_type': 'bool', 'calc': 'no', 'params': False, 'description': 'Startet einen Neuberechnungslauf aller on-demand Items'}, - 'clean_cache_values': {'cat': 'admin', 'item_type': 'bool', 'calc': 'no', 'params': False, 'description': 'Löscht Plugin-Cache und damit alle im Plugin zwischengespeicherten Werte'}, + 'db_version': {'cat': 'info', 'item_type': 'str', 'cycle': None, 'params': False, 'description': 'Version der verbundenen Datenbank'}, + }, + 'db_addon_admin': { + 'suspend': {'cat': 'admin', 'item_type': 'bool', 'cycle': None, 'params': False, 'description': 'Unterbricht die Aktivitäten des Plugin'}, + 'recalc_all': {'cat': 'admin', 'item_type': 'bool', 'cycle': None, 'params': False, 'description': 'Startet einen Neuberechnungslauf aller on-demand Items'}, + 'clean_cache_values': {'cat': 'admin', 'item_type': 'bool', 'cycle': None, 'params': False, 'description': 'Löscht Plugin-Cache und damit alle im Plugin zwischengespeicherten Werte'}, }, } diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index ed258d805..53a0f836a 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -11,8 +11,8 @@ plugin: # keywords: iot xyz # 
documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1848494-support-thread-databaseaddon-plugin - version: 1.2.8 # Plugin version (must match the version specified in __init__.py) - sh_minversion: '1.9.3.5' # minimum shNG version to use this plugin + version: 1.2.9 # Plugin version (must match the version specified in __init__.py) + sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: '3.8' # minimum Python version to use for this plugin # py_maxversion: # maximum Python version to use for this plugin (leave empty if latest) diff --git a/enocean/__init__.py b/enocean/__init__.py index dcbfb629d..2a0f9c241 100755 --- a/enocean/__init__.py +++ b/enocean/__init__.py @@ -21,172 +21,45 @@ ######################################################################### import serial -import os -import sys -import struct -import time +import logging import threading -from lib.item import Items #what for? -from . import eep_parser -from . import prepare_packet_data -from lib.model.smartplugin import * +from time import sleep + +from lib.model.smartplugin import SmartPlugin +from lib.item import Items + +from .protocol import CRC +from .protocol.eep_parser import EEP_Parser +from .protocol.packet_data import Packet_Data +from .protocol.constants import ( + PACKET, PACKET_TYPE, COMMON_COMMAND, SMART_ACK, EVENT, RETURN_CODE, RORG +) from .webif import WebInterface -FCSTAB = [ - 0x00, 0x07, 0x0e, 0x09, 0x1c, 0x1b, 0x12, 0x15, - 0x38, 0x3f, 0x36, 0x31, 0x24, 0x23, 0x2a, 0x2d, - 0x70, 0x77, 0x7e, 0x79, 0x6c, 0x6b, 0x62, 0x65, - 0x48, 0x4f, 0x46, 0x41, 0x54, 0x53, 0x5a, 0x5d, - 0xe0, 0xe7, 0xee, 0xe9, 0xfc, 0xfb, 0xf2, 0xf5, - 0xd8, 0xdf, 0xd6, 0xd1, 0xc4, 0xc3, 0xca, 0xcd, - 0x90, 0x97, 0x9e, 0x99, 0x8c, 0x8b, 0x82, 0x85, - 0xa8, 0xaf, 0xa6, 0xa1, 0xb4, 0xb3, 0xba, 0xbd, - 0xc7, 0xc0, 0xc9, 0xce, 0xdb, 0xdc, 0xd5, 0xd2, - 0xff, 0xf8, 0xf1, 0xf6, 0xe3, 0xe4, 0xed, 0xea, - 0xb7, 0xb0, 0xb9, 0xbe, 0xab, 0xac, 0xa5, 0xa2, - 0x8f, 0x88, 0x81, 0x86, 0x93, 0x94, 0x9d, 0x9a, - 0x27, 0x20, 0x29, 0x2e, 0x3b, 0x3c, 0x35, 0x32, - 0x1f, 0x18, 0x11, 0x16, 0x03, 0x04, 0x0d, 0x0a, - 0x57, 0x50, 0x59, 0x5e, 0x4b, 0x4c, 0x45, 0x42, - 0x6f, 0x68, 0x61, 0x66, 0x73, 0x74, 0x7d, 0x7a, - 0x89, 0x8e, 0x87, 0x80, 0x95, 0x92, 0x9b, 0x9c, - 0xb1, 0xb6, 0xbf, 0xb8, 0xad, 0xaa, 0xa3, 0xa4, - 0xf9, 0xfe, 0xf7, 0xf0, 0xe5, 0xe2, 0xeb, 0xec, - 0xc1, 0xc6, 0xcf, 0xc8, 0xdd, 0xda, 0xd3, 0xd4, - 0x69, 0x6e, 0x67, 0x60, 0x75, 0x72, 0x7b, 0x7c, - 0x51, 0x56, 0x5f, 0x58, 0x4d, 0x4a, 0x43, 0x44, - 0x19, 0x1e, 0x17, 0x10, 0x05, 0x02, 0x0b, 0x0c, - 0x21, 0x26, 0x2f, 0x28, 0x3d, 0x3a, 0x33, 0x34, - 0x4e, 0x49, 0x40, 0x47, 0x52, 0x55, 0x5c, 0x5b, - 0x76, 0x71, 0x78, 0x7f, 0x6A, 0x6d, 0x64, 0x63, - 0x3e, 0x39, 0x30, 0x37, 0x22, 0x25, 0x2c, 0x2b, - 0x06, 0x01, 0x08, 0x0f, 0x1a, 0x1d, 0x14, 0x13, - 0xae, 0xa9, 0xa0, 0xa7, 0xb2, 0xb5, 0xbc, 0xbb, - 0x96, 0x91, 0x98, 0x9f, 0x8a, 0x8D, 0x84, 0x83, - 0xde, 0xd9, 0xd0, 0xd7, 0xc2, 0xc5, 0xcc, 0xcb, - 0xe6, 0xe1, 0xe8, 0xef, 0xfa, 0xfd, 0xf4, 0xf3 - ] - -################################ -### --- Packet Sync Byte --- ### -################################ -PACKET_SYNC_BYTE = 0x55 # PACKET SYNC BYTE - - -############################ -### --- Packet Types --- ### -############################ - -PACKET_TYPE_RADIO = 0x01 # RADIO ERP1 -PACKET_TYPE_RESPONSE = 0x02 # RESPONSE 
-PACKET_TYPE_RADIO_SUB_TEL = 0x03 # RADIO_SUB_TEL -PACKET_TYPE_EVENT = 0x04 # EVENT -PACKET_TYPE_COMMON_COMMAND = 0x05 # COMMON COMMAND -PACKET_TYPE_SMART_ACK_COMMAND = 0x06 # SMART ACK COMMAND -PACKET_REMOTE_MAN_COMMAND = 0x07 # REMOTE MANAGEMENT COMMAND -PACKET_TYPE_RADIO_MESSAGE = 0x09 # RADIO MESSAGE -PACKET_TYPE_RADIO_ERP2 = 0x0A # RADIO ERP2 -PACKET_TYPE_RADIO_802_15_4 = 0x10 # RADIO_802_15_4 -PACKET_TYPE_COMMAND_2_4 = 0x11 # COMMAND_2_4 - - -############################################ -### --- List of Common Command Codes --- ### -############################################ - -CO_WR_SLEEP = 0x01 # Order to enter in energy saving mode -CO_WR_RESET = 0x02 # Order to reset the device -CO_RD_VERSION = 0x03 # Read the device (SW) version /(HW) version, chip ID etc. -CO_RD_SYS_LOG = 0x04 # Read system log from device databank -CO_WR_SYS_LOG = 0x05 # Reset System log from device databank -CO_WR_BIST = 0x06 # Perform built in self test -CO_WR_IDBASE = 0x07 # Write ID range base number -CO_RD_IDBASE = 0x08 # Read ID range base number -CO_WR_REPEATER = 0x09 # Write Repeater Level off,1,2 -CO_RD_REPEATER = 0x0A # Read Repeater Level off,1,2 -CO_WR_FILTER_ADD = 0x0B # Add filter to filter list -CO_WR_FILTER_DEL = 0x0C # Delete filter from filter list -CO_WR_FILTER_DEL_ALL = 0x0D # Delete all filter -CO_WR_FILTER_ENABLE = 0x0E # Enable/Disable supplied filters -CO_RD_FILTER = 0x0F # Read supplied filters -CO_WR_WAIT_MATURITY = 0x10 # Waiting till end of maturity time before received radio telegrams will transmitted -CO_WR_SUBTEL = 0x11 # Enable/Disable transmitting additional subtelegram info -CO_WR_MEM = 0x12 # Write x bytes of the Flash, XRAM, RAM0 … -CO_RD_MEM = 0x13 # Read x bytes of the Flash, XRAM, RAM0 …. -CO_RD_MEM_ADDRESS = 0x14 # Feedback about the used address and length of the configarea and the Smart Ack Table -CO_RD_SECURITY = 0x15 # Read own security information (level, key) -CO_WR_SECURITY = 0x16 # Write own security information (level, key) -CO_WR_LEARNMODE = 0x17 # Function: Enables or disables learn mode of Controller. -CO_RD_LEARNMODE = 0x18 # Function: Reads the learn-mode state of Controller. -CO_WR_SECUREDEVICE_ADD = 0x19 # Add a secure device -CO_WR_SECUREDEVICE_DEL = 0x1A # Delete a secure device -CO_RD_SECUREDEVICE_BY_INDEX = 0x1B # Read secure device by index -CO_WR_MODE = 0x1C # Sets the gateway transceiver mode -CO_RD_NUMSECUREDEVICES = 0x1D # Read number of taught in secure devices -CO_RD_SECUREDEVICE_BY_ID = 0x1E # Read secure device by ID -CO_WR_SECUREDEVICE_ADD_PSK = 0x1F # Add Pre-shared key for inbound secure device -CO_WR_SECUREDEVICE_SENDTEACHIN = 0x20 # Send secure Teach-In message -CO_WR_TEMPORARY_RLC_WINDOW = 0x21 # Set the temporary rolling-code window for every taught-in devic -CO_RD_SECUREDEVICE_PSK = 0x22 # Read PSK -CO_RD_DUTYCYCLE_LIMIT = 0x23 # Read parameters of actual duty cycle limit -CO_SET_BAUDRATE = 0x24 # Modifies the baud rate of the EnOcean device -CO_GET_FREQUENCY_INFO = 0x25 # Reads Frequency and protocol of the Device -CO_GET_STEPCODE = 0x27 # Reads Hardware Step code and Revision of the Device - - -################################### -### --- List of Event Codes --- ### -################################### - -SA_RECLAIM_NOT_SUCCESSFUL = 0x01 # Informs the backbone of a Smart Ack Client to not successful reclaim. 
-SA_CONFIRM_LEARN = 0x02 # Used for SMACK to confirm/discard learn in/out -SA_LEARN_ACK = 0x03 # Inform backbone about result of learn request -CO_READY = 0x04 # Inform backbone about the readiness for operation -CO_EVENT_SECUREDEVICES = 0x05 # Informs about a secure device -CO_DUTYCYCLE_LIMIT = 0x06 # Informs about duty cycle limit -CO_TRANSMIT_FAILED = 0x07 # Informs that the device was not able to send a telegram. - - -########################################### -### --- Smart Acknowledge Defines: --- ### -########################################### - -SA_WR_LEARNMODE = 0x01 # Set/Reset Smart Ack learn mode -SA_RD_LEARNMODE = 0x02 # Get Smart Ack learn mode state -SA_WR_LEARNCONFIRM = 0x03 # Used for Smart Ack to add or delete a mailbox of a client -SA_WR_CLIENTLEARNRQ = 0x04 # Send Smart Ack Learn request (Client) -SA_WR_RESET = 0x05 # Send reset command to a Smart Ack client -SA_RD_LEARNEDCLIENTS = 0x06 # Get Smart Ack learned sensors / mailboxes -SA_WR_RECLAIMS = 0x07 # Set number of reclaim attempts -SA_WR_POSTMASTER = 0x08 # Activate/Deactivate Post master functionality - -SENT_RADIO_PACKET = 0xFF -SENT_ENCAPSULATED_RADIO_PACKET = 0xA6 class EnOcean(SmartPlugin): ALLOW_MULTIINSTANCE = False - PLUGIN_VERSION = "1.4.0" + PLUGIN_VERSION = "1.4.2" - def __init__(self, sh): - """ - Initializes the plugin. - - """ + """ Initializes the plugin. """ # Call init code of parent class (SmartPlugin) super().__init__() - self._sh = sh self.port = self.get_parameter_value("serialport") + self._log_unknown_msg = self.get_parameter_value("log_unknown_messages") + self._connect_retries = self.get_parameter_value("retry") + self._retry_cycle = self.get_parameter_value("retry_cycle") tx_id = self.get_parameter_value("tx_id") - if (len(tx_id) < 8): + if len(tx_id) < 8: self.tx_id = 0 self.logger.warning('No valid enocean stick ID configured. 
Transmitting is not supported') else: self.tx_id = int(tx_id, 16) self.logger.info(f"Stick TX ID configured via plugin.conf to: {tx_id}") - self._log_unknown_msg = self.get_parameter_value("log_unknown_messages") +# + self._items = [] self._tcm = None self._cmd_lock = threading.Lock() self._response_lock = threading.Condition() @@ -196,421 +69,168 @@ def __init__(self, sh): self.UTE_listen = False self.unknown_sender_id = 'None' self._block_ext_out_msg = False - # call init of eep_parser - self.eep_parser = eep_parser.EEP_Parser() - # call init of prepare_packet_data - self.prepare_packet_data = prepare_packet_data.Prepare_Packet_Data(self) - - self.init_webinterface(WebInterface) - - - def eval_telegram(self, sender_id, data, opt): - logger_debug = self.logger.isEnabledFor(logging.DEBUG) - if logger_debug: - self.logger.debug("Call function << eval_telegram >>") - for item in self._items: - # validate id for item id: - if item.conf['enocean_id'] == sender_id: - #print ("validated {0} for {1}".format(sender_id,item)) - #print ("try to get value for: {0} and {1}".format(item.conf['enocean_rorg'][0],item.conf['enocean_rorg'][1])) - rorg = item.conf['enocean_rorg'] - eval_value = item.conf['enocean_value'] - if rorg in RADIO_PAYLOAD_VALUE: # check if RORG exists - pl = eval(RADIO_PAYLOAD_VALUE[rorg]['payload_idx']) - #could be nicer - for entity in RADIO_PAYLOAD_VALUE: - if (rorg == entity) and (eval_value in RADIO_PAYLOAD_VALUE[rorg]['entities']): - value_dict = RADIO_PAYLOAD_VALUE[rorg]['entities'] - value = eval(RADIO_PAYLOAD_VALUE[rorg]['entities'][eval_value]) - if logger_debug: - self.logger.debug(f"Resulting value: {value} for {item}") - if value: # not sure about this - item(value, self.get_shortname(), 'RADIO') - - def _process_packet_type_event(self, data, optional): - logger_debug = self.logger.isEnabledFor(logging.DEBUG) - if logger_debug: - self.logger.debug("call function << _process_packet_type_event >>") - event_code = data[0] - if(event_code == SA_RECLAIM_NOT_SUCCESSFUL): - self.logger.error("SA reclaim was not successful") - elif(event_code == SA_CONFIRM_LEARN): - self.logger.info("Requesting how to handle confirm/discard learn in/out") - elif(event_code == SA_LEARN_ACK): - self.logger.info("SA lern acknowledged") - elif(event_code == CO_READY): - self.logger.info("Controller is ready for operation") - elif(event_code == CO_TRANSMIT_FAILED): - self.logger.error("Telegram transmission failed") - elif(event_code == CO_DUTYCYCLE_LIMIT): - self.logger.warning("Duty cycle limit reached") - elif(event_code == CO_EVENT_SECUREDEVICES): - self.logger.info("Secure device event packet received") - else: - self.logger.warning("Unknown event packet received") - - def _rocker_sequence(self, item, sender_id, sequence): - logger_debug = self.logger.isEnabledFor(logging.DEBUG) - if logger_debug: - self.logger.debug("Call function << _rocker_sequence >>") - try: - for step in sequence: - event, relation, delay = step.split() - #self.logger.debug("waiting for {} {} {}".format(event, relation, delay)) - if item._enocean_rs_events[event.upper()].wait(float(delay)) != (relation.upper() == "WITHIN"): - if logger_debug: - self.logger.debug(f"NOT {step} - aborting sequence!") - return - else: - if logger_debug: - self.logger.debug(f"{step}") - item._enocean_rs_events[event.upper()].clear() - continue - value = True - if 'enocean_rocker_action' in item.conf: - if item.conf['enocean_rocker_action'].upper() == "UNSET": - value = False - elif item.conf['enocean_rocker_action'].upper() == "TOGGLE": - 
value = not item() - item(value, self.get_shortname(), "{:08X}".format(sender_id)) - except Exception as e: - self.logger.error(f'Error handling enocean_rocker_sequence \"{sequence}\" - {e}'.format(sequence, e)) - - def _process_packet_type_radio(self, data, optional): - logger_debug = self.logger.isEnabledFor(logging.DEBUG) - if logger_debug: - self.logger.debug("Call function << _process_packet_type_radio >>") - #self.logger.warning("Processing radio message with data = [{}] / optional = [{}]".format(', '.join(['0x%02x' % b for b in data]), ', '.join(['0x%02x' % b for b in optional]))) - - choice = data[0] - payload = data[1:-5] - sender_id = int.from_bytes(data[-5:-1], byteorder='big', signed=False) - status = data[-1] - repeater_cnt = status & 0x0F - self.logger.info("Radio message: choice = {:02x} / payload = [{}] / sender_id = {:08X} / status = {} / repeat = {}".format(choice, ', '.join(['0x%02x' % b for b in payload]), sender_id, status, repeater_cnt)) - - if (len(optional) == 7): - subtelnum = optional[0] - dest_id = int.from_bytes(optional[1:5], byteorder='big', signed=False) - dBm = -optional[5] - SecurityLevel = optional[6] - if logger_debug: - self.logger.debug("Radio message with additional info: subtelnum = {} / dest_id = {:08X} / signal = {}dBm / SecurityLevel = {}".format(subtelnum, dest_id, dBm, SecurityLevel)) - if (choice == 0xD4) and (self.UTE_listen == True): - self.logger.info("Call send_UTE_response") - self._send_UTE_response(data, optional) - if sender_id in self._rx_items: - if logger_debug: - self.logger.debug("Sender ID found in item list") - # iterate over all eep known for this id and get list of associated items - for eep,items in self._rx_items[sender_id].items(): - # check if choice matches first byte in eep (this seems to be the only way to find right eep for this particular packet) - if eep.startswith("{:02X}".format(choice)): - # call parser for particular eep - returns dictionary with key-value pairs - results = self.eep_parser.Parse(eep, payload, status) - if logger_debug: - self.logger.debug(f"Radio message results = {results}") - if 'DEBUG' in results: - self.logger.warning("DEBUG Info: processing radio message with data = [{}] / optional = [{}]".format(', '.join(['0x%02x' % b for b in data]), ', '.join(['0x%02x' % b for b in optional]))) - self.logger.warning(f"Radio message results = {results}") - self.logger.warning("Radio message: choice = {:02x} / payload = [{}] / sender_id = {:08X} / status = {} / repeat = {}".format(choice, ', '.join(['0x%02x' % b for b in payload]), sender_id, status, repeater_cnt)) + self.log_for_debug = self.logger.isEnabledFor(logging.DEBUG) - for item in items: - rx_key = item.conf['enocean_rx_key'].upper() - if rx_key in results: - if 'enocean_rocker_sequence' in item.conf: - try: - if hasattr(item, '_enocean_rs_thread') and item._enocean_rs_thread.is_alive(): - if results[rx_key]: - if logger_debug: - self.logger.debug("Sending pressed event") - item._enocean_rs_events["PRESSED"].set() - else: - if logger_debug: - self.logger.debug("Sending released event") - item._enocean_rs_events["RELEASED"].set() - elif results[rx_key]: - item._enocean_rs_events = {'PRESSED': threading.Event(), 'RELEASED': threading.Event()} - item._enocean_rs_thread = threading.Thread(target=self._rocker_sequence, name="enocean-rs", args=(item, sender_id, item.conf['enocean_rocker_sequence'].split(','), )) - #self.logger.info("starting enocean_rocker_sequence thread") - item._enocean_rs_thread.daemon = True - item._enocean_rs_thread.start() - 
except Exception as e: - self.logger.error(f"Error handling enocean_rocker_sequence: {e}") - else: - item(results[rx_key], self.get_shortname(), "{:08X}".format(sender_id)) - elif (sender_id <= self.tx_id + 127) and (sender_id >= self.tx_id): - if logger_debug: - self.logger.debug("Received repeated enocean stick message") - else: - self.unknown_sender_id = "{:08X}".format(sender_id) - if self._log_unknown_msg: - self.logger.info("Unknown ID = {:08X}".format(sender_id)) - self.logger.warning("Unknown device sent radio message: choice = {:02x} / payload = [{}] / sender_id = {:08X} / status = {} / repeat = {}".format(choice, ', '.join(['0x%02x' % b for b in payload]), sender_id, status, repeater_cnt)) - - - def _process_packet_type_smart_ack_command(self, data, optional): - self.logger.warning("Smart acknowledge command 0x06 received but not supported at the moment") + # init eep_parser + self.eep_parser = EEP_Parser(self.logger) + # init prepare_packet_data + self.prepare_packet_data = Packet_Data(self) - def _process_packet_type_response(self, data, optional): - logger_debug = self.logger.isEnabledFor(logging.DEBUG) - if logger_debug: - self.logger.debug("Call function << _process_packet_type_response >>") - RETURN_CODES = ['OK', 'ERROR', 'NOT SUPPORTED', 'WRONG PARAM', 'OPERATION DENIED'] - if (self._last_cmd_code == SENT_RADIO_PACKET) and (len(data) == 1): - if logger_debug: - self.logger.debug(f"Sending command returned code = {RETURN_CODES[data[0]]}") - elif (self._last_packet_type == PACKET_TYPE_COMMON_COMMAND) and (self._last_cmd_code == CO_WR_RESET) and (len(data) == 1): - self.logger.info(f"Reset returned code = {RETURN_CODES[data[0]]}") - elif (self._last_packet_type == PACKET_TYPE_COMMON_COMMAND) and (self._last_cmd_code == CO_WR_LEARNMODE) and (len(data) == 1): - self.logger.info(f"Write LearnMode returned code = {RETURN_CODES[data[0]]}") - elif (self._last_packet_type == PACKET_TYPE_COMMON_COMMAND) and (self._last_cmd_code == CO_RD_VERSION): - if (data[0] == 0) and (len(data) == 33): - self.logger.info("Chip ID = 0x{} / Chip Version = 0x{}".format(''.join(['%02x' % b for b in data[9:13]]), ''.join(['%02x' % b for b in data[13:17]]))) - self.logger.info("APP version = {} / API version = {} / App description = {}".format('.'.join(['%d' % b for b in data[1:5]]), '.'.join(['%d' % b for b in data[5:9]]), ''.join(['%c' % b for b in data[17:33]]))) - elif (data[0] == 0) and (len(data) == 0): - self.logger.error("Reading version: No answer") - else: - self.logger.error(f"Reading version returned code = {RETURN_CODES[data[0]]}, length = {len(data)}") - elif (self._last_packet_type == PACKET_TYPE_COMMON_COMMAND) and (self._last_cmd_code == CO_RD_IDBASE): - if (data[0] == 0) and (len(data) == 5): - self.logger.info("Base ID = 0x{}".format(''.join(['%02x' % b for b in data[1:5]]))) - if (self.tx_id == 0): - self.tx_id = int.from_bytes(data[1:5], byteorder='big', signed=False) - self.logger.info("Transmit ID set set automatically by reading chips BaseID") - if (len(optional) == 1): - self.logger.info(f"Remaining write cycles for Base ID = {optional[0]}") - elif (data[0] == 0) and (len(data) == 0): - self.logger.error("Reading Base ID: No answer") - else: - self.logger.error(f"Reading Base ID returned code = {RETURN_CODES[data[0]]} and {len(data)} bytes") - elif (self._last_packet_type == PACKET_TYPE_COMMON_COMMAND) and (self._last_cmd_code == CO_WR_BIST): - if (data[0] == 0) and (len(data) == 2): - if (data[1] == 0): - self.logger.info("Built in self test result: All OK") - else: - 
self.logger.info(f"Built in self test result: Problem, code = {data[1]}") - elif (data[0] == 0) and (len(data) == 0): - self.logger.error("Doing built in self test: No answer") - else: - self.logger.error(f"Doing built in self test returned code = {RETURN_CODES[data[0]]}") - elif (self._last_packet_type == PACKET_TYPE_COMMON_COMMAND) and (self._last_cmd_code == CO_RD_LEARNMODE): - if (data[0] == 0) and (len(data) == 2): - self.logger.info("Reading LearnMode = 0x{}".format(''.join(['%02x' % b for b in data[1]]))) - if (len(optional) == 1): - self.logger.info("Learn channel = {}".format(optional[0])) - elif (data[0] == 0) and (len(data) == 0): - self.logger.error("Reading LearnMode: No answer") - elif (self._last_packet_type == PACKET_TYPE_COMMON_COMMAND) and (self._last_cmd_code == CO_RD_NUMSECUREDEVICES): - if (data[0] == 0) and (len(data) == 2): - self.logger.info("Number of taught in devices = 0x{}".format(''.join(['%02x' % b for b in data[1]]))) - elif (data[0] == 0) and (len(data) == 0): - self.logger.error("Reading NUMSECUREDEVICES: No answer") - elif (data[0] == 2) and (len(data) == 1): - self.logger.error("Reading NUMSECUREDEVICES: Command not supported") - else: - self.logger.error("Reading NUMSECUREDEVICES: Unknown error") - elif (self._last_packet_type == PACKET_TYPE_SMART_ACK_COMMAND) and (self._last_cmd_code == SA_WR_LEARNMODE): - self.logger.info(f"Setting SmartAck mode returned code = {RETURN_CODES[data[0]]}") - elif (self._last_packet_type == PACKET_TYPE_SMART_ACK_COMMAND) and (self._last_cmd_code == SA_RD_LEARNEDCLIENTS): - if (data[0] == 0): - self.logger.info(f"Number of smart acknowledge mailboxes = {int((len(data)-1)/9)}") - else: - self.logger.error(f"Requesting SmartAck mailboxes returned code = {RETURN_CODES[data[0]]}") - else: - self.logger.error("Processing unexpected response with return code = {} / data = [{}] / optional = [{}]".format(RETURN_CODES[data[0]], ', '.join(['0x%02x' % b for b in data]), ', '.join(['0x%02x' % b for b in optional]))) - self._response_lock.acquire() - self._response_lock.notify() - self._response_lock.release() + # init crc parser + self.crc = CRC() - def _startup(self): - self.logger.debug("Call function << _startup >>") - # request one time information - self.logger.info("Resetting device") - self._send_common_command(CO_WR_RESET) - self.logger.info("Requesting id-base") - self._send_common_command(CO_RD_IDBASE) - self.logger.info("Requesting version information") - self._send_common_command(CO_RD_VERSION) - self.logger.debug("Ending connect-thread") + self.init_webinterface(WebInterface) def run(self): - logger_debug = self.logger.isEnabledFor(logging.DEBUG) - if logger_debug: - self.logger.debug("Call function << run >>") + if self.log_for_debug: + self.logger.debug("Run method called") + self.alive = True self.UTE_listen = False - - # open serial or serial2TCP device: - try: - self._tcm = serial.serial_for_url(self.port, 57600, timeout=1.5) - except Exception as e: - self._tcm = None - self._init_complete = False - self.logger.error(f"Exception occurred during serial open: {e}") - return - else: - self.logger.info(f"Serial port successfully opened at port {self.port}") - - t = threading.Thread(target=self._startup, name="enocean-startup") - # if you need to create child threads, do not make them daemon = True! - # They will not shutdown properly. 
(It's a python bug) - t.daemon = False - t.start() msg = [] + while self.alive: + + # just try connecting anytime the serial object is not initialized + connect_count = 0 + while self._tcm is None and self.alive: + if self._connect_retries > 0 and connect_count >= self._connect_retries: + self.alive = False + break + if not self.connect(): + connect_count += 1 + self.logger.info(f'connecting failed {connect_count} times. Retrying after 5 seconds...') + sleep(self._retry_cycle) + + # main loop, read from device + readin = None try: readin = self._tcm.read(1000) except Exception as e: - self.logger.error(f"Exception during tcm read occurred: {e}") - break - else: - if readin: - msg += readin - if logger_debug: - self.logger.debug("Data received") - # check if header is complete (6bytes including sync) - # 0x55 (SYNC) + 4bytes (HEADER) + 1byte(HEADER-CRC) - while (len(msg) >= 6): - #check header for CRC - if (msg[0] == PACKET_SYNC_BYTE) and (self._calc_crc8(msg[1:5]) == msg[5]): - # header bytes: sync; length of data (2); optional length; packet type; crc - data_length = (msg[1] << 8) + msg[2] - opt_length = msg[3] - packet_type = msg[4] - msg_length = data_length + opt_length + 7 - if logger_debug: - self.logger.debug("Received header with data_length = {} / opt_length = 0x{:02x} / type = {}".format(data_length, opt_length, packet_type)) - - # break if msg is not yet complete: - if (len(msg) < msg_length): - break - - # msg complete - if (self._calc_crc8(msg[6:msg_length - 1]) == msg[msg_length - 1]): - if logger_debug: - self.logger.debug("Accepted package with type = 0x{:02x} / len = {} / data = [{}]!".format(packet_type, msg_length, ', '.join(['0x%02x' % b for b in msg]))) - data = msg[6:msg_length - (opt_length + 1)] - optional = msg[(6 + data_length):msg_length - 1] - if (packet_type == PACKET_TYPE_RADIO): - self._process_packet_type_radio(data, optional) - elif (packet_type == PACKET_TYPE_SMART_ACK_COMMAND): - self._process_packet_type_smart_ack_command(data, optional) - elif (packet_type == PACKET_TYPE_RESPONSE): - self._process_packet_type_response(data, optional) - elif (packet_type == PACKET_TYPE_EVENT): - self._process_packet_type_event(data, optional) - else: - self.logger.error("Received packet with unknown type = 0x{:02x} - len = {} / data = [{}]".format(packet_type, msg_length, ', '.join(['0x%02x' % b for b in msg]))) + if self.alive: + self.logger.error(f"Exception during tcm read occurred: {e}") + # reset serial device + try: + self._tcm.close() + except Exception: + pass + self._tcm = None + continue + + if readin: + msg += readin + if self.log_for_debug: + self.logger.debug(f"Data received: {readin}") + # check if header is complete (6bytes including sync) + # 0x55 (SYNC) + 4bytes (HEADER) + 1byte(HEADER-CRC) + while len(msg) >= 6: + # check header for CRC + if msg[0] == PACKET.SYNC_BYTE and msg[5] == self.crc(msg[1:5]): + # header bytes: sync; length of data (2); optional length; packet type; crc + data_length = (msg[1] << 8) + msg[2] + opt_length = msg[3] + packet_type = msg[4] + msg_length = data_length + opt_length + 7 + if self.log_for_debug: + self.logger.debug(f"Received header with data_length = {data_length} / opt_length = 0x{opt_length:02x} / type = {packet_type}") + + # break if msg is not yet complete: + if len(msg) < msg_length: + break + + # msg complete + if self.crc(msg[6:msg_length - 1]) == msg[msg_length - 1]: + if self.log_for_debug: + self.logger.debug("Accepted package with type = 0x{:02x} / len = {} / data = [{}]!".format(packet_type, msg_length, ', 
'.join(['0x%02x' % b for b in msg]))) + data = msg[6:msg_length - (opt_length + 1)] + optional = msg[data_length + 6:msg_length - 1] + if packet_type == PACKET_TYPE.RADIO: + self._process_packet_type_radio(data, optional) + elif packet_type == PACKET_TYPE.SMART_ACK_COMMAND: + self._process_packet_type_smart_ack_command(data, optional) + elif packet_type == PACKET_TYPE.RESPONSE: + self._process_packet_type_response(data, optional) + elif packet_type == PACKET_TYPE.EVENT: + self._process_packet_type_event(data, optional) else: - self.logger.error("Crc error - dumping packet with type = 0x{:02x} / len = {} / data = [{}]!".format(packet_type, msg_length, ', '.join(['0x%02x' % b for b in msg]))) - msg = msg[msg_length:] + self.logger.error("Received packet with unknown type = 0x{:02x} - len = {} / data = [{}]".format(packet_type, msg_length, ', '.join(['0x%02x' % b for b in msg]))) else: - #self.logger.warning("Consuming [0x{:02x}] from input buffer!".format(msg[0])) - msg.pop(0) - try: - self._tcm.close() - except Exception as e: - self.logger.error(f"Exception during tcm close occured: {e}") - else: - self.logger.info(f"Enocean serial device closed") - self.logger.info("Run method stopped") - - def stop(self): - self.logger.debug("Call function << stop >>") - self.alive = False - - def get_tx_id_as_hex(self): - hexstring = "{:08X}".format(self.tx_id) - return hexstring - - def get_serial_status_as_string(self): - if (self._tcm and self._tcm.is_open): - return "open" - else: - return "not connected" - - def get_log_unknown_msg(self): - return self._log_unknown_msg + self.logger.error("Crc error - dumping packet with type = 0x{:02x} / len = {} / data = [{}]!".format(packet_type, msg_length, ', '.join(['0x%02x' % b for b in msg]))) + msg = msg[msg_length:] + else: + # self.logger.warning("Consuming [0x{:02x}] from input buffer!".format(msg[0])) + msg.pop(0) - def toggle_log_unknown_msg(self): - self._log_unknown_msg = not self._log_unknown_msg - - def _send_UTE_response(self, data, optional): - self.logger.debug("Call function << _send_UTE_response >>") - choice = data[0] - payload = data[1:-5] - #sender_id = int.from_bytes(data[-5:-1], byteorder='big', signed=False) - #status = data[-1] - #repeater_cnt = status & 0x0F - SubTel = 0x03 - db = 0xFF - Secu = 0x0 + # self.alive is False or connect error caused loop exit + self.stop() - self._send_radio_packet(self.learn_id, choice, [0x91, payload[1], payload[2], payload[3], payload[4], payload[5], payload[6]],[SubTel, data[-5], data[-4], data[-3], data[-2], db, Secu] )#payload[0] = 0x91: EEP Teach-in response, Request accepted, teach-in successful, bidirectional - self.UTE_listen = False - self.logger.info("Sending UTE response and end listening") + def stop(self): + self.logger.debug("Stop method called") + self.alive = False + self.disconnect() def parse_item(self, item): - self.logger.debug("Call function << parse_item >>") + self.logger.debug("parse_item method called") if 'enocean_rx_key' in item.conf: # look for info from the most specific info to the broadest (key->eep->id) - one id might use multiple eep might define multiple keys eep_item = item found_eep = True - while (not 'enocean_rx_eep' in eep_item.conf): + while 'enocean_rx_eep' not in eep_item.conf: eep_item = eep_item.return_parent() - if (eep_item is self._sh): + if eep_item is Items.get_instance(): self.logger.error(f"Could not find enocean_rx_eep for item {item}") found_eep = False + break id_item = eep_item found_rx_id = True - while (not 'enocean_rx_id' in id_item.conf): + 
while 'enocean_rx_id' not in id_item.conf: id_item = id_item.return_parent() - if (id_item is self._sh): + if id_item is Items.get_instance(): self.logger.error(f"Could not find enocean_rx_id for item {item}") found_rx_id = False + break # Only proceed, if valid rx_id and eep could be found: if found_rx_id and found_eep: rx_key = item.conf['enocean_rx_key'].upper() rx_eep = eep_item.conf['enocean_rx_eep'].upper() - rx_id = int(id_item.conf['enocean_rx_id'],16) + rx_id = int(id_item.conf['enocean_rx_id'], 16) # check if there is a function to parse payload if self.eep_parser.CanParse(rx_eep): - + if (rx_key in ['A0', 'A1', 'B0', 'B1']): - self.logger.warning(f"Key \"{rx_key}\" does not match EEP - \"0\" (Zero, number) should be \"O\" (letter) (same for \"1\" and \"I\") - will be accepted for now") + self.logger.warning(f'Key "{rx_key}" does not match EEP - "0" (Zero, number) should be "O" (letter) (same for "1" and "I") - will be accepted for now') rx_key = rx_key.replace('0', 'O').replace("1", 'I') - if (not rx_id in self._rx_items): + if rx_id not in self._rx_items: self._rx_items[rx_id] = {rx_eep: [item]} - elif (not rx_eep in self._rx_items[rx_id]): + elif rx_eep not in self._rx_items[rx_id]: self._rx_items[rx_id][rx_eep] = [item] - elif (not item in self._rx_items[rx_id][rx_eep]): + elif item not in self._rx_items[rx_id][rx_eep]: self._rx_items[rx_id][rx_eep].append(item) - self.logger.info("Item {} listens to id {:08X} with eep {} key {}".format(item, rx_id, rx_eep, rx_key)) - #self.logger.info(f"self._rx_items = {self._rx_items}") - + self.logger.info(f"Item {item} listens to id {rx_id:08X} with eep {rx_eep} key {rx_key}") + # self.logger.info(f"self._rx_items = {self._rx_items}") + if 'enocean_tx_eep' in item.conf: self.logger.debug(f"TX eep found in item {item._name}") - - if not 'enocean_tx_id_offset' in item.conf: + + if 'enocean_tx_id_offset' not in item.conf: self.logger.error(f"TX eep found for item {item._name} but no tx id offset specified.") return tx_offset = item.conf['enocean_tx_id_offset'] - if not (tx_offset in self._used_tx_offsets): + if tx_offset not in self._used_tx_offsets: self._used_tx_offsets.append(tx_offset) self._used_tx_offsets.sort() self.logger.debug(f"Debug offset list: {self._used_tx_offsets}") for x in range(1, 127): - if not x in self._used_tx_offsets: + if x not in self._used_tx_offsets: self._unused_tx_offset = x self.logger.debug(f"Next free offset set to {self._unused_tx_offset}") break @@ -620,59 +240,101 @@ def parse_item(self, item): # register item for event handling via smarthomeNG core. Needed for sending control actions: return self.update_item - - def update_item(self, item, caller=None, source=None, dest=None): - logger_debug = self.logger.isEnabledFor(logging.DEBUG) - if logger_debug: - self.logger.debug("Call function << update_item >>") + if self.log_for_debug: + self.logger.debug("update_item method called") - #self.logger.warning(f"Debug: update item: caller: {caller}, shortname: {self.get_shortname()}, item: {item.id()}") + # self.logger.warning(f"Debug: update item: caller: {caller}, shortname: {self.get_shortname()}, item: {item.id()}") if caller != self.get_shortname(): - if logger_debug: - self.logger.debug(f'Item << {item} >> updated externally.') + if self.log_for_debug: + self.logger.debug(f'Item {item} updated externally.') if self._block_ext_out_msg: - self.logger.warning('Sending manually blocked by user. Aborting') - return None + self.logger.warning('Transmitting manually blocked by user. 
Aborting') + return if 'enocean_tx_eep' in item.conf: if isinstance(item.conf['enocean_tx_eep'], str): tx_eep = item.conf['enocean_tx_eep'] - if logger_debug: + if self.log_for_debug: self.logger.debug(f'item << {item} >> has tx_eep') # check if Data can be Prepared - if not self.prepare_packet_data.CanDataPrepare(tx_eep): + if not self.prepare_packet_data.CanPrepareData(tx_eep): self.logger.error(f'enocean-update_item: method missing for prepare telegram data for {tx_eep}') else: # call method prepare_packet_data(item, tx_eep) id_offset, rorg, payload, optional = self.prepare_packet_data.PrepareData(item, tx_eep) self._send_radio_packet(id_offset, rorg, payload, optional) else: - self.logger.error(f'tx_eep {tx_eep} is not a string value') + self.logger.error('tx_eep is not a string value') else: - if logger_debug: - self.logger.debug(f'Item << {item} >>has no tx_eep value') + if self.log_for_debug: + self.logger.debug(f'Item {item} has no tx_eep value') - def read_num_securedivices(self): - self.logger.debug("Call function << read_num_securedivices >>") - self._send_common_command(CO_RD_NUMSECUREDEVICES) - self.logger.info("Read number of secured devices") + def connect(self, startup=True): + """ open serial or serial2TCP device """ + self.logger.debug(f'trying to connect to device at {self.port}') + try: + self._tcm = serial.serial_for_url(self.port, 57600, timeout=1.5) + except Exception as e: + self._tcm = None + self.logger.error(f"Exception occurred during serial open: {e}") + return False + else: + self.logger.info(f"Serial port successfully opened at port {self.port}") + +# why startup in separate thread? time to startup? collision with receiving? + if startup: + t = threading.Thread(target=self._startup, name="enocean-startup") + t.daemon = False + t.start() + return True + + def disconnect(self): + """ close serial or serial2TCP device """ + try: + self._tcm.close() + except Exception: + pass + self.logger.info("Enocean serial device closed") + + def _startup(self): + """ send startup sequence to device """ + self.logger.debug("_startup method called") + + # request one time information + self.logger.info("Resetting device") + self._send_common_command(COMMON_COMMAND.WR_RESET) + self.logger.info("Requesting id-base") + self._send_common_command(COMMON_COMMAND.RD_IDBASE) + self.logger.info("Requesting version information") + self._send_common_command(COMMON_COMMAND.RD_VERSION) + self.logger.debug("Ending startup-thread") + +# +# public EnOcean interface methods +# + + def read_num_securedevices(self): + """ read number of secure devices """ + self.logger.debug("read_num_securedevices method called") + self._send_common_command(COMMON_COMMAND.RD_NUMSECUREDEVICES) + self.logger.info("Read number of secured devices") - # Request all taught in smart acknowledge devices that have a mailbox def get_smart_ack_devices(self): - self.logger.debug("Call function << get_smart_ack_devices >>") - self._send_smart_ack_command(SA_RD_LEARNEDCLIENTS) + """ request all smart acknowledge devices """ + self.logger.debug("get_smart_ack_devices method called") + self._send_smart_ack_command(SMART_ACK.RD_LEARNEDCLIENTS) self.logger.info("Requesting all available smart acknowledge mailboxes") - def reset_stick(self): - self.logger.debug("Call function << reset_stick >>") + """ reset EnOcean transmitter """ + self.logger.debug("reset_stick method called") self.logger.info("Resetting device") - self._send_common_command(CO_WR_RESET) + self._send_common_command(COMMON_COMMAND.WR_RESET) def 
block_external_out_messages(self, block=True): - self.logger.debug("Call function << block_external_out_messages >>") + self.logger.debug("block_external_out_messages method called") if block: self.logger.info("Blocking of external out messages activated") self._block_ext_out_msg = True @@ -683,213 +345,435 @@ def block_external_out_messages(self, block=True): self.logger.error("Invalid argument. Must be True/False") def toggle_block_external_out_messages(self): - self.logger.debug("Call function << toggle block_external_out_messages >>") - if self._block_ext_out_msg == False: + self.logger.debug("toggle block_external_out_messages method called") + if not self._block_ext_out_msg: self.logger.info("Blocking of external out messages activated") self._block_ext_out_msg = True else: self.logger.info("Blocking of external out messages deactivated") self._block_ext_out_msg = False - def toggle_UTE_mode(self,id_offset=0): + def toggle_UTE_mode(self, id_offset=0): self.logger.debug("Toggle UTE mode") - if self.UTE_listen == True: + if self.UTE_listen: self.logger.info("UTE mode deactivated") self.UTE_listen = False - elif (id_offset is not None) and not (id_offset == 0): + elif id_offset: self.start_UTE_learnmode(id_offset) - self.logger.info("UTE mode activated for ID offset") + self.logger.info(f"UTE mode activated for ID offset {id_offset}") def send_bit(self): self.logger.info("Trigger Built-In Self Test telegram") - self._send_common_command(CO_WR_BIST) + self._send_common_command(COMMON_COMMAND.WR_BIST) def version(self): self.logger.info("Request stick version") - self._send_common_command(CO_RD_VERSION) + self._send_common_command(COMMON_COMMAND.RD_VERSION) - def _send_packet(self, packet_type, data=[], optional=[]): - #self.logger.debug("Call function << _send_packet >>") - length_optional = len(optional) - if length_optional > 255: - self.logger.error(f"Optional too long ({length_optional} bytes, 255 allowed)") - return None - length_data = len(data) - if length_data > 65535: - self.logger.error(f"Data too long ({length_data} bytes, 65535 allowed)") - return None +# +# Utility methods +# - packet = bytearray([PACKET_SYNC_BYTE]) - packet += length_data.to_bytes(2, byteorder='big') + bytes([length_optional, packet_type]) - packet += bytes([self._calc_crc8(packet[1:5])]) - packet += bytes(data + optional) - packet += bytes([self._calc_crc8(packet[6:])]) - self.logger.info("Sending packet with len = {} / data = [{}]!".format(len(packet), ', '.join(['0x%02x' % b for b in packet]))) - - # Send out serial data: - if not (self._tcm and self._tcm.is_open): - self.logger.debug("Trying serial reinit") - try: - self._tcm = serial.serial_for_url(self.port, 57600, timeout=1.5) - except Exception as e: - self._tcm = None - self.logger.error(f"Exception occurred during serial reinit: {e}") - else: - self.logger.debug("Serial reinit successful") - if self._tcm: - try: - self._tcm.write(packet) - except Exception as e: - self.logger.error(f"Exception during tcm write occurred: {e}") - self.logger.debug("Trying serial reinit after failed write") - try: - self._tcm = serial.serial_for_url(self.port, 57600, timeout=1.5) - except Exception as e: - self._tcm = None - self.logger.error(f"Exception occurred during serial reinit after failed write: {e}") - else: - self.logger.debug("Serial reinit successful after failed write") - try: - self._tcm.write(packet) - except Exception as e: - self.logger.error(f"Exception occurred during tcm write after successful serial reinit: {e}") - - def 
_send_smart_ack_command(self, _code, data=[]): - #self.logger.debug("Call function << _send_smart_ack_command >>") + def get_tx_id_as_hex(self): + hexstring = "{:08X}".format(self.tx_id) + return hexstring + + def is_connected(self): + return self._tcm and self._tcm.is_open + + def get_serial_status_as_string(self): + return "open" if self.is_connected() else "not connected" + + def get_log_unknown_msg(self): + return self._log_unknown_msg + + def toggle_log_unknown_msg(self): + self._log_unknown_msg = not self._log_unknown_msg + +# +# (private) packet / protocol methods +# + + def _send_smart_ack_command(self, code, data=[]): + # self.logger.debug("_send_smart_ack_command method called") self._cmd_lock.acquire() - self._last_cmd_code = _code - self._last_packet_type = PACKET_TYPE_SMART_ACK_COMMAND - self._send_packet(PACKET_TYPE_SMART_ACK_COMMAND, [_code] + data) + self._last_cmd_code = code + self._last_packet_type = PACKET_TYPE.SMART_ACK_COMMAND + self._send_packet(PACKET_TYPE.SMART_ACK_COMMAND, [code] + data) self._response_lock.acquire() # wait 5sec for response self._response_lock.wait(5) self._response_lock.release() self._cmd_lock.release() - def _send_common_command(self, _code, data=[], optional=[]): - #self.logger.debug("Call function << _send_common_command >>") + def _send_common_command(self, code, data=[], optional=[]): + # self.logger.debug("_send_common_command method called") self._cmd_lock.acquire() - self._last_cmd_code = _code - self._last_packet_type = PACKET_TYPE_COMMON_COMMAND - self._send_packet(PACKET_TYPE_COMMON_COMMAND, [_code] + data, optional) + self._last_cmd_code = code + self._last_packet_type = PACKET_TYPE.COMMON_COMMAND + self._send_packet(PACKET_TYPE.COMMON_COMMAND, [code] + data, optional) self._response_lock.acquire() # wait 5sec for response self._response_lock.wait(5) self._response_lock.release() self._cmd_lock.release() - def _send_radio_packet(self, id_offset, _code, data=[], optional=[]): - #self.logger.debug("Call function << _send_radio_packet >>") + def _send_radio_packet(self, id_offset, code, data=[], optional=[]): + # self.logger.debug("_send_radio_packet method called") if (id_offset < 0) or (id_offset > 127): self.logger.error(f"Invalid base ID offset range. 
(Is {id_offset}, must be [0 127])") return self._cmd_lock.acquire() - self._last_cmd_code = SENT_RADIO_PACKET - self._send_packet(PACKET_TYPE_RADIO, [_code] + data + list((self.tx_id + id_offset).to_bytes(4, byteorder='big')) + [0x00], optional) + self._last_cmd_code = PACKET.SENT_RADIO + self._send_packet(PACKET_TYPE.RADIO, [code] + data + list((self.tx_id + id_offset).to_bytes(4, byteorder='big')) + [0x00], optional) self._response_lock.acquire() # wait 1sec for response self._response_lock.wait(1) self._response_lock.release() self._cmd_lock.release() - - + def _send_UTE_response(self, data, optional): + self.logger.debug("_send_UTE_response method called") + choice = data[0] + payload = data[1:-5] + # sender_id = int.from_bytes(data[-5:-1], byteorder='big', signed=False) + # status = data[-1] + # repeater_cnt = status & 0x0F + db = 0xFF + Secu = 0x0 + # payload[0] = 0x91: EEP Teach-in response, Request accepted, teach-in successful, bidirectional + self._send_radio_packet(self.learn_id, choice, [0x91, payload[1], payload[2], payload[3], payload[4], payload[5], payload[6]], [PACKET_TYPE.RADIO_SUB_TEL, data[-5], data[-4], data[-3], data[-2], db, Secu] ) + self.UTE_listen = False + self.logger.info("Sent UTE response and ended listening") + + def _rocker_sequence(self, item, sender_id, sequence): + if self.log_for_debug: + self.logger.debug("_rocker_sequence method called") + try: + for step in sequence: + event, relation, delay = step.split() + # self.logger.debug("waiting for {} {} {}".format(event, relation, delay)) + if item._enocean_rs_events[event.upper()].wait(float(delay)) != (relation.upper() == "WITHIN"): + if self.log_for_debug: + self.logger.debug(f"NOT {step} - aborting sequence!") + return + else: + if self.log_for_debug: + self.logger.debug(f"{step}") + item._enocean_rs_events[event.upper()].clear() + continue + value = True + if 'enocean_rocker_action' in item.conf: + if item.conf['enocean_rocker_action'].upper() == "UNSET": + value = False + elif item.conf['enocean_rocker_action'].upper() == "TOGGLE": + value = not item() + item(value, self.get_shortname(), "{:08X}".format(sender_id)) + except Exception as e: + self.logger.error(f'Error handling enocean_rocker_sequence \"{sequence}\" - {e}') + + def _send_packet(self, packet_type, data=[], optional=[]): + # self.logger.debug("_send_packet method called") + length_optional = len(optional) + if length_optional > 255: + self.logger.error(f"Optional too long ({length_optional} bytes, 255 allowed)") + return + length_data = len(data) + if length_data > 65535: + self.logger.error(f"Data too long ({length_data} bytes, 65535 allowed)") + return + + packet = bytearray([PACKET.SYNC_BYTE]) + packet += length_data.to_bytes(2, byteorder='big') + bytes([length_optional, packet_type]) + packet += bytes([self.crc(packet[1:5])]) + packet += bytes(data + optional) + packet += bytes([self.crc(packet[6:])]) + self.logger.info("Sending packet with len = {} / data = [{}]!".format(len(packet), ', '.join(['0x%02x' % b for b in packet]))) + + # check connection, reconnect + if not self.is_connected(): + self.logger.debug("Trying serial reinit") + if not self.connect(startup=False): + self.logger.error('Connection failed, not sending.') + return + try: + self._tcm.write(packet) + return + except Exception as e: + self.logger.error(f"Exception during tcm write occurred: {e}") + self.logger.debug("Trying serial reinit after failed write") + + if not self.connect(startup=False): + self.logger.error('Connection failed again, not sending. 
Giving up.') + return + + try: + self._tcm.write(packet) + except Exception as e: + self.logger.error(f"Writing failed twice, giving up: {e}") + + def _process_packet_type_event(self, data, optional): + if self.log_for_debug: + self.logger.debug("_process_packet_type_event method called") + event_code = data[0] + if event_code == EVENT.RECLAIM_NOT_SUCCESSFUL: + self.logger.error("SA reclaim was not successful") + elif event_code == EVENT.CONFIRM_LEARN: + self.logger.info("Requesting how to handle confirm/discard learn in/out") + elif event_code == EVENT.LEARN_ACK: + self.logger.info("SA learn acknowledged") + elif event_code == EVENT.READY: + self.logger.info("Controller is ready for operation") + elif event_code == EVENT.TRANSMIT_FAILED: + self.logger.error("Telegram transmission failed") + elif event_code == EVENT.DUTYCYCLE_LIMIT: + self.logger.warning("Duty cycle limit reached") + elif event_code == EVENT.EVENT_SECUREDEVICES: + self.logger.info("Secure device event packet received") + else: + self.logger.warning("Unknown event packet received") + + def _process_packet_type_radio(self, data, optional): + if self.log_for_debug: + self.logger.debug("_process_packet_type_radio method called") + # self.logger.warning("Processing radio message with data = [{}] / optional = [{}]".format(', '.join(['0x%02x' % b for b in data]), ', '.join(['0x%02x' % b for b in optional]))) + + choice = data[0] + payload = data[1:-5] + sender_id = int.from_bytes(data[-5:-1], byteorder='big', signed=False) + status = data[-1] + repeater_cnt = status & 0x0F + self.logger.info("Radio message: choice = {:02x} / payload = [{}] / sender_id = {:08X} / status = {} / repeat = {}".format(choice, ', '.join(['0x%02x' % b for b in payload]), sender_id, status, repeater_cnt)) + + if len(optional) == 7: + subtelnum = optional[0] + dest_id = int.from_bytes(optional[1:5], byteorder='big', signed=False) + dBm = -optional[5] + SecurityLevel = optional[6] + if self.log_for_debug: + self.logger.debug(f"Radio message with additional info: subtelnum = {subtelnum} / dest_id = {dest_id:08X} / signal = {dBm} dBm / SecurityLevel = {SecurityLevel}") + if choice == 0xD4 and self.UTE_listen: + self.logger.info("Call send_UTE_response") + self._send_UTE_response(data, optional) + + if sender_id in self._rx_items: + if self.log_for_debug: + self.logger.debug("Sender ID found in item list") + # iterate over all eep known for this id and get list of associated items + for eep, items in self._rx_items[sender_id].items(): + # check if choice matches first byte in eep (this seems to be the only way to find right eep for this particular packet) + if eep.startswith("{:02X}".format(choice)): + # call parser for particular eep - returns dictionary with key-value pairs + results = self.eep_parser(eep, payload, status) + if self.log_for_debug: + self.logger.debug(f"Radio message results = {results}") + if 'DEBUG' in results: + self.logger.warning("DEBUG Info: processing radio message with data = [{}] / optional = [{}]".format(', '.join(['0x%02x' % b for b in data]), ', '.join(['0x%02x' % b for b in optional]))) + self.logger.warning(f"Radio message results = {results}") + self.logger.warning("Radio message: choice = {:02x} / payload = [{}] / sender_id = {:08X} / status = {} / repeat = {}".format(choice, ', '.join(['0x%02x' % b for b in payload]), sender_id, status, repeater_cnt)) + + for item in items: + rx_key = item.conf['enocean_rx_key'].upper() + if rx_key in results: + if 'enocean_rocker_sequence' in item.conf: + try: + if hasattr(item,
'_enocean_rs_thread') and item._enocean_rs_thread.is_alive(): + if results[rx_key]: + if self.log_for_debug: + self.logger.debug("Sending pressed event") + item._enocean_rs_events["PRESSED"].set() + else: + if self.log_for_debug: + self.logger.debug("Sending released event") + item._enocean_rs_events["RELEASED"].set() + elif results[rx_key]: + item._enocean_rs_events = {'PRESSED': threading.Event(), 'RELEASED': threading.Event()} + item._enocean_rs_thread = threading.Thread(target=self._rocker_sequence, name="enocean-rs", args=(item, sender_id, item.conf['enocean_rocker_sequence'].split(','), )) + # self.logger.info("starting enocean_rocker_sequence thread") + item._enocean_rs_thread.start() + except Exception as e: + self.logger.error(f"Error handling enocean_rocker_sequence: {e}") + else: + item(results[rx_key], self.get_shortname(), f"{sender_id:08X}") + elif sender_id <= self.tx_id + 127 and sender_id >= self.tx_id: + if self.log_for_debug: + self.logger.debug("Received repeated enocean stick message") + else: + self.unknown_sender_id = f"{sender_id:08X}" + if self._log_unknown_msg: + self.logger.info(f"Unknown ID = {sender_id:08X}") + self.logger.warning("Unknown device sent radio message: choice = {:02x} / payload = [{}] / sender_id = {:08X} / status = {} / repeat = {}".format(choice, ', '.join(['0x%02x' % b for b in payload]), sender_id, status, repeater_cnt)) + + def _process_packet_type_smart_ack_command(self, data, optional): + self.logger.warning("Smart acknowledge command 0x06 received but not supported at the moment") + + def _process_packet_type_response(self, data, optional): + if self.log_for_debug: + self.logger.debug("_process_packet_type_response method called") + + # handle sent packet + if self._last_cmd_code == PACKET.SENT_RADIO and len(data) == 1: + + if self.log_for_debug: + self.logger.debug(f"Sending command returned code = {RETURN_CODE(data[0])}") + + # handle common commands + elif self._last_packet_type == PACKET_TYPE.COMMON_COMMAND: + + if self._last_cmd_code == COMMON_COMMAND.WR_RESET and len(data) == 1: + self.logger.info(f"Reset returned code = {RETURN_CODE(data[0])}") + + elif self._last_cmd_code == COMMON_COMMAND.WR_LEARNMODE and len(data) == 1: + self.logger.info(f"Write LearnMode returned code = {RETURN_CODE(data[0])}") + + elif self._last_cmd_code == COMMON_COMMAND.RD_VERSION: + if data[0] == 0 and len(data) == 33: + self.logger.info("Chip ID = 0x{} / Chip Version = 0x{}".format(''.join(['%02x' % b for b in data[9:13]]), ''.join(['%02x' % b for b in data[13:17]]))) + self.logger.info("APP version = {} / API version = {} / App description = {}".format('.'.join(['%d' % b for b in data[1:5]]), '.'.join(['%d' % b for b in data[5:9]]), ''.join(['%c' % b for b in data[17:33]]))) + elif data[0] == 0 and len(data) == 0: + self.logger.error("Reading version: No answer") + else: + self.logger.error(f"Reading version returned code = {RETURN_CODE(data[0])}, length = {len(data)}") + + elif self._last_cmd_code == COMMON_COMMAND.RD_IDBASE: + if data[0] == 0 and len(data) == 5: + self.logger.info("Base ID = 0x{}".format(''.join(['%02x' % b for b in data[1:5]]))) + if self.tx_id == 0: + self.tx_id = int.from_bytes(data[1:5], byteorder='big', signed=False) + self.logger.info("Transmit ID set automatically by reading the chip's Base ID") + if len(optional) == 1: + self.logger.info(f"Remaining write cycles for Base ID = {optional[0]}") + elif data[0] == 0 and len(data) == 0: + self.logger.error("Reading Base ID: No answer") + else: + self.logger.error(f"Reading Base ID
returned code = {RETURN_CODE(data[0])} and {len(data)} bytes") + + elif self._last_cmd_code == COMMON_COMMAND.WR_BIST: + if data[0] == 0 and len(data) == 2: + if data[1] == 0: + self.logger.info("Built in self test result: All OK") + else: + self.logger.info(f"Built in self test result: Problem, code = {data[1]}") + elif data[0] == 0 and len(data) == 0: + self.logger.error("Doing built in self test: No answer") + else: + self.logger.error(f"Doing built in self test returned code = {RETURN_CODE(data[0])}") + + elif self._last_cmd_code == COMMON_COMMAND.RD_LEARNMODE: + if data[0] == 0 and len(data) == 2: + self.logger.info("Reading LearnMode = 0x{:02x}".format(data[1])) + if len(optional) == 1: + self.logger.info("Learn channel = {}".format(optional[0])) + elif data[0] == 0 and len(data) == 0: + self.logger.error("Reading LearnMode: No answer") + + elif self._last_cmd_code == COMMON_COMMAND.RD_NUMSECUREDEVICES: + if data[0] == 0 and len(data) == 2: + self.logger.info("Number of taught in devices = 0x{:02x}".format(data[1])) + elif data[0] == 0 and len(data) == 0: + self.logger.error("Reading NUMSECUREDEVICES: No answer") + elif data[0] == 2 and len(data) == 1: + self.logger.error("Reading NUMSECUREDEVICES: Command not supported") + else: + self.logger.error("Reading NUMSECUREDEVICES: Unknown error") + elif self._last_packet_type == PACKET_TYPE.SMART_ACK_COMMAND: + + # handle SmartAck commands + if self._last_cmd_code == SMART_ACK.WR_LEARNMODE: + self.logger.info(f"Setting SmartAck mode returned code = {RETURN_CODE(data[0])}") + + elif self._last_cmd_code == SMART_ACK.RD_LEARNEDCLIENTS: + if data[0] == 0: + self.logger.info(f"Number of smart acknowledge mailboxes = {int((len(data)-1)/9)}") + else: + self.logger.error(f"Requesting SmartAck mailboxes returned code = {RETURN_CODE(data[0])}") + else: + self.logger.error("Processing unexpected response with return code = {} / data = [{}] / optional = [{}]".format(RETURN_CODE(data[0]), ', '.join(['0x%02x' % b for b in data]), ', '.join(['0x%02x' % b for b in optional]))) + + self._response_lock.acquire() + self._response_lock.notify() + self._response_lock.release() + +# +# Definitions of Learn Methods +# -#################################################### -### --- START - Definitions of Learn Methods --- ### -#################################################### def send_learn_protocol(self, id_offset=0, device=10): - self.logger.debug("Call function << send_learn_protocol >>") + self.logger.debug("send_learn_protocol method called") # define RORG - rorg = 0xA5 - + rorg = RORG.BS4 + # check offset range between 0 and 127 - if (id_offset < 0) or (id_offset > 127): + if not 0 <= id_offset <= 127: self.logger.error(f'ID offset with value = {id_offset} out of range (0-127). 
Aborting.') return False + # device range 10 - 19 --> Learn protocol for switch actuators - elif (device == 10): + if device == 10: + # Prepare Data for Eltako switch FSR61, Eltako FSVA-230V payload = [0xE0, 0x40, 0x0D, 0x80] self.logger.info('Sending learn telegram for switch command with [Device], [ID-Offset], [RORG], [payload] / [{}], [{:#04x}], [{:#04x}], [{}]'.format(device, id_offset, rorg, ', '.join('{:#04x}'.format(x) for x in payload))) + # device range 20 - 29 --> Learn protocol for dim actuators - elif (device == 20): + elif device == 20: + # Only for Eltako FSUD-230V payload = [0x02, 0x00, 0x00, 0x00] self.logger.info('Sending learn telegram for dim command with [Device], [ID-Offset], [RORG], [payload] / [{}], [{:#04x}], [{:#04x}], [{}]'.format(device, id_offset, rorg, ', '.join('{:#04x}'.format(x) for x in payload))) - elif (device == 21): + elif device == 21: + # For Eltako FHK61SSR dim device (EEP A5-38-08) payload = [0xE0, 0x40, 0x0D, 0x80] self.logger.info('Sending learn telegram for dim command with [Device], [ID-Offset], [RORG], [payload] / [{}], [{:#04x}], [{:#04x}], [{}]'.format(device, id_offset, rorg, ', '.join('{:#04x}'.format(x) for x in payload))) - elif (device == 22): + elif device == 22: + # For Eltako FRGBW71L RGB dim devices (EEP 07-3F-7F) payload = [0xFF, 0xF8, 0x0D, 0x87] self.logger.info('Sending learn telegram for rgbw dim command with [Device], [ID-Offset], [RORG], [payload] / [{}], [{:#04x}], [{:#04x}], [{}]'.format(device, id_offset, rorg, ', '.join('{:#04x}'.format(x) for x in payload))) + # device range 30 - 39 --> Learn protocol for radiator valves - elif (device == 30): + elif device == 30: + # Radiator Valve payload = [0x00, 0x00, 0x00, 0x00] self.logger.info('Sending learn telegram for radiator valve with [Device], [ID-Offset], [RORG], [payload] / [{}], [{:#04x}], [{:#04x}], [{}]'.format(device, id_offset, rorg, ', '.join('{:#04x}'.format(x) for x in payload))) + # device range 40 - 49 --> Learn protocol for other actuators - elif (device == 40): + elif device == 40: + # Eltako shutter actor FSB14, FSB61, FSB71 payload = [0xFF, 0xF8, 0x0D, 0x80] self.logger.info('Sending learn telegram for actuator with [Device], [ID-Offset], [RORG], [payload] / [{}], [{:#04x}], [{:#04x}], [{}]'.format(device, id_offset, rorg, ', '.join('{:#04x}'.format(x) for x in payload))) else: - self.logger.error(f'Sending learn telegram with invalid device! Device {device} actually not defined!') + self.logger.error(f'Sending learn telegram with invalid device! 
Device {device} currently not defined!') return False + # Send radio package self._send_radio_packet(id_offset, rorg, payload) return True - def start_UTE_learnmode(self, id_offset=0): - self.logger.debug("Call function << start_UTE_learnmode >>") + self.logger.debug("start_UTE_learnmode method called") self.UTE_listen = True self.learn_id = id_offset self.logger.info("Listening for UTE package ('D4')") - - + def enter_learn_mode(self, onoff=1): - self.logger.debug("Call function << enter_learn_mode >>") - if (onoff == 1): - self._send_common_command(CO_WR_LEARNMODE, [0x01, 0x00, 0x00, 0x00, 0x00],[0xFF]) + self.logger.debug("enter_learn_mode method called") + if onoff == 1: + self._send_common_command(COMMON_COMMAND.WR_LEARNMODE, [0x01, 0x00, 0x00, 0x00, 0x00], [0xFF]) self.logger.info("Entering learning mode") - return None else: - self._send_common_command(CO_WR_LEARNMODE, [0x00, 0x00, 0x00, 0x00, 0x00],[0xFF]) + self._send_common_command(COMMON_COMMAND.WR_LEARNMODE, [0x00, 0x00, 0x00, 0x00, 0x00], [0xFF]) self.logger.info("Leaving learning mode") - return None - # This function enables/disables the controller's smart acknowledge mode def set_smart_ack_learn_mode(self, onoff=1): - self.logger.debug("Call function << set_smart_ack_learn_mode >>") - if (onoff == 1): - self._send_smart_ack_command(SA_WR_LEARNMODE, [0x01, 0x00, 0x00, 0x00, 0x00, 0x00]) + self.logger.debug("set_smart_ack_learn_mode method called") + if onoff == 1: + self._send_smart_ack_command(SMART_ACK.WR_LEARNMODE, [0x01, 0x00, 0x00, 0x00, 0x00, 0x00]) self.logger.info("Enabling smart acknowledge learning mode") - return None else: - self._send_smart_ack_command(SA_WR_LEARNMODE, [0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) + self._send_smart_ack_command(SMART_ACK.WR_LEARNMODE, [0x00, 0x00, 0x00, 0x00, 0x00, 0x00]) self.logger.info("Disabling smart acknowledge learning mode") - return None - -################################################## -### --- END - Definitions of Learn Methods --- ### -################################################## - - -################################# -### --- START - Calc CRC8 --- ### -################################# - def _calc_crc8(self, msg, crc=0): - #self.logger.debug("Call function << _calc_crc8 >>") - for i in msg: - crc = FCSTAB[crc ^ i] - return crc - -############################### -### --- END - Calc CRC8 --- ### -############################### - - diff --git a/enocean/plugin.yaml b/enocean/plugin.yaml index c1742e146..443a10827 100755 --- a/enocean/plugin.yaml +++ b/enocean/plugin.yaml @@ -16,11 +16,11 @@ plugin: # url of the support thread support: https://knx-user-forum.de/forum/supportforen/smarthome-py/26542-featurewunsch-enocean-plugin/page13 - version: 1.4.0 # Plugin version - sh_minversion: 1.3 # minimum shNG version to use this plugin + version: 1.4.2 # Plugin version + sh_minversion: '1.9' # minimum shNG version to use this plugin #sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) - multi_instance: False # plugin supports multi instance - restartable: True + multi_instance: false # plugin supports multi instance + restartable: true classname: EnOcean # class containing the plugin parameters: @@ -46,6 +46,19 @@ parameters: en: 'Log messages from unknown devices to logfile' default: 'False' + retry: + type: int + description: + de: 'Anzahl der Verbindungsversuche (0 = kein Limit)' + en: 'Number of connect retries (0 = no limit)' + default: 10 + + retry_cycle: + type: int + description: + de: 'Pause zwischen Verbindungsversuchen (in Sekunden)' + 
en: 'pause interval between connect retries (in seconds)' + default: 5 item_attributes: # Definition of item attributes defined by this plugin diff --git a/enocean/protocol/__init__.py b/enocean/protocol/__init__.py new file mode 100644 index 000000000..c54f0df93 --- /dev/null +++ b/enocean/protocol/__init__.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +######################################################################### +# Enocean plugin for SmartHomeNG. https://github.com/smarthomeNG// +# +# This plugin is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This plugin is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this plugin. If not, see . +######################################################################### + +# this module contains EnOcean protocol routines + + +class CRC(): + """ provides CRC calculations """ + + CRC_TABLE = ( + 0x00, 0x07, 0x0e, 0x09, 0x1c, 0x1b, 0x12, 0x15, + 0x38, 0x3f, 0x36, 0x31, 0x24, 0x23, 0x2a, 0x2d, + 0x70, 0x77, 0x7e, 0x79, 0x6c, 0x6b, 0x62, 0x65, + 0x48, 0x4f, 0x46, 0x41, 0x54, 0x53, 0x5a, 0x5d, + 0xe0, 0xe7, 0xee, 0xe9, 0xfc, 0xfb, 0xf2, 0xf5, + 0xd8, 0xdf, 0xd6, 0xd1, 0xc4, 0xc3, 0xca, 0xcd, + 0x90, 0x97, 0x9e, 0x99, 0x8c, 0x8b, 0x82, 0x85, + 0xa8, 0xaf, 0xa6, 0xa1, 0xb4, 0xb3, 0xba, 0xbd, + 0xc7, 0xc0, 0xc9, 0xce, 0xdb, 0xdc, 0xd5, 0xd2, + 0xff, 0xf8, 0xf1, 0xf6, 0xe3, 0xe4, 0xed, 0xea, + 0xb7, 0xb0, 0xb9, 0xbe, 0xab, 0xac, 0xa5, 0xa2, + 0x8f, 0x88, 0x81, 0x86, 0x93, 0x94, 0x9d, 0x9a, + 0x27, 0x20, 0x29, 0x2e, 0x3b, 0x3c, 0x35, 0x32, + 0x1f, 0x18, 0x11, 0x16, 0x03, 0x04, 0x0d, 0x0a, + 0x57, 0x50, 0x59, 0x5e, 0x4b, 0x4c, 0x45, 0x42, + 0x6f, 0x68, 0x61, 0x66, 0x73, 0x74, 0x7d, 0x7a, + 0x89, 0x8e, 0x87, 0x80, 0x95, 0x92, 0x9b, 0x9c, + 0xb1, 0xb6, 0xbf, 0xb8, 0xad, 0xaa, 0xa3, 0xa4, + 0xf9, 0xfe, 0xf7, 0xf0, 0xe5, 0xe2, 0xeb, 0xec, + 0xc1, 0xc6, 0xcf, 0xc8, 0xdd, 0xda, 0xd3, 0xd4, + 0x69, 0x6e, 0x67, 0x60, 0x75, 0x72, 0x7b, 0x7c, + 0x51, 0x56, 0x5f, 0x58, 0x4d, 0x4a, 0x43, 0x44, + 0x19, 0x1e, 0x17, 0x10, 0x05, 0x02, 0x0b, 0x0c, + 0x21, 0x26, 0x2f, 0x28, 0x3d, 0x3a, 0x33, 0x34, + 0x4e, 0x49, 0x40, 0x47, 0x52, 0x55, 0x5c, 0x5b, + 0x76, 0x71, 0x78, 0x7f, 0x6A, 0x6d, 0x64, 0x63, + 0x3e, 0x39, 0x30, 0x37, 0x22, 0x25, 0x2c, 0x2b, + 0x06, 0x01, 0x08, 0x0f, 0x1a, 0x1d, 0x14, 0x13, + 0xae, 0xa9, 0xa0, 0xa7, 0xb2, 0xb5, 0xbc, 0xbb, + 0x96, 0x91, 0x98, 0x9f, 0x8a, 0x8D, 0x84, 0x83, + 0xde, 0xd9, 0xd0, 0xd7, 0xc2, 0xc5, 0xcc, 0xcb, + 0xe6, 0xe1, 0xe8, 0xef, 0xfa, 0xfd, 0xf4, 0xf3 + ) + + def __call__(self, msg, crc=0): + for i in msg: + crc = self.CRC_TABLE[crc ^ i] + return crc diff --git a/enocean/protocol/constants.py b/enocean/protocol/constants.py new file mode 100644 index 000000000..6f74c9c65 --- /dev/null +++ b/enocean/protocol/constants.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### 
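The CRC class above is what the reworked _send_packet() uses to frame outgoing ESP3 packets: sync byte, four header bytes, a CRC8 over the header, the data and optional fields, and a closing CRC8 over those. A minimal sketch of that framing, assuming the CRC class and the PACKET / PACKET_TYPE / COMMON_COMMAND enums introduced in this change (the import paths shown are an assumption and depend on how the plugin package is resolved at runtime):

    # Sketch only: frames a packet the same way _send_packet() in this PR does.
    from enocean.protocol import CRC                       # assumed import path
    from enocean.protocol.constants import PACKET, PACKET_TYPE, COMMON_COMMAND

    crc = CRC()

    def build_esp3_packet(packet_type, data, optional=None):
        optional = optional or []
        packet = bytearray([PACKET.SYNC_BYTE])             # 0x55
        packet += len(data).to_bytes(2, byteorder='big')   # data length, big endian
        packet += bytes([len(optional), packet_type])      # optional length, packet type
        packet += bytes([crc(packet[1:5])])                # CRC8 over the 4 header bytes
        packet += bytes(data + optional)
        packet += bytes([crc(packet[6:])])                 # CRC8 over data + optional
        return packet

    # e.g. the version request sent during _startup():
    # build_esp3_packet(PACKET_TYPE.COMMON_COMMAND, [COMMON_COMMAND.RD_VERSION])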
+######################################################################### +# Enocean plugin for SmartHomeNG. https://github.com/smarthomeNG// +# +# This plugin is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This plugin is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this plugin. If not, see . +######################################################################### + +from enum import IntEnum + + +class PACKET(IntEnum): + """ generic packet identifiers """ + SYNC_BYTE = 0x55 + SENT_RADIO = 0xFF + SENT_ENCAPSULATED_RADIO = 0xA6 + + +class RORG(IntEnum): + """ encapsulates EEP types from EnOcean Equipment Profiles v2.61 """ + UNDEFINED = 0x00 + RPS = 0xF6 + BS1 = 0xD5 + BS4 = 0xA5 + VLD = 0xD2 + MSC = 0xD1 + ADT = 0xA6 + SM_LRN_REQ = 0xC6 + SM_LRN_ANS = 0xC7 + SM_REC = 0xA7 + SYS_EX = 0xC5 + SEC = 0x30 + SEC_ENCAPS = 0x31 + UTE = 0xD4 + + +class PACKET_TYPE(IntEnum): + """ encapsulates packet types """ + RESERVED = 0x00 + RADIO = 0x01 # RADIO ERP1 + RADIO_ERP1 = 0x01 # RADIO ERP1 => Kept for backwards compatibility reasons, for example custom packet. Generation shouldn't be affected... + RESPONSE = 0x02 # RESPONSE + RADIO_SUB_TEL = 0x03 # RADIO_SUB_TEL + EVENT = 0x04 # EVENT + COMMON_COMMAND = 0x05 # COMMON COMMAND + SMART_ACK_COMMAND = 0x06 # SMART ACK COMMAND + REMOTE_MAN_COMMAND = 0x07 # REMOTE MANAGEMENT COMMAND + RADIO_MESSAGE = 0x09 # RADIO MESSAGE + RADIO_ERP2 = 0x0A # RADIO ERP2 + RADIO_802_15_4 = 0x10 # RADIO_802_15_4_RAW_Packet + COMMAND_2_4 = 0x11 # COMMAND 2.4 GHz + + +class EVENT(IntEnum): + """ encapsulates Event Codes """ + RECLAIM_NOT_SUCCESSFUL = 0x01 # Informs the backbone of a Smart Ack Client to not successful reclaim. + CONFIRM_LEARN = 0x02 # Used for SMACK to confirm/discard learn in/out + LEARN_ACK = 0x03 # Inform backbone about result of learn request + READY = 0x04 # Inform backbone about the readiness for operation + EVENT_SECUREDEVICES = 0x05 # Informs about a secure device + DUTYCYCLE_LIMIT = 0x06 # Informs about duty cycle limit + TRANSMIT_FAILED = 0x07 # Informs that the device was not able to send a telegram. + TX_DONE = 0x08 # Informs the external host that the device has finished all transmissions. + LRN_MODE_DISABLED = 0x09 # Informs the external host that the learn mode has been disabled due to timeout. + + +class COMMON_COMMAND(IntEnum): + """ encapsulates Common Command Codes """ + WR_SLEEP = 0x01 # Enter in energy saving mode + WR_RESET = 0x02 # Reset the device + RD_VERSION = 0x03 # Read the device (SW) version /(HW) version, chip ID etc. 
+ RD_SYS_LOG = 0x04 # Read system log from device databank + WR_SYS_LOG = 0x05 # Reset System log from device databank + WR_BIST = 0x06 # Perform built in self test + WR_IDBASE = 0x07 # Write ID range base number + RD_IDBASE = 0x08 # Read ID range base number + WR_REPEATER = 0x09 # Write Repeater Level off,1,2 + RD_REPEATER = 0x0A # Read Repeater Level off,1,2 + WR_FILTER_ADD = 0x0B # Add filter to filter list + WR_FILTER_DEL = 0x0C # Delete filter from filter list + WR_FILTER_DEL_ALL = 0x0D # Delete all filter + WR_FILTER_ENABLE = 0x0E # Enable/Disable supplied filters + RD_FILTER = 0x0F # Read supplied filters + WR_WAIT_MATURITY = 0x10 # Waiting till end of maturity time before received radio telegrams will transmitted + WR_SUBTEL = 0x11 # Enable/Disable transmitting additional subtelegram info + WR_MEM = 0x12 # Write x bytes of the Flash, XRAM, RAM0 … + RD_MEM = 0x13 # Read x bytes of the Flash, XRAM, RAM0 …. + RD_MEM_ADDRESS = 0x14 # Feedback about the used address and length of the configarea and the Smart Ack Table + RD_SECURITY = 0x15 # Read own security information (level, key) + WR_SECURITY = 0x16 # Write own security information (level, key) + WR_LEARNMODE = 0x17 # Function: Enables or disables learn mode of Controller. + RD_LEARNMODE = 0x18 # Function: Reads the learn-mode state of Controller. + WR_SECUREDEVICE_ADD = 0x19 # Add a secure device + WR_SECUREDEVICE_DEL = 0x1A # Delete a secure device + RD_SECUREDEVICE_BY_INDEX = 0x1B # Read secure device by index + WR_MODE = 0x1C # Sets the gateway transceiver mode + RD_NUMSECUREDEVICES = 0x1D # Read number of taught in secure devices + RD_SECUREDEVICE_BY_ID = 0x1E # Read secure device by ID + WR_SECUREDEVICE_ADD_PSK = 0x1F # Add Pre-shared key for inbound secure device + WR_SECUREDEVICE_SENDTEACHIN = 0x20 # Send secure Teach-In message + WR_TEMPORARY_RLC_WINDOW = 0x21 # Set the temporary rolling-code window for every taught-in devic + RD_SECUREDEVICE_PSK = 0x22 # Read PSK + RD_DUTYCYCLE_LIMIT = 0x23 # Read parameters of actual duty cycle limit + SET_BAUDRATE = 0x24 # Modifies the baud rate of the EnOcean device + GET_FREQUENCY_INFO = 0x25 # Reads Frequency and protocol of the Device + GET_STEPCODE = 0x27 # Reads Hardware Step code and Revision of the Device + WR_REMAN_CODE = 0x2E # Set the security code to unlock Remote Management functionality via radio + WR_STARTUP_DELAY = 0x2F # Set the startup delay (time from power up until start of operation) + WR_REMAN_REPEATING = 0x30 # Select if REMAN telegrams originating from this module can be repeated + RD_REMAN_REPEATING = 0x31 # Check if REMAN telegrams originating from this module can be repeated + SET_NOISETHRESHOLD = 0x32 # Set the RSSI noise threshold level for telegram reception + GET_NOISETHRESHOLD = 0x33 # Read the RSSI noise threshold level for telegram reception + WR_RLC_SAVE_PERIOD = 0x36 # Set the period in which outgoing RLCs are saved to the EEPROM + WR_RLC_LEGACY_MODE = 0x37 # Activate the legacy RLC security mode allowing roll-over and using the RLC acceptance window for 24bit explicit RLC + WR_SECUREDEVICEV2_ADD = 0x38 # Add secure device to secure link table + RD_SECUREDEVICEV2_BY_INDEX = 0x39 # Read secure device from secure link table using the table index + WR_RSSITEST_MODE = 0x3A # Control the state of the RSSI-Test mode + RD_RSSITEST_MODE = 0x3B # Read the state of the RSSI-Test mode + WR_SECUREDEVICE_MAINTENANCEKEY = 0x3C # Add the maintenance key information into the secure link table + RD_SECUREDEVICE_MAINTENANCEKEY = 0x3D # Read by index the maintenance key 
information from the secure link table + WR_TRANSPARENT_MODE = 0x3E # Control the state of the transparent mode + RD_TRANSPARENT_MODE = 0x3F # Read the state of the transparent mode + WR_TX_ONLY_MODE = 0x40 # Control the state of the TX only mode + RD_TX_ONLY_MODE = 0x41 # Read the state of the TX only mode + + +class SMART_ACK(IntEnum): + """ encapsulates Smart Acknowledge codes """ + WR_LEARNMODE = 0x01 # Set/Reset Smart Ack learn mode + RD_LEARNMODE = 0x02 # Get Smart Ack learn mode state + WR_LEARNCONFIRM = 0x03 # Used for Smart Ack to add or delete a mailbox of a client + WR_CLIENTLEARNRQ = 0x04 # Send Smart Ack Learn request (Client) + WR_RESET = 0x05 # Send reset command to a Smart Ack client + RD_LEARNEDCLIENTS = 0x06 # Get Smart Ack learned sensors / mailboxes + WR_RECLAIMS = 0x07 # Set number of reclaim attempts + WR_POSTMASTER = 0x08 # Activate/Deactivate Post master functionality + + +class RETURN_CODE(IntEnum): + """ encapsulates return codes """ + OK = 0x00 + ERROR = 0x01 + NOT_SUPPORTED = 0x02 + WRONG_PARAM = 0x03 + OPERATION_DENIED = 0x04 + + +class PARSE_RESULT(IntEnum): + """ encapsulates parsing return codes """ + OK = 0x00 + INCOMPLETE = 0x01 + CRC_MISMATCH = 0x03 diff --git a/enocean/eep_parser.py b/enocean/protocol/eep_parser.py similarity index 71% rename from enocean/eep_parser.py rename to enocean/protocol/eep_parser.py index e017f7014..d9d90def3 100755 --- a/enocean/eep_parser.py +++ b/enocean/protocol/eep_parser.py @@ -1,26 +1,58 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2013-2014 Robert Budde robert@ing-budde.de +# Copyright 2014 Alexander Schwithal aschwith +######################################################################### +# Enocean plugin for SmartHomeNG. https://github.com/smarthomeNG// +# +# This plugin is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This plugin is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this plugin. If not, see . 
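A note on the constants module above: replacing the old bare module-level constants (CO_*, SA_*, PACKET_TYPE_*) with IntEnum classes is drop-in compatible with the byte-level comparisons used throughout the plugin, because IntEnum members compare equal to plain integers. A short illustration (the two members below are repeated locally only for the example; the authoritative definitions are the ones in constants.py above):

    from enum import IntEnum

    class PACKET_TYPE(IntEnum):                      # excerpt of the enum defined above
        RADIO = 0x01
        RESPONSE = 0x02

    raw_type = 0x01                                  # as read from an ESP3 header byte
    assert raw_type == PACKET_TYPE.RADIO             # int vs. IntEnum comparison holds
    assert PACKET_TYPE(0x02) is PACKET_TYPE.RESPONSE
    assert PACKET_TYPE.RESPONSE.name == 'RESPONSE'   # handy for readable logging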
+######################################################################### + import logging + class EEP_Parser(): - def __init__(self): + def __init__(self, plg_logger=None): self.logger = logging.getLogger(__name__) - self.logger.info('Eep-parser instantiated') + self.logger.info('EEP-parser instantiated') + + # create plugin logger for errors + self.plg_logger = plg_logger + if not self.plg_logger: + self.plg_logger = self.logger def CanParse(self, eep): found = callable(getattr(self, "_parse_eep_" + eep, None)) - if (not found): + if not found: self.logger.error(f"eep-parser: missing parser for eep {eep} - there should be a _parse_eep_{eep}-function!") return found - def Parse(self, eep, payload, status): - #self.logger.debug('Parser called with eep = {} / payload = {} / status = {}'.format(eep, ', '.join(hex(x) for x in payload), hex(status))) - results = getattr(self, "_parse_eep_" + eep)(payload, status) - #self.logger.info('Parser returns {results}') + def __call__(self, eep, payload, status): + # self.logger.debug('Parser called with eep = {} / payload = {} / status = {}'.format(eep, ', '.join(hex(x) for x in payload), hex(status))) + try: + results = getattr(self, "_parse_eep_" + eep)(payload, status) + except Exception as e: + self.plg_logger.warning(f'EEP-Parser: error on parsing eep {eep}: {e}') + return + + # self.logger.info('Parser returns {results}') return results -##################################################### -### --- Definitions for RORG = A5 / ORG = 07 --- ### -##################################################### +# Definitions for RORG = A5 / ORG = 07 + def _parse_eep_A5_02_01(self, payload, status): return {'TMP': (0 - (payload[2] * 40 / 255))} @@ -112,23 +144,23 @@ def _parse_eep_A5_04_02(self, payload, status): # temperature in degree Celsius from -20.0 degC - 60degC result['TMP'] = -20.0 + (payload[2] / 250.0 * 80.0) return result - + def _parse_eep_A5_06_01(self, payload, status): # Brightness sensor, for example Eltako FAH60 self.logger.debug('Parsing A5_06_01: Brightness sensor') result = {} # Calculation of brightness in lux - if (payload[3] == 0x0F) and (payload[1] > 0x00) and (payload[1] <= 0xFF): + if payload[3] == 0x0F and payload[1] > 0x00 and payload[1] <= 0xFF: # If Data-Messege AND DataByte 2 is between: 0x00 = 300 lux and 0xFF = 30.000 lux result['BRI'] = round(((payload[1] / 255.0 * (30000 - 300)) + 300), 2) - elif (payload[3] == 0x0F) and (payload[1] == 0x00): + elif payload[3] == 0x0F and payload[1] == 0x00: # If Data-Messege AND DataByte 2: 0x00 then read DataByte 3 - result['BRI'] = (payload[0]) + result['BRI'] = payload[0] else: # No Data Message - result['BRI'] = (-1) + result['BRI'] = -1 # only trigger the logger info when 'BRI' > 0 - if (result['BRI'] > 0): + if result['BRI'] > 0: self.logger.info(f"Brightness: {result['BRI']}") return result @@ -136,7 +168,7 @@ def _parse_eep_A5_07_03(self, payload, status): # Occupancy sensor with supply voltage monitor, NodOne self.logger.debug("Parsing A5_07_03: Occupancy sensor") result = {} - is_data = ((payload[3] & 0x08) == 0x08) # learn or data telegeram: 1:data, 0:learn + is_data = (payload[3] & 0x08) == 0x08 # learn or data telegeram: 1:data, 0:learn if not is_data: self.logger.info("Occupancy sensor: Received learn telegram.") return result @@ -144,9 +176,9 @@ def _parse_eep_A5_07_03(self, payload, status): if payload[0] > 250: self.logger.error(f"Occupancy sensor issued error code: {payload[0]}") else: - result['SVC'] = (payload[0] / 255.0 * 5.0) # supply voltage in volts - result['ILL'] 
= (payload[1] << 2) + ((payload[2] & 0xC0) >> 6) # 10 bit illumination in lux - result['PIR'] = ((payload[3] & 0x80) == 0x80) # Movement flag, 1:motion detected + result['SVC'] = payload[0] / 255.0 * 5.0 # supply voltage in volts + result['ILL'] = (payload[1] << 2) + ((payload[2] & 0xC0) >> 6) # 10 bit illumination in lux + result['PIR'] = (payload[3] & 0x80) == 0x80 # Movement flag, 1:motion detected self.logger.debug(f"Occupancy: PIR:{result['PIR']} illumination: {result['ILL']}lx, voltage: {result['SVC']}V") return result @@ -154,9 +186,9 @@ def _parse_eep_A5_08_01(self, payload, status): # Brightness and movement sensor, for example eltako FBH65TFB self.logger.debug("Parsing A5_08_01: Movement sensor") result = {} - result['BRI'] = (payload[1] / 255.0 * 2048) # brightness in lux - result['MOV'] = not ((payload[3] & 0x02) == 0x02) # movement - #self.logger.debug(f"Movement: {result['MOV']}, brightness: {result['BRI']}") + result['BRI'] = payload[1] / 255.0 * 2048 # brightness in lux + result['MOV'] = not (payload[3] & 0x02) == 0x02 # movement + # self.logger.debug(f"Movement: {result['MOV']}, brightness: {result['BRI']}") return result def _parse_eep_A5_11_04(self, payload, status): @@ -168,14 +200,14 @@ def _parse_eep_A5_11_04(self, payload, status): # Data_byte0 = 0x08 = Dimmer aus, 0x09 = Dimmer an self.logger.debug("Processing A5_11_04: Dimmer Status on/off") results = {} - # if !( (payload[0] == 0x02) and (payload[2] == 0x00)): + # if !( (payload[0] == 0x02 and payload[2] == 0x00)): # self.logger.error("Error in processing A5_11_04: static byte missmatch") # return results results['D'] = payload[1] - if (payload[3] == 0x08): + if payload[3] == 0x08: # Dimmer is off results['STAT'] = 0 - elif (payload[3] == 0x09): + elif payload[3] == 0x09: # Dimmer is on results['STAT'] = 1 return results @@ -184,40 +216,39 @@ def _parse_eep_A5_12_01(self, payload, status): # Status command from switch actor with powermeter, for example Eltako FSVA-230 results = {} status_byte = payload[3] - is_data = (status_byte & 0x08) == 0x08 - if(is_data == False): + is_data = status_byte & 0x08 == 0x08 + if is_data is False: self.logger.debug("Processing A5_12_01: powermeter: is learn telegram. Aborting.") return results - is_power = (status_byte & 0x04) == 0x04 - div_enum = (status_byte & 0x03) + is_power = status_byte & 0x04 == 0x04 + div_enum = status_byte & 0x03 divisor = 1.0 - if(div_enum == 0): + if div_enum == 0: divisor = 1.0 - elif(div_enum == 1): + elif div_enum == 1: divisor = 10.0 - elif(div_enum == 2): + elif div_enum == 2: divisor = 100.0 - elif(div_enum == 3): + elif div_enum == 3: divisor = 1000.0 - else: + else: self.logger.warning(f"Processing A5_12_01: Unknown enum ({div_enum}) for divisor") self.logger.debug(f"Processing A5_12_01: divisor is {divisor}") - if(is_power): + if is_power: self.logger.debug("Processing A5_12_01: powermeter: Unit is Watts") else: self.logger.debug("Processing A5_12_01: powermeter: Unit is kWh") - value = (payload[0] << 16) + (payload[1] << 8) + payload[2] - value = value / divisor + value = ((payload[0] << 16) + (payload[1] << 8) + payload[2]) / divisor self.logger.debug(f"Processing A5_12_01: powermeter: {value} W") # It is confirmed by Eltako that with the use of multiple repeaters in an Eltako network, values can be corrupted in random cases. # Catching these random errors via plausibility check: if value > 2300: self.logger.warning(f"A5_12_01 plausibility error: power value {value} is greater than 2300W, which is not plausible. 
Skipping.") - #self.logger.warning(f"A5_12_01 exception: value {value}, divisor {divisor}, divenum {div_enum}, statusPayload {status_byte}, header status {status}") - #self.logger.warning(f"A5_12_01 exception: payloads 0-3: {payload[0]},{payload[1]},{payload[2]},{payload[3]}") + # self.logger.warning(f"A5_12_01 exception: value {value}, divisor {divisor}, divenum {div_enum}, statusPayload {status_byte}, header status {status}") + # self.logger.warning(f"A5_12_01 exception: payloads 0-3: {payload[0]},{payload[1]},{payload[2]},{payload[3]}") results['DEBUG'] = 1 return results @@ -229,24 +260,24 @@ def _parse_eep_A5_20_04(self, payload, status): self.logger.debug("Processing A5_20_04") results = {} status_byte = payload[3] - #1: temperature setpoint, 0: feed temperature - TS = ((status_byte & 1 << 6) == 1 << 6) - #1: failure, 0: normal - FL = ((status_byte & 1 << 7) == 1 << 7) - #1: locked, 0: unlocked - BLS= ((status_byte& 1 << 5) == 1 << 5) + # 1: temperature setpoint, 0: feed temperature + TS = status_byte & 1 << 6 == 1 << 6 + # 1: failure, 0: normal + FL = status_byte & 1 << 7 == 1 << 7 + # 1: locked, 0: unlocked + BLS = status_byte & 1 << 5 == 1 << 5 results['BLS'] = BLS # current valve position 0-100% results['CP'] = payload[0] # Current feet temperature or setpoint - if(TS == 1): - results['TS'] = 10 + (payload[1]/255*20) + if TS == 1: + results['TS'] = 10 + payload[1] / 255 * 20 else: - results['FT'] = 20 + (payload[1]/255*60) + results['FT'] = 20 + payload[1] / 255 * 60 # Current room temperature or failure code - if (FL == 0): - results['TMP'] = 10 + (payload[2]/255*20) - else: + if FL == 0: + results['TMP'] = 10 + payload[2] / 255 * 20 + else: results['FC'] = payload[2] results['STATUS'] = status_byte return results @@ -259,7 +290,7 @@ def _parse_eep_A5_30_01(self, payload, status): self.logger.warning("A5_30_03 is learn telegram") return results # Data_byte1 = 0x00 / 0xFF - results['ALARM'] = (payload[2] == 0x00) + results['ALARM'] = payload[2] == 0x00 # Battery linear: 0-120 (bat low), 121-255(bat high) results['BAT'] = payload[1] return results @@ -276,27 +307,26 @@ def _parse_eep_A5_30_03(self, payload, status): self.logger.error("EEP A5_30_03 not according to spec.") return results # Data_byte2 = Temperatur 0...40 °C (255...0) - results['TEMP'] = 40 - (payload[1]/255*40) + results['TEMP'] = 40 - payload[1] / 255 * 40 # Data_byte1 = 0x0F = Alarm, 0x1F = kein Alarm - results['ALARM'] = (payload[2] == 0x0F) + results['ALARM'] = payload[2] == 0x0F return results - def _parse_eep_A5_38_08(self, payload, status): results = {} - if (payload[1] == 2): # Dimming + if payload[1] == 2: # Dimming results['EDIM'] = payload[2] results['RMP'] = payload[3] - results['LRNB'] = ((payload[4] & 1 << 3) == 1 << 3) - results['EDIM_R'] = ((payload[4] & 1 << 2) == 1 << 2) - results['STR'] = ((payload[4] & 1 << 1) == 1 << 1) - results['SW'] = ((payload[4] & 1 << 0) == 1 << 0) + results['LRNB'] = payload[4] & 1 << 3 == 1 << 3 + results['EDIM_R'] = payload[4] & 1 << 2 == 1 << 2 + results['STR'] = payload[4] & 1 << 1 == 1 << 1 + results['SW'] = payload[4] & 1 << 0 == 1 << 0 return results def _parse_eep_A5_3F_7F(self, payload, status): self.logger.debug("Processing A5_3F_7F") results = {'DI_3': (payload[3] & 1 << 3) == 1 << 3, 'DI_2': (payload[3] & 1 << 2) == 1 << 2, 'DI_1': (payload[3] & 1 << 1) == 1 << 1, 'DI_0': (payload[3] & 1 << 0) == 1 << 0} - results['AD_0'] = (((payload[1] & 0x03) << 8) + payload[2]) * 1.8 / pow(2, 10) + results['AD_0'] = ((payload[1] & 0x03) << 8 + payload[2]) * 1.8 / pow(2, 
10) results['AD_1'] = (payload[1] >> 2) * 1.8 / pow(2, 6) results['AD_2'] = payload[0] * 1.8 / pow(2, 8) return results @@ -316,26 +346,25 @@ def _parse_eep_A5_0G_03(self, payload, status): self.logger.debug(f"eep-parser input status = {status}") results = {} runtime_s = ((payload[0] << 8) + payload[1]) / 10 - if (payload[2] == 1): + if payload[2] == 1: self.logger.debug(f"Shutter moved {runtime_s} s 'upwards'") results['MOVE'] = runtime_s * -1 - elif (payload[2] == 2): + elif payload[2] == 2: self.logger.debug(f"Shutter moved {runtime_s} s 'downwards'") results['MOVE'] = runtime_s return results -##################################################### -### --- Definitions for RORG = D2 / ORG = D2 --- ### -##################################################### +# Definitions for RORG = D2 / ORG = D2 + def _parse_eep_D2_01_07(self, payload, status): # self.logger.debug("Processing D2_01_07: VLD Switch") results = {} # self.logger.info(f'D2 Switch Feedback 0:{payload[0]} 1:{payload[1]} 2:{payload[2]}') - if (payload[2] == 0x80): + if payload[2] == 0x80: # Switch is off results['STAT'] = 0 self.logger.debug('D2 Switch off') - elif (payload[2] == 0xe4): + elif payload[2] == 0xe4: # Switch is on results['STAT'] = 1 self.logger.debug('D2 Switch on') @@ -345,55 +374,50 @@ def _parse_eep_D2_01_12(self, payload, status): # self.logger.debug("Processing D2_01_12: VLD Switch") results = {} # self.logger.info(f'D2 Switch Feedback 0:{payload[0]} 1:{payload[1]} 2:{payload[2]}') - if (payload[1] == 0x60) and (payload[2] == 0x80): + if payload[1] == 0x60 and payload[2] == 0x80: # Switch is off results['STAT_A'] = 0 self.logger.debug('D2 Switch Channel A: off') - elif (payload[1] == 0x60) and (payload[2] == 0xe4): + elif payload[1] == 0x60 and payload[2] == 0xe4: # Switch is on results['STAT_A'] = 1 self.logger.debug('D2 Channel A: Switch on') - elif (payload[1] == 0x61) and (payload[2] == 0x80): + elif payload[1] == 0x61 and payload[2] == 0x80: # Switch is off results['STAT_B'] = 0 self.logger.debug('D2 SwitchChannel A: off') - elif (payload[1] == 0x61) and (payload[2] == 0xe4): + elif payload[1] == 0x61 and payload[2] == 0xe4: # Switch is on results['STAT_B'] = 1 self.logger.debug('D2 Switch Channel B: on') return results -#################################################### -### --- Definitions for RORG = D5 / ORG = 06 --- ### -#################################################### +# Definitions for RORG = D5 / ORG = 06 + def _parse_eep_D5_00_01(self, payload, status): # Window/Door Contact Sensor, for example Eltako FTK, FTKB self.logger.debug("Processing D5_00_01: Door contact") - return {'STATUS': (payload[0] & 0x01) == 0x01} + return {'STATUS': payload[0] & 0x01 == 0x01} +# Definitions for RORG = F6 / ORG = 05 -#################################################### -### --- Definitions for RORG = F6 / ORG = 05 --- ### -#################################################### def _parse_eep_F6_02_01(self, payload, status): self.logger.debug("Processing F6_02_01: Rocker Switch, 2 Rocker, Light and Blind Control - Application Style 1") results = {} R1 = (payload[0] & 0xE0) >> 5 - EB = (payload[0] & (1<<4) == (1<<4)) R2 = (payload[0] & 0x0E) >> 1 - SA = (payload[0] & (1<<0) == (1<<0)) - NU = (status & (1<<4) == (1<<4)) + SA = payload[0] & 1 == 1 + NU = status & (1 << 4) == (1 << 4) - if (NU): + if NU: results['AI'] = (R1 == 0) or (SA and (R2 == 0)) results['AO'] = (R1 == 1) or (SA and (R2 == 1)) results['BI'] = (R1 == 2) or (SA and (R2 == 2)) results['BO'] = (R1 == 3) or (SA and (R2 == 3)) - elif (not NU) and 
(payload[0] == 0x00): + elif not NU and payload[0] == 0x00: results = {'AI': False, 'AO': False, 'BI': False, 'BO': False} else: self.logger.error("Parser detected invalid state encoding - check your switch!") - pass return results def _parse_eep_F6_02_02(self, payload, status): @@ -408,34 +432,34 @@ def _parse_eep_F6_02_03(self, payload, status): self.logger.debug("Processing F6_02_03: Rocker Switch, 2 Rocker") results = {} # Button A1: Dimm light down - results['AI'] = (payload[0]) == 0x10 + results['AI'] = payload[0] == 0x10 # Button A0: Dimm light up - results['AO'] = (payload[0]) == 0x30 + results['AO'] = payload[0] == 0x30 # Button B1: Dimm light down - results['BI'] = (payload[0]) == 0x50 + results['BI'] = payload[0] == 0x50 # Button B0: Dimm light up - results['BO'] = (payload[0]) == 0x70 - if (payload[0] == 0x70): + results['BO'] = payload[0] == 0x70 + if payload[0] == 0x70: results['B'] = True - elif (payload[0] == 0x50): + elif payload[0] == 0x50: results['B'] = False - elif (payload[0] == 0x30): + elif payload[0] == 0x30: results['A'] = True - elif (payload[0] == 0x10): + elif payload[0] == 0x10: results['A'] = False - return results + return results def _parse_eep_F6_10_00(self, payload, status): self.logger.debug(f"Processing F6_10_00: Mechanical Handle sends payload {payload[0]}") results = {} # Eltako defines 0xF0 for closed status. Enocean spec defines masking of lower 4 bit: - if (payload[0] & 0b11110000) == 0b11110000: + if payload[0] & 0b11110000 == 0b11110000: results['STATUS'] = 0 # Eltako defines 0xE0 for window open (horizontal) up status. Enocean spec defines the following masking: - elif (payload[0] & 0b11010000) == 0b11000000: + elif payload[0] & 0b11010000 == 0b11000000: results['STATUS'] = 1 # Eltako defines 0xD0 for open/right up status. 
Enocean spec defines masking of lower 4 bit: - elif (payload[0] & 0b11110000) == 0b11010000: + elif payload[0] & 0b11110000 == 0b11010000: results['STATUS'] = 2 else: self.logger.error(f"Error in F6_10_00 handle status, payload: {payload[0]} unknown") @@ -453,19 +477,19 @@ def _parse_eep_F6_0G_03(self, payload, status): ''' self.logger.debug("Processing F6_0G_03: shutter actor") self.logger.debug("payload = [{}]".format(', '.join(['0x%02X' % b for b in payload]))) - self.logger.debug("status: {}".format(status)) + self.logger.debug(f"status: {status}") results = {} - if (payload[0] == 0x70): + if payload[0] == 0x70: results['POSITION'] = 0 results['B'] = 0 - elif (payload[0] == 0x50): + elif payload[0] == 0x50: results['POSITION'] = 255 results['B'] = 0 - elif (payload[0] == 0x01): + elif payload[0] == 0x01: results['STATUS'] = 'Start moving up' results['B'] = 1 - elif (payload[0] == 0x02): + elif payload[0] == 0x02: results['STATUS'] = 'Start moving down' results['B'] = 2 - self.logger.debug('parse_eep_F6_0G_03 returns: {}'.format(results)) + self.logger.debug(f'parse_eep_F6_0G_03 returns: {results}') return results diff --git a/enocean/prepare_packet_data.py b/enocean/protocol/packet_data.py similarity index 80% rename from enocean/prepare_packet_data.py rename to enocean/protocol/packet_data.py index 79f9f76e6..e4683bdb6 100755 --- a/enocean/prepare_packet_data.py +++ b/enocean/protocol/packet_data.py @@ -22,9 +22,10 @@ import logging from lib.utils import Utils +from .constants import RORG, PACKET_TYPE -class Prepare_Packet_Data(): +class Packet_Data(): def __init__(self, plugin_instance): """ @@ -35,13 +36,13 @@ def __init__(self, plugin_instance): # Get the plugin instance from encocean class self._plugin_instance = plugin_instance - def CanDataPrepare(self, tx_eep): + def CanPrepareData(self, tx_eep): """ This Method checks if there is an available Prepare Data Method for the tx_eep """ found = callable(getattr(self, '_prepare_data_for_tx_eep_' + tx_eep, None)) - if (not found): - self.logger.error(f"enocean-CanDataPrepare: missing tx_eep for pepare send data {tx_eep} - there should be a _prepare_data_for_tx_eep_{tx_eep}-function!") + if not found: + self.logger.error(f"enocean-CanPrepareData: missing tx_eep for prepare send data {tx_eep} - there should be a _prepare_data_for_tx_eep_{tx_eep}-function!") return found def PrepareData(self, item, tx_eep): @@ -54,40 +55,34 @@ def PrepareData(self, item, tx_eep): if self._plugin_instance.has_iattr(item.conf, 'enocean_tx_id_offset'): self.logger.debug("enocean-PrepareData: item has valid enocean_tx_id_offset") id_offset = int(self._plugin_instance.get_iattr_value(item.conf, 'enocean_tx_id_offset')) - if (id_offset < 0) or (id_offset > 127): + if id_offset < 0 or id_offset > 127: self.logger.error('enocean-PrepareData: ID offset out of range (0-127). Aborting.') - return None + return else: self.logger.info(f"enocean-PrepareData: {tx_eep} item has no attribute ''enocean_tx_id_offset''! 
Set to default = 0") id_offset = 0 - # start prepare data + # start prepare data rorg, payload, optional = getattr(self, '_prepare_data_for_tx_eep_' + tx_eep)(item, tx_eep) - #self.logger.info('enocean-PrepareData: {} returns [{:#04x}], [{}], [{}]'.format(tx_eep, rorg, ', '.join('{:#04x}'.format(x) for x in payload), ', '.join('{:#04x}'.format(x) for x in optional))) + # self.logger.info('enocean-PrepareData: {} returns [{:#04x}], [{}], [{}]'.format(tx_eep, rorg, ', '.join('{:#04x}'.format(x) for x in payload), ', '.join('{:#04x}'.format(x) for x in optional))) return id_offset, rorg, payload, optional +# Definitions for RORG = A5 / ORG = 07 -##################################################### -### --- Definitions for RORG = A5 / ORG = 07 --- ### -### --> Definition of 4BS Telegrams ### -##################################################### - - def _prepare_data_for_tx_eep_A5_20_04(self, item, tx_eep): """ ### --- Data for radiator valve command --- ### """ self.logger.debug(f'enocean-PrepareData: prepare data for tx_eep {tx_eep}') - rorg = 0xa5 temperature = item() # define default values: - MC = 1 # off - WUC = 3 # 120 seconds - BLC = 0 # unlocked - LRNB = 1 # data - DSO = 0 # 0 degree + MC = 1 # off + WUC = 3 # 120 seconds + BLC = 0 # unlocked + LRNB = 1 # data + DSO = 0 # 0 degree valve_position = 50 - for sibling in get_children(item.parent): + for sibling in item.return_parent().get_children(): if hasattr(sibling, 'MC'): MC = sibling() if hasattr(sibling, 'WUC'): @@ -100,24 +95,22 @@ def _prepare_data_for_tx_eep_A5_20_04(self, item, tx_eep): DSO = sibling() if hasattr(sibling, 'VALVE_POSITION'): valve_position = sibling() - TSP = int((temperature -10)*255/30) - status = 0 + (MC << 1) + (WUC << 2) + TSP = int((temperature - 10) * 255 / 30) + status = 0 + (MC << 1) + (WUC << 2) status2 = (BLC << 5) + (LRNB << 4) + (DSO << 2) - payload = [valve_position, TSP, status , status2] + payload = [valve_position, TSP, status, status2] optional = [] - return rorg, payload, optional - - - def _prepare_data_for_tx_eep_A5_38_08_01(self, item, tx_eep): + return RORG.BS4, payload, optional + + def _prepare_data_for_tx_eep_A5_38_08_01(self, item, tx_eep): """ ### --- Data for A5-38_08 command 1 --- ### Eltako Devices: - FSR14-2x, FSR14-4x, FSR14SSR, FSR71 + FSR14-2x, FSR14-4x, FSR14SSR, FSR71 FSR61, FSR61NP, FSR61G, FSR61LN, FLC61NP This method has the function to prepare the packet data in case of switching device on or off """ self.logger.debug(f'enocean-PrepareData: prepare data for tx_eep {tx_eep}') - rorg = 0xa5 block = 0 # check if item has attribute block_switch if self._plugin_instance.has_iattr(item.conf, 'block_switch'): @@ -133,10 +126,9 @@ def _prepare_data_for_tx_eep_A5_38_08_01(self, item, tx_eep): payload = [0x01, 0x00, 0x00, int(9 + block)] self.logger.debug(f'enocean-PrepareData: {tx_eep} prepare data to switch on') optional = [] - return rorg, payload, optional - - - def _prepare_data_for_tx_eep_A5_38_08_02(self, item, tx_eep): + return RORG.BS4, payload, optional + + def _prepare_data_for_tx_eep_A5_38_08_02(self, item, tx_eep): """ ### --- Data for A5-38_08 command 2 --- ### Eltako Devices: @@ -145,8 +137,7 @@ def _prepare_data_for_tx_eep_A5_38_08_02(self, item, tx_eep): This method has the function to prepare the packet data in case of switching the dimmer device on or off, but calculate also the correct data of dim_speed and dim_value for further solutions. 
""" - #self.logger.debug(f'enocean-PrepareData: prepare data for tx_eep {tx_eep}') - rorg = 0xa5 + # self.logger.debug(f'enocean-PrepareData: prepare data for tx_eep {tx_eep}') block = 0 # check if item has attribute block_dim_value if self._plugin_instance.has_iattr(item.level.conf, 'block_dim_value'): @@ -158,7 +149,7 @@ def _prepare_data_for_tx_eep_A5_38_08_02(self, item, tx_eep): dim_speed = self._plugin_instance.get_iattr_value(item.level.conf, 'dim_speed') # bound dim_speed values to [0 - 100] % dim_speed = max(0, min(100, int(dim_speed))) - #self.logger.debug(f'enocean-PrepareData: {tx_eep} use dim_speed = {dim_speed} %') + # self.logger.debug(f'enocean-PrepareData: {tx_eep} use dim_speed = {dim_speed} %') # calculate dimspeed from percent into integer # 0x01 --> fastest speed --> 100 % # 0xFF --> slowest speed --> 0 % @@ -166,29 +157,28 @@ def _prepare_data_for_tx_eep_A5_38_08_02(self, item, tx_eep): else: # use intern dim_speed of the dim device dim_speed = 0 - #self.logger.debug('enocean-PrepareData: no attribute dim_speed --> use intern dim speed') + # self.logger.debug('enocean-PrepareData: no attribute dim_speed --> use intern dim speed') if not item(): # if value is False --> Switch off dim_value = 0 payload = [0x02, int(dim_value), int(dim_speed), int(8 + block)] - #self.logger.debug('enocean-PrepareData: prepare data to switch off for command for A5_38_08_02') + # self.logger.debug('enocean-PrepareData: prepare data to switch off for command for A5_38_08_02') else: # check if reference dim value exists if 'ref_level' in item.level.conf: dim_value = int(item.level.conf['ref_level']) # check range of dim_value [0 - 100] % dim_value = max(0, min(100, int(dim_value))) - #self.logger.debug(f'enocean-PrepareData: {tx_eep} ref_level {dim_value} % found for A5_38_08_02') + # self.logger.debug(f'enocean-PrepareData: {tx_eep} ref_level {dim_value} % found for A5_38_08_02') else: # set dim_value on 100 % == 0x64 dim_value = 0x64 self.logger.debug(f'enocean-PrepareData: {tx_eep} no ref_level found! Setting to default 100 %') payload = [0x02, int(dim_value), int(dim_speed), int(9 + block)] optional = [] - return rorg, payload, optional - - - def _prepare_data_for_tx_eep_A5_38_08_03(self, item, tx_eep): + return RORG.BS4, payload, optional + + def _prepare_data_for_tx_eep_A5_38_08_03(self, item, tx_eep): """ ### --- Data for A5-38_08 command 3--- ### Eltako Devices: @@ -198,14 +188,13 @@ def _prepare_data_for_tx_eep_A5_38_08_03(self, item, tx_eep): In case of dim_value == 0 the dimmer is switched off. 
""" self.logger.debug(f'enocean-PrepareData: prepare data for tx_eep {tx_eep}') - rorg = 0xa5 block = 0 # check if item has attribute block_dim_value if self._plugin_instance.has_iattr(item.conf, 'block_dim_value'): block_value = self._plugin_instance.get_iattr_value(item.conf, 'block_dim_value') if Utils.to_bool(block_value): block = 4 - # check if item has attribite dim_speed + # check if item has attribite dim_speed if self._plugin_instance.has_iattr(item.conf, 'dim_speed'): dim_speed = self._plugin_instance.get_iattr_value(item.conf, 'dim_speed') # bound dim_speed values to [0 - 100] % @@ -214,7 +203,7 @@ def _prepare_data_for_tx_eep_A5_38_08_03(self, item, tx_eep): # calculate dimspeed from percent into hex # 0x01 --> fastest speed --> 100 % # 0xFF --> slowest speed --> 0 % - dim_speed = (255 - (254 * dim_speed/100)) + dim_speed = (255 - (254 * dim_speed / 100)) else: # use intern dim_speed of the dim device dim_speed = 0x00 @@ -232,10 +221,9 @@ def _prepare_data_for_tx_eep_A5_38_08_03(self, item, tx_eep): dim_value = dim_value payload = [0x02, int(dim_value), int(dim_speed), int(9 + block)] optional = [] - return rorg, payload, optional - - - def _prepare_data_for_tx_eep_A5_3F_7F(self, item, tx_eep): + return RORG.BS4, payload, optional + + def _prepare_data_for_tx_eep_A5_3F_7F(self, item, tx_eep): """ ### --- Data for A5-3F-7F - Universal Actuator Command --- ### Eltako Devices: @@ -244,14 +232,13 @@ def _prepare_data_for_tx_eep_A5_3F_7F(self, item, tx_eep): The Runtime is set in [0 - 255] s """ self.logger.debug(f'enocean-PrepareData: prepare data for tx_eep {tx_eep}') - rorg = 0xa5 block = 0 # check if item has attribute block_switch if self._plugin_instance.has_iattr(item.conf, 'block_switch'): block_value = self._plugin_instance.get_iattr_value(item.conf, 'block_switch') if Utils.to_bool(block_value): block = 4 - # check if item has attribite enocean_rtime + # check if item has attribite enocean_rtime if self._plugin_instance.has_iattr(item.conf, 'enocean_rtime'): rtime = self._plugin_instance.get_iattr_value(item.conf, 'enocean_rtime') # rtime [0 - 255] s @@ -263,25 +250,24 @@ def _prepare_data_for_tx_eep_A5_3F_7F(self, item, tx_eep): self.logger.debug(f'enocean-PrepareData: {tx_eep} actuator runtime not specified set to {rtime} s.') # check command (up, stop, or down) command = int(item()) - if(command == 0): + if command == 0: # Stopp moving command_hex_code = 0x00 - elif(command == 1): + elif command == 1: # moving up command_hex_code = 0x01 - elif(command == 2): + elif command == 2: # moving down command_hex_code = 0x02 else: self.logger.error(f'enocean-PrepareData: {tx_eep} sending actuator command failed: invalid command {command}') - return None + return # define payload payload = [0x00, rtime, command_hex_code, int(8 + block)] optional = [] - return rorg, payload, optional - - - def _prepare_data_for_tx_eep_07_3F_7F(self, item, tx_eep): + return RORG.BS4, payload, optional + + def _prepare_data_for_tx_eep_07_3F_7F(self, item, tx_eep): """ ### --- Data for 07-3F-7F Command --- ### Eltako Devices: @@ -294,8 +280,9 @@ def _prepare_data_for_tx_eep_07_3F_7F(self, item, tx_eep): Color: bit0 = red, bit1= green, bit2 = blue, bit3 = white """ self.logger.debug(f'enocean-PrepareData: prepare data for tx_eep {tx_eep}') + # NOTE: not an official RORG value! 
rorg = 0x07 - # check if item has attribite dim_speed + # check if item has attribute dim_speed if self._plugin_instance.has_iattr(item.conf, 'dim_speed'): dim_speed = self._plugin_instance.get_iattr_value(item.conf, 'dim_speed') dim_speed = max(0, min(100, int(dim_speed))) @@ -303,89 +290,87 @@ def _prepare_data_for_tx_eep_07_3F_7F(self, item, tx_eep): # calculate dimspeed from percent into hex # 0x01 --> fastest speed --> 100 % # 0xFF --> slowest speed --> 0 % - dim_speed = (255 - (254 * dim_speed/100)) + dim_speed = (255 - (254 * dim_speed / 100)) else: # use intern dim_speed of the dim device dim_speed = 0x00 self.logger.debug(f'enocean-PrepareData: {tx_eep} no attribute dim_speed --> use intern dim speed') + # check the color of the item if self._plugin_instance.has_iattr(item.conf, 'color'): color = self._plugin_instance.get_iattr_value(item.conf, 'color') - if (color == 'red'): + color_hex = '' + if color == 'red': color_hex = 0x01 - elif (color == 'green'): + elif color == 'green': color_hex = 0x02 - elif (color == 'blue'): + elif color == 'blue': color_hex = 0x04 - elif (color == 'white'): + elif color == 'white': color_hex = 0x08 else: self.logger.error(f'enocean-PrepareData: {item} has no attribute color --> please specify color!') - return None + return + # Aufdimmen: [dim_speed, color_hex, 0x30, 0x0F] # Abdimmen: [dim_speed, color_hex, 0x31, 0x0F] # Dimmstop: [dim_speed, color_hex, 0x32, 0x0F] # check command (up, stop, or down) command = int(item()) - if(command == 0): + if command == 0: # dim up command_hex_code = 0x30 - elif(command == 1): + elif command == 1: # dim down command_hex_code = 0x31 - elif(command == 2): + elif command == 2: # stop dim command_hex_code = 0x32 else: self.logger.error(f'enocean-PrepareData: {tx_eep} sending actuator command failed: invalid command {command}') - return None + return # define payload payload = [int(dim_speed), color_hex, command_hex_code, 0x0F] optional = [] return rorg, payload, optional - - -############################################################# -### --- Definitions for RORG = D2 --- ### -### --> Definition EnOcean Variable Length Telegram (VLD) ### -############################################################# - def _prepare_data_for_tx_eep_D2_01_07(self, item, tx_eep): +# Definitions for RORG = D2, EnOcean Variable Length Telegram (VLD) + + def _prepare_data_for_tx_eep_D2_01_07(self, item, tx_eep): """ ### --- Data for D2_01_07 (VLD) --- ### Prepare data for Devices with Varable Length Telegram. - There is currently no device information available. - Optional 'pulsewidth' - Attribute was removed, it can be realized with the smarthomeNG + There is currently no device information available. + Optional 'pulsewidth' - Attribute was removed, it can be realized with the smarthomeNG build in function autotimer! """ self.logger.debug(f'enocean-PrepareData: prepare data for tx_eep {tx_eep}') - rorg = 0xD2 - SubTel = 0x03 db = 0xFF Secu = 0x0 if self._plugin_instance.has_iattr(item.conf, 'enocean_rx_id'): rx_id = int(self._plugin_instance.get_iattr_value(item.conf, 'enocean_rx_id'), 16) - if (rx_id < 0) or (rx_id > 0xFFFFFFFF): + if rx_id < 0 or rx_id > 0xFFFFFFFF: self.logger.error(f'enocean-PrepareData: {tx_eep} rx-ID-Offset out of range (0-127). 
Aborting.') - return None + return self.logger.debug(f'enocean-PrepareData: {tx_eep} enocean_rx_id found.') else: rx_id = 0 self.logger.debug(f'enocean-PrepareData: {tx_eep} no enocean_rx_id found!') # Prepare Data Packet - if (item() == 0): + if item() == 0: payload = [0x01, 0x1E, 0x00] - optional = [SubTel, rx_id, db, Secu] - elif (item() == 1): + optional = [PACKET_TYPE.RADIO_SUB_TEL, rx_id, db, Secu] + elif item() == 1: payload = [0x01, 0x1E, 0x01] - optional = [SubTel, rx_id, db, Secu] + optional = [PACKET_TYPE.RADIO_SUB_TEL, rx_id, db, Secu] else: self.logger.error(f'enocean-PrepareData: {tx_eep} undefined Value. Error!') - return None + return # packet_data_prepared = (id_offset, 0xD2, payload, [0x03, 0xFF, 0xBA, 0xD0, 0x00, 0xFF, 0x0]) self.logger.info(f'enocean-PrepareData: {tx_eep} Packet Data Prepared for {tx_eep} (VLD)') - optional = [SubTel, rx_id, db, Secu] - return rorg, payload, optional + optional = [PACKET_TYPE.RADIO_SUB_TEL, rx_id, db, Secu] + + return RORG.VLD, payload, optional def _prepare_data_for_tx_eep_D2_01_12(self, item, tx_eep): """ @@ -396,24 +381,22 @@ def _prepare_data_for_tx_eep_D2_01_12(self, item, tx_eep): build in function autotimer! """ self.logger.debug(f'enocean-PrepareData: prepare data for tx_eep {tx_eep}') - rorg = 0xD2 - SubTel = 0x03 db = 0xFF Secu = 0x0 if self._plugin_instance.has_iattr(item.conf, 'enocean_rx_id'): rx_id = int(self._plugin_instance.get_iattr_value(item.conf, 'enocean_rx_id'), 16) - if (rx_id < 0) or (rx_id > 0xFFFFFFFF): + if rx_id < 0 or rx_id > 0xFFFFFFFF: self.logger.error(f'enocean-PrepareData: {tx_eep} rx-ID-Offset out of range (0-127). Aborting.') - return None + return self.logger.debug(f'enocean-PrepareData: {tx_eep} enocean_rx_id found.') else: rx_id = 0 self.logger.debug(f'enocean-PrepareData: {tx_eep} no enocean_rx_id found!') if self._plugin_instance.has_iattr(item.conf, 'enocean_channel'): schannel = self._plugin_instance.get_iattr_value(item.conf, 'enocean_channel') - if (schannel == "A"): + if schannel == "A": channel = 0x00 - elif (schannel == "B"): + elif schannel == "B": channel = 0x01 else: channel = 0x1E @@ -422,16 +405,17 @@ def _prepare_data_for_tx_eep_D2_01_12(self, item, tx_eep): channel = 0x1E self.logger.debug(f'enocean-PrepareData: {tx_eep} no enocean_channel found!') # Prepare Data Packet - if (item() == 0): + if item() == 0: payload = [0x01, channel, 0x00] - optional = [SubTel, rx_id, db, Secu] - elif (item() == 1): + optional = [PACKET_TYPE.RADIO_SUB_TEL, rx_id, db, Secu] + elif item() == 1: payload = [0x01, channel, 0x01] - optional = [SubTel, rx_id, db, Secu] + optional = [PACKET_TYPE.RADIO_SUB_TEL, rx_id, db, Secu] else: self.logger.error(f'enocean-PrepareData: {tx_eep} undefined Value. Error!') - return None + return # packet_data_prepared = (id_offset, 0xD2, payload, [0x03, 0xFF, 0xBA, 0xD0, 0x00, 0xFF, 0x0]) self.logger.info(f'enocean-PrepareData: {tx_eep} Packet Data Prepared for {tx_eep} (VLD)') - optional = [SubTel, rx_id, db, Secu] - return rorg, payload, optional + optional = [PACKET_TYPE.RADIO_SUB_TEL, rx_id, db, Secu] + + return RORG.VLD, payload, optional diff --git a/modbus_tcp/__init__.py b/modbus_tcp/__init__.py index bc75596c2..90559c425 100755 --- a/modbus_tcp/__init__.py +++ b/modbus_tcp/__init__.py @@ -47,13 +47,14 @@ AttrObjectType = 'modBusObjectType' AttrDirection = 'modBusDirection' + class modbus_tcp(SmartPlugin): """ This class provides a Plugin for SmarthomeNG to read and or write to modbus devices. 
""" - PLUGIN_VERSION = '1.0.12' + PLUGIN_VERSION = '1.0.13' def __init__(self, sh, *args, **kwargs): """ @@ -85,9 +86,11 @@ def __init__(self, sh, *args, **kwargs): if not (self._cycle or self._crontab): self.logger.error(f"{self.get_fullname()}: no update cycle or crontab set. Modbus will not be queried automatically") - self._slaveUnit = int(self.get_parameter_value('slaveUnit')) + self._slaveUnit = self.get_parameter_value('slaveUnit') self._slaveUnitRegisterDependend = False + self._pause_item_path = self.get_parameter_value('pause_item') + self._sh = sh self._regToRead = {} self._regToWrite = {} @@ -99,8 +102,6 @@ def __init__(self, sh, *args, **kwargs): self.init_webinterface(WebInterface) - return - def run(self): """ Run method for the plugin @@ -108,67 +109,25 @@ def run(self): self.logger.debug(f"Plugin '{self.get_fullname()}': run method called") if self.alive: return + self.alive = True - self.set_suspend(by='run()') - + if self._cycle or self._crontab: - self.error_count = 0 # Initialize error count - if not self.suspended: - self._create_cyclic_scheduler() + self.error_count = 0 # Initialize error count + self.scheduler_add('poll_device_' + self._host, self.poll_device, cycle=self._cycle, cron=self._crontab, prio=5) self.logger.debug(f"Plugin '{self.get_fullname()}': run method finished ") - def _create_cyclic_scheduler(self): - self.scheduler_add('poll_device_' + self._host, self.poll_device, cycle=self._cycle, cron=self._crontab, prio=5) - - def _remove_cyclic_scheduler(self): - self.scheduler_remove('poll_device_' + self._host) - def stop(self): """ Stop method for the plugin """ self.alive = False self.logger.debug(f"Plugin '{self.get_fullname()}': stop method called") - self._remove_cyclic_scheduler() + self.scheduler_remove('poll_device_' + self._host) self._Mclient.close() self.connected = False self.logger.debug(f"Plugin '{self.get_fullname()}': stop method finished") - # sh.plugins.return_plugin('pluginName').suspend() - def set_suspend(self, suspend_active=None, by=None): - """ - enable / disable suspend mode: open/close connections, schedulers - """ - - if suspend_active is None: - if self._suspend_item is not None: - # if no parameter set, try to use item setting - suspend_active = bool(self._suspend_item()) - else: - # if not available, default to "resume" (non-breaking default) - suspend_active = False - - # print debug logging - if suspend_active: - msg = 'Suspend mode enabled' - else: - msg = 'Suspend mode disabled' - if by: - msg += f' (set by {by})' - self.logger.debug(msg) - - # activate selected mode, use smartplugin methods - if suspend_active: - self.suspend(by) - else: - self.resume(by) - - if suspend_active: - self._remove_cyclic_scheduler() - else: - self._create_cyclic_scheduler() - - def parse_item(self, item): """ Default plugin parse_item method. Is called when the plugin is initialized. @@ -178,10 +137,10 @@ def parse_item(self, item): :param item: The item to process. 
""" - # check for suspend item - if item.property.path == self._suspend_item_path: - self.logger.debug(f'suspend item {item.property.path} registered') - self._suspend_item = item + # check for pause item + if item.property.path == self._pause_item_path: + self.logger.debug(f'pause item {item.property.path} registered') + self._pause_item = item self.add_item(item, updating=True) return self.update_item @@ -207,7 +166,7 @@ def parse_item(self, item): if self.has_iattr(item.conf, AttrObjectType): objectType = self.get_iattr_value(item.conf, AttrObjectType) - reg = str(objectType) # dictionary key: objectType.regAddr.slaveUnit // HoldingRegister.528.1 + reg = str(objectType) # dictionary key: objectType.regAddr.slaveUnit // HoldingRegister.528.1 reg += '.' reg += str(regAddr) reg += '.' @@ -274,7 +233,7 @@ def poll_device(self): changes on it's own, but has to be polled to get the actual status. It is called by the scheduler which is set within run() method. """ - if self.suspended: + if not self.alive: return with self.lock: @@ -302,7 +261,6 @@ def poll_device(self): try: for reg, regPara in self._regToRead.items(): with self.lock: - regAddr = regPara['regAddr'] value = self.__read_Registers(regPara) # self.logger.debug(f"value read: {value} type: {type(value)}") if value is not None: @@ -330,8 +288,6 @@ def poll_device(self): except Exception as e: self.logger.error(f"something went wrong in the poll_device function: {e}") - - # called each time an item changes. def update_item(self, item, caller=None, source=None, dest=None): """ Item has been updated @@ -349,21 +305,16 @@ def update_item(self, item, caller=None, source=None, dest=None): slaveUnit = self._slaveUnit dataDirection = 'read' - # check for suspend item - if item is self._suspend_item: + # check for pause item + if item is self._pause_item: if caller != self.get_shortname(): - self.logger.debug(f'Suspend item changed to {item()}') - self.set_suspend(item(), by=f'suspend item {item.property.path}') + self.logger.debug(f'pause item changed to {item()}') + if item() and self.alive: + self.stop() + elif not item() and not self.alive: + self.run() return - if self.suspended: - if self.suspend_log_update is None or self.suspend_log_update is False: # debug - Nachricht nur 1x ausgeben - self.logger.info('Plugin is suspended, data will not be written') - self.suspend_log_update = True - return - else: - self.suspend_log_update = False - if caller == self.get_fullname(): # self.logger.debug(f'item was changed by the plugin itself - caller:{caller} source:{source} dest:{dest}') return @@ -389,7 +340,7 @@ def update_item(self, item, caller=None, source=None, dest=None): # else: # self.logger.debug(f'update_item:{item} default modBusObjectTyp: {objectType}') - reg = str(objectType) # Dict-key: HoldingRegister.528.1 *** objectType.regAddr.slaveUnit *** + reg = str(objectType) # Dict-key: HoldingRegister.528.1 *** objectType.regAddr.slaveUnit *** reg += '.' reg += str(regAddr) reg += '.' @@ -417,8 +368,6 @@ def update_item(self, item, caller=None, source=None, dest=None): self.connected = False return - startTime = datetime.now() - regCount = 0 try: self.__write_Registers(regPara, item()) except Exception as e: @@ -431,19 +380,13 @@ def __write_Registers(self, regPara, value): bo = regPara['byteOrder'] wo = regPara['wordOrder'] dataTypeStr = regPara['dataType'] - dataType = ''.join(filter(str.isalpha, dataTypeStr)) # vom dataType die Ziffen entfernen z.B. 
uint16 = uint - registerCount = 0 # Anzahl der zu schreibenden Register (Words) + dataType = ''.join(filter(str.isalpha, dataTypeStr)) # vom dataType die Ziffen entfernen z.B. uint16 = uint try: - bits = int(''.join(filter(str.isdigit, dataTypeStr))) # bit-Zahl aus aus dataType z.B. uint16 = 16 + bits = int(''.join(filter(str.isdigit, dataTypeStr))) # bit-Zahl aus aus dataType z.B. uint16 = 16 except: bits = 16 - if dataType.lower() == 'string': - registerCount = int(bits / 2) # bei string: bits = bytes !! string16 -> 16Byte - 8 registerCount - else: - registerCount = int(bits / 16) - if regPara['factor'] != 1: # self.logger.debug(f"value {value} divided by: {regPara['factor']}") value = value * (1 / regPara['factor']) @@ -480,11 +423,11 @@ def __write_Registers(self, regPara, value): builder.add_string(value) elif dataType.lower() == 'bit': if objectType == 'Coil' or objectType == 'DiscreteInput': - if not isinstance(value, bool): # test is boolean + if not isinstance(value, bool): # test is boolean self.logger.error(f"Value is not boolean: {value}") return else: - if set(value).issubset({'0', '1'}) and bool(value): # test is bit-string '00110101' + if set(value).issubset({'0', '1'}) and bool(value): # test is bit-string '00110101' builder.add_bits(value) else: self.logger.error(f"Value is not a bitstring: {value}") @@ -541,13 +484,13 @@ def __read_Registers(self, regPara): bits = 16 if dataType.lower() == 'string': - registerCount = int(bits / 2) # bei string: bits = bytes !! string16 -> 16Byte - 8 registerCount + registerCount = int(bits / 2) # bei string: bits = bytes !! string16 -> 16Byte - 8 registerCount else: registerCount = int(bits / 16) - if self.connected == False: + if not self.connected: self.logger.error(f"not connected to {self._host}:{self._port}") - return None + return # self.logger.debug(f"read {objectType}.{address}.{slaveUnit} (address.slaveUnit) regCount:{registerCount}") if objectType == 'Coil': @@ -560,11 +503,11 @@ def __read_Registers(self, regPara): result = self._Mclient.read_holding_registers(address, registerCount, slave=slaveUnit) else: self.logger.error(f"{AttrObjectType} not supported: {objectType}") - return None + return if result.isError(): self.logger.error(f"read error: {result} {objectType}.{address}.{slaveUnit} (address.slaveUnit) regCount:{registerCount}") - return None + return if objectType == 'Coil': value = result.bits[0] @@ -615,4 +558,3 @@ def __read_Registers(self, regPara): return decoder.decode_bits() else: self.logger.error(f"Number of bits or datatype not supported : {dataTypeStr}") - return None diff --git a/modbus_tcp/plugin.yaml b/modbus_tcp/plugin.yaml index e78b1e7cd..c3e3d8c0a 100755 --- a/modbus_tcp/plugin.yaml +++ b/modbus_tcp/plugin.yaml @@ -11,13 +11,13 @@ plugin: keywords: modbus_tcp modbus smartmeter inverter heatpump #documentation: http://smarthomeng.de/user/plugins/modbus_tcp/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1154368-einbindung-von-modbus-tcp - version: 1.0.12 # Plugin version - sh_minversion: '1.8' # minimum shNG version to use this plugin + version: 1.0.13 # Plugin version + sh_minversion: '1.10' # minimum shNG version to use this plugin #sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: '3.6' # py_maxversion: # maximum Python version to use for this plugin (leave empty if latest) - multi_instance: True # plugin supports multi instance - restartable: unknown + multi_instance: true # plugin supports multi instance + restartable: 
true classname: modbus_tcp # class containing the plugin parameters: @@ -26,6 +26,7 @@ parameters: description: de: 'IP Adresse des Modbus-Geraetes' en: 'IP address from the modbus-device' + mandatory: true port: type: int @@ -34,6 +35,7 @@ parameters: description: de: 'modbus Port' en: 'modbus port' + mandatory: true cycle: type: int @@ -41,7 +43,7 @@ parameters: valid_min: 0 description: de: 'Update Zyklus in Sekunden. Wenn der Wert 0 ist, wird keine Abfrage über cycle ausgeführt' - en: 'Update cycle in seconds. If value is 0 then noch query will be made by means of cycle' + en: 'Update cycle in seconds. If value is 0 then no query will be made by means of cycle' crontab: type: str @@ -50,12 +52,20 @@ parameters: en: 'Update by means of a crontab' slaveUnit: - type: num + type: int default: 1 description: de: 'Slave-Addresse der zu lesenden Modbus-Einheit' en: 'slave-address of the Modbus-Unit to read' + pause_item: + type: str + default: '' + description: + de: 'Item, um die Ausführung des Plugins zu steuern' + en: 'item for controlling plugin execution' + + item_attributes: modBusObjectType: type: str diff --git a/russound/__init__.py b/russound/__init__.py index ad6a07379..1d4520f10 100755 --- a/russound/__init__.py +++ b/russound/__init__.py @@ -42,7 +42,7 @@ class Russound(SmartPlugin): the update functions for the items """ - PLUGIN_VERSION = '1.7.2' + PLUGIN_VERSION = '1.7.3' def __init__(self, sh, *args, **kwargs): """ @@ -52,16 +52,10 @@ def __init__(self, sh, *args, **kwargs): if '.'.join(VERSION.split('.', 2)[:2]) <= '1.5': self.logger = logging.getLogger(__name__) - super().__init__(sh, args, kwargs) - try: - # sh = self.get_sh() to get it. - self.host = self.get_parameter_value('host') - self.port = self.get_parameter_value('port') - except KeyError as e: - self.logger.critical( - "Plugin '{}': Inconsistent plugin (invalid metadata definition: {} not defined)".format(self.get_shortname(), e)) - self._init_complete = False - return + super().__init__() + self.host = self.get_parameter_value('host') + self.port = self.get_parameter_value('port') + self._pause_item_path = self.get_parameter_value('pause_item') # Initialization code goes here self.terminator = RESP_DELIMITER @@ -69,10 +63,8 @@ def __init__(self, sh, *args, **kwargs): self._client.set_callbacks(data_received=self.found_terminator) self.params = {} self.sources = {} - self.suspended = False - + self.init_webinterface() - return def run(self): """ @@ -80,21 +72,30 @@ def run(self): """ self.logger.debug("Run method called") if not self._client.connect(): - self.logger.debug(f'Connection to {self.host}:{self.port} not possible. Plugin deactivated.') + self.logger.debug(f'Connection to {self.host}:{self.port} not possible. Plugin stopped.') + self.stop() return + self.alive = True + if self._pause_item: + self._pause_item(False, self.get_fullname()) def activate(self): - self.logger.debug("Activate method called, queries to russound will be resumes and data will be written again") - self.resume() - + self.logger.debug("Activate method called, but is deprecated. 
Please move to run()") + self.run() + def stop(self): """ Stop method for the plugin """ self.logger.debug("Stop method called") self.alive = False - self._client.close() + if self._pause_item: + self._pause_item(True, self.get_fullname()) + try: + self._client.close() + except Exception: + pass def connect(self): self._client.open() @@ -121,10 +122,10 @@ def parse_item(self, item): # self.logger.debug("Source {0} added".format(s)) # return None - if item.property.path == self._suspend_item_path: - self._suspend_item = item - self.logger.info(f'set suspend_item to {item.property.path}') - return + if item.property.path == self._pause_item_path: + self._pause_item = item + self.logger.info(f'set pause_item to {item.property.path}') + return self.update_item if self.has_iattr(item.conf, 'rus_path'): self.logger.debug("parse item: {}".format(item)) @@ -177,9 +178,6 @@ def parse_item(self, item): return self.update_item - def parse_logic(self, logic): - pass - def _restrict(self, val, minval, maxval): if val < minval: return minval @@ -200,19 +198,21 @@ def update_item(self, item, caller=None, source=None, dest=None): :param source: if given it represents the source :param dest: if given it represents the dest """ + # check for pause item + if item is self._pause_item: + if caller != self.get_shortname(): + self.logger.debug(f'pause item changed to {item()}') + if item() and self.alive: + self.stop() + elif not item() and not self.alive: + self.run() + return + if self.alive and caller != self.get_shortname(): # code to execute if the plugin is not stopped # and only, if the item has not been changed by this this plugin: self.logger.info("Update item: {}, item has been changed outside this plugin (caller={}, source={}, dest={})".format(item.property.path, caller, source, dest)) - if item.property.path == self._suspend_item_path: - if self._suspend_item is not None: - if item(): - self.suspend(f'suspend item {item.property.path}') - else: - self.resume(f'suspend item {item.property.path}') - return - if self.has_iattr(item.conf, 'rus_path'): path = self.get_iattr_value(item.conf, 'rus_path') p = self.params[path] @@ -282,18 +282,11 @@ def _send_cmd(self, cmd): if not self.alive: self.logger.error('Trying to send data but plugin is not running') return - - if self.suspended: - self.logger.debug('Plugin is suspended, data will not be written') - return self.logger.debug("Sending request: {0}".format(cmd)) # if connection is closed we don't wait for sh.con to reopen it # instead we reconnect immediatly -# - # if not self.connected: - # self.connect() if not self._client.connected: self._client.connect() diff --git a/russound/plugin.yaml b/russound/plugin.yaml index a10bc0484..673a04b41 100755 --- a/russound/plugin.yaml +++ b/russound/plugin.yaml @@ -12,8 +12,8 @@ plugin: documentation: https://www.smarthomeng.de/developer/plugins/russound/user_doc.html # url of documentation (wiki) page support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1800440-support-thread-für-das-russound-plugin - version: 1.7.2 # Plugin version - sh_minversion: '1.9.0' # minimum shNG version to use this plugin + version: 1.7.3 # Plugin version + sh_minversion: '1.10.0' # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: false # plugin supports multi instance restartable: true @@ -36,12 +36,12 @@ parameters: en: 'Russound port' fr: "Port de Russound" - standby_item: + pause_item: type: str default: '' description: - de: 
'Item zum Aktivieren des Suspend-Modus' - en: 'item for activating suspend mode' + de: 'Item zum Anhalten/Fortsetzen des Plugins' + en: 'item for stopping/starting the plugin' item_attributes: rus_path: diff --git a/smartvisu/__init__.py b/smartvisu/__init__.py index c89069c3b..732eca49d 100755 --- a/smartvisu/__init__.py +++ b/smartvisu/__init__.py @@ -46,7 +46,7 @@ class SmartVisu(SmartPlugin): - PLUGIN_VERSION="1.8.14" + PLUGIN_VERSION="1.8.15" ALLOW_MULTIINSTANCE = True visu_definition = None diff --git a/smartvisu/plugin.yaml b/smartvisu/plugin.yaml index 2b0fbb9fa..bd6878872 100755 --- a/smartvisu/plugin.yaml +++ b/smartvisu/plugin.yaml @@ -12,7 +12,7 @@ plugin: #documentation: '' support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1586800-support-thread-für-das-smartvisu-plugin - version: 1.8.14 # Plugin version + version: 1.8.15 # Plugin version sh_minversion: '1.9.3.5' # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: '3.6' # minimum Python version to use for this plugin diff --git a/smartvisu/webif/__init__.py b/smartvisu/webif/__init__.py index 01c20f72c..798bf4303 100755 --- a/smartvisu/webif/__init__.py +++ b/smartvisu/webif/__init__.py @@ -105,6 +105,12 @@ def index(self, reload=None): client['hostname'] = clientinfo.get('hostname', '') client['browser'] = clientinfo.get('browser', '') client['browserversion'] = clientinfo.get('browserversion', '') + + client['osname'] = clientinfo.get('os_name', '') + client['osversion'] = clientinfo.get('os_vers', '') + client['osversionname'] = clientinfo.get('os_vname', '') + client['platformtype'] = clientinfo.get('pl_type', '') + client['platformvendor'] = clientinfo.get('pl_vendor', '') clients.append(client) clients_sorted = sorted(clients, key=lambda k: k['name']) @@ -168,6 +174,12 @@ def get_data_html(self, dataSet=None): value_dict['hostname'] = clientinfo.get('hostname', '') value_dict['browser'] = clientinfo.get('browser', '') value_dict['browserversion'] = clientinfo.get('browserversion', '') + + value_dict['osname'] = clientinfo.get('os_name', '') + value_dict['osversion'] = clientinfo.get('os_vers', '') + value_dict['osversionname'] = clientinfo.get('os_vname', '') + value_dict['platformtype'] = clientinfo.get('pl_type', '') + value_dict['platformvendor'] = clientinfo.get('pl_vendor', '') client_list.append(value_dict) plglogics = [] diff --git a/smartvisu/webif/templates/index.html b/smartvisu/webif/templates/index.html index ba957b0df..801f371a6 100755 --- a/smartvisu/webif/templates/index.html +++ b/smartvisu/webif/templates/index.html @@ -203,6 +203,14 @@ let hostname = clients[client]['hostname']; let browser = clients[client]['browser']; let browserversion = clients[client]['browserversion']; + + let osname = clients[client]['osname']; + let osversion = clients[client]['osversion']; + let osversionname = clients[client]['osversionname']; + let platformtype = clients[client]['platformtype']; + let platformvendor = clients[client]['platformvendor']; + + if (hostname === '') hostname = platformvendor + ' ' + platformtype + ' (' + osname + ' ' + osversion + ')' //let newRow = clienttable.row.add( [ null, name, ip, port, protocol, sw + ' ' + swversion, browser + ' ' + browserversion, hostname] ); clientdata.push([ null, name, ip, port, protocol, sw + ' ' + swversion, browser + ' ' + browserversion, hostname]); } diff --git a/stateengine/StateEngineAction.py b/stateengine/StateEngineAction.py index 36497db3e..e8f2392da 
100755 --- a/stateengine/StateEngineAction.py +++ b/stateengine/StateEngineAction.py @@ -24,7 +24,6 @@ from . import StateEngineDefaults import datetime from lib.shtime import Shtime -from lib.item import Items import re @@ -68,7 +67,6 @@ def __init__(self, abitem, name: str): self._parent = self._abitem.id self._caller = StateEngineDefaults.plugin_identification self.shtime = Shtime.get_instance() - self.itemsApi = Items.get_instance() self._name = name self.__delay = StateEngineValue.SeValue(self._abitem, "delay") self.__repeat = None @@ -88,7 +86,7 @@ def __init__(self, abitem, name: str): def update_delay(self, value): _issue_list = [] - _, _, _issue = self.__delay.set(value) + _, _, _issue, _ = self.__delay.set(value) if _issue: _issue = {self._name: {'issue': _issue, 'attribute': 'delay', 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} @@ -104,7 +102,7 @@ def update_delay(self, value): def update_instanteval(self, value): if self.__instanteval is None: self.__instanteval = StateEngineValue.SeValue(self._abitem, "instanteval", False, "bool") - _, _, _issue = self.__instanteval.set(value) + _, _, _issue, _ = self.__instanteval.set(value) _issue = {self._name: {'issue': _issue, 'attribute': 'instanteval', 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} return _issue @@ -112,37 +110,37 @@ def update_instanteval(self, value): def update_repeat(self, value): if self.__repeat is None: self.__repeat = StateEngineValue.SeValue(self._abitem, "repeat", False, "bool") - _, _, _issue = self.__repeat.set(value) + _, _, _issue, _ = self.__repeat.set(value) _issue = {self._name: {'issue': _issue, 'attribute': 'repeat', 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} return _issue def update_order(self, value): - _, _, _issue = self.__order.set(value) + _, _, _issue, _ = self.__order.set(value) _issue = {self._name: {'issue': _issue, 'attribute': 'order', 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} return _issue def update_conditionset(self, value): - _, _, _issue = self.conditionset.set(value) + _, _, _issue, _ = self.conditionset.set(value) _issue = {self._name: {'issue': _issue, 'attribute': 'conditionset', 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} return _issue def update_previousconditionset(self, value): - _, _, _issue = self.previousconditionset.set(value) + _, _, _issue, _ = self.previousconditionset.set(value) _issue = {self._name: {'issue': _issue, 'attribute': 'previousconditionset', 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} return _issue def update_previousstate_conditionset(self, value): - _, _, _issue = self.previousstate_conditionset.set(value) + _, _, _issue, _ = self.previousstate_conditionset.set(value) _issue = {self._name: {'issue': _issue, 'attribute': 'previousstate_conditionset', 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} return _issue def update_mode(self, value): - _value, _, _issue = self.__mode.set(value) + _value, _, _issue, _ = self.__mode.set(value) _issue = {self._name: {'issue': _issue, 'attribute': 'mode', 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} return _value[0], _issue @@ -271,7 +269,7 @@ def check_getitem_fromeval(self, check_item, check_value=None, check_mindelta=No self._caller += '_self' #self._log_develop("Got item from eval on {} {}", self._function, check_item) else: - self._log_develop("Got no item from eval on {} with initial item {}", self._function, self.__item) + self._log_develop("Got 
no item from eval on {} with initial item {}", self._function, item) except Exception as ex: _issue = {self._name: {'issue': ex, 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} # raise Exception("Problem evaluating item '{}' from eval: {}".format(check_item, ex)) @@ -288,10 +286,10 @@ def check_getitem_fromeval(self, check_item, check_value=None, check_mindelta=No 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} return check_item, check_value, check_mindelta, _issue - def check_complete(self, item_state, check_item, check_status, check_mindelta, check_value, action_type, evals_items=None): + def check_complete(self, state, check_item, check_status, check_mindelta, check_value, action_type, evals_items=None, use=None): _issue = {self._name: {'issue': None, - 'issueorigin': [{'state': item_state.property.path, 'action': self._function}]}} - self._log_develop("Check item {} status {} value {} evals_items {}", check_item, check_status, check_value, evals_items) + 'issueorigin': [{'state': state.id, 'action': self._function}]}} + self._log_develop("Check item {} status {} value {} use {} evals_items {}", check_item, check_status, check_value, use, evals_items) try: _name = evals_items.get(self.name) if _name is not None: @@ -302,43 +300,40 @@ def check_complete(self, item_state, check_item, check_status, check_mindelta, c _eval = _eval if _eval not in (None, "None") else None check_item = _selfitem or _eval if check_item is None: - _returnitem, _returnissue = self._abitem.return_item(_item) - check_item = _returnitem + check_item, _returnissue = self._abitem.return_item(_item) else: _returnissue = None _issue = {self._name: {'issue': _returnissue, - 'issueorigin': [{'state': item_state.property.path, 'action': self._function}]}} - self._log_debug("Check item {} status {} value {} _returnissue {}", check_item, check_status, check_value, - _returnissue) + 'issueorigin': [{'state': state.id, 'action': self._function}]}} + self._log_debug("Check item {} status {} value {} _returnissue {}", check_item, check_status, + check_value, _returnissue) except Exception as ex: self._log_info("No valid item info for action {}, trying to get differently. Problem: {}", self._name, ex) # missing item in action: Try to find it. if check_item is None: - item = StateEngineTools.find_attribute(self._sh, item_state, "se_item_" + self._name) + item = StateEngineTools.find_attribute(self._sh, state, "se_item_" + self._name, 0, use) if item is not None: check_item, _issue = self._abitem.return_item(item) _issue = {self._name: {'issue': _issue, - 'issueorigin': [{'state': item_state.property.path, 'action': self._function}]}} + 'issueorigin': [{'state': state.id, 'action': self._function}]}} else: - item = StateEngineTools.find_attribute(self._sh, item_state, "se_eval_" + self._name) + item = StateEngineTools.find_attribute(self._sh, state, "se_eval_" + self._name, 0, use) if item is not None: check_item = str(item) if check_item is None and _issue[self._name].get('issue') is None: _issue = {self._name: {'issue': ['Item not defined in rules section'], - 'issueorigin': [{'state': item_state.property.path, 'action': self._function}]}} + 'issueorigin': [{'state': state.id, 'action': self._function}]}} # missing status in action: Try to find it. 
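[Editor's aside, not part of the diff] Reduced to plain Python, the fallback that check_complete() walks through for a missing action item looks like the sketch below. resolve_action_item and find_attr are illustrative names of my own; find_attr merely stands in for the StateEngineTools.find_attribute() calls shown above.

def resolve_action_item(name, check_item, find_attr):
    # Sketch only: mirrors the item fallback of check_complete() above.
    if check_item is not None:
        return check_item                   # already resolved, e.g. via evals_items
    item = find_attr("se_item_" + name)     # first fallback: se_item_<name> attribute
    if item is not None:
        return item                         # the real code still passes this through return_item()
    item = find_attr("se_eval_" + name)     # second fallback: se_eval_<name> attribute
    if item is not None:
        return str(item)                    # kept as an eval string
    return None                             # reported as 'Item not defined in rules section'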
if check_status is None: - status = StateEngineTools.find_attribute(self._sh, item_state, "se_status_" + self._name) + status = StateEngineTools.find_attribute(self._sh, state, "se_status_" + self._name, 0, use) if status is not None: check_status, _issue = self._abitem.return_item(status) _issue = {self._name: {'issue': _issue, - 'issueorigin': [{'state': item_state.property.path, 'action': self._function}]}} - elif check_status is not None: - check_status = str(status) + 'issueorigin': [{'state': state.id, 'action': self._function}]}} if check_mindelta.is_empty(): - mindelta = StateEngineTools.find_attribute(self._sh, item_state, "se_mindelta_" + self._name) + mindelta = StateEngineTools.find_attribute(self._sh, state, "se_mindelta_" + self._name, 0, use) if mindelta is not None: check_mindelta.set(mindelta) @@ -361,7 +356,7 @@ def check_complete(self, item_state, check_item, check_status, check_mindelta, c self._log_develop("Issue with {} action {}", action_type, _issue) else: _issue = {self._name: {'issue': None, - 'issueorigin': [{'state': item_state.property.path, 'action': self._function}]}} + 'issueorigin': [{'state': state.id, 'action': self._function}]}} return check_item, check_status, check_mindelta, check_value, _issue @@ -401,7 +396,7 @@ def _check_condition(condition: str): if _matching: self._log_debug("Given {} {} matches current one: {}", condition, _orig_cond, _updated__current_condition) _condition_met.append(_updated__current_condition) - _conditions_met_count +=1 + _conditions_met_count += 1 else: self._log_debug("Given {} {} not matching current one: {}", condition, _orig_cond, _updated__current_condition) except Exception as ex: @@ -436,7 +431,7 @@ def _update_repeat_webif(value: bool): self._getitem_fromeval() self._log_decrease_indent() _validitem = True - except Exception as ex: + except Exception: _validitem = False self._log_decrease_indent() if not self._can_execute(state): @@ -446,13 +441,13 @@ def _update_repeat_webif(value: bool): condition_necessary = 0 current_condition_met, cur_conditions_met, cur_condition_necessary = _check_condition('conditionset') conditions_met += cur_conditions_met - condition_necessary += cur_condition_necessary + condition_necessary += min(1, cur_condition_necessary) previous_condition_met, prev_conditions_met, prev_condition_necessary = _check_condition('previousconditionset') conditions_met += prev_conditions_met - condition_necessary += prev_condition_necessary + condition_necessary += min(1, prev_condition_necessary) previousstate_condition_met, prevst_conditions_met, prevst_condition_necessary = _check_condition('previousstate_conditionset') conditions_met += prevst_conditions_met - condition_necessary += prevst_condition_necessary + condition_necessary += min(1, prevst_condition_necessary) self._log_develop("Action '{0}': conditions met: {1}, necessary {2}.", self._name, conditions_met, condition_necessary) if conditions_met < condition_necessary: self._log_info("Action '{0}': Skipping because not all conditions are met.", self._name) @@ -509,14 +504,14 @@ def _update_repeat_webif(value: bool): else: self._waitforexecute(state, actionname, self._name, repeat_text, delay, current_condition_met, previous_condition_met, previousstate_condition_met) - _update_delay_webif('actions_stay', _delay_info) - _update_delay_webif('actions_enter', _delay_info) - _update_delay_webif('actions_enter_or_stay', _delay_info) + _update_delay_webif('actions_stay', str(_delay_info)) + _update_delay_webif('actions_enter', str(_delay_info)) + 
_update_delay_webif('actions_enter_or_stay', str(_delay_info)) try: state.update_name(state.state_item) _key_name = ['{}'.format(state.id), 'name'] self._abitem.update_webif(_key_name, state.name) - _update_delay_webif('actions_leave', _delay_info) + _update_delay_webif('actions_leave', str(_delay_info)) except Exception: pass @@ -526,8 +521,8 @@ def update(self, value): raise NotImplementedError("Class {} doesn't implement update()".format(self.__class__.__name__)) # Complete action - # item_state: state item to read from - def complete(self, item_state, evals_items=None): + # state: state (item) to read from + def complete(self, state, evals_items=None, use=None): raise NotImplementedError("Class {} doesn't implement complete()".format(self.__class__.__name__)) # Check if execution is possible @@ -537,7 +532,14 @@ def _can_execute(self, state): def get(self): return True - def _waitforexecute(self, state, actionname: str, namevar: str = "", repeat_text: str = "", delay: int = 0, current_condition: str = "", previous_condition: str = "", previousstate_condition: str = ""): + def _waitforexecute(self, state, actionname: str, namevar: str = "", repeat_text: str = "", delay: int = 0, current_condition: list[str] = None, previous_condition: list[str] = None, previousstate_condition: list[str] = None): + if current_condition is None: + current_condition = [] + if previous_condition is None: + previous_condition = [] + if previousstate_condition is None: + previousstate_condition = [] + self._log_decrease_indent(50) self._log_increase_indent() if delay == 0: @@ -594,6 +596,7 @@ class SeActionSetItem(SeActionBase): def __init__(self, abitem, name: str): super().__init__(abitem, name) self.__item = None + self.__eval_item = None self.__status = None self.__delta = 0 self.__value = StateEngineValue.SeValue(self._abitem, "value") @@ -606,6 +609,7 @@ def __repr__(self): def _getitem_fromeval(self): if self.__item is None: return + self.__eval_item = self.__item self.__item, self.__value, self.__mindelta, _issue = self.check_getitem_fromeval(self.__item, self.__value, self.__mindelta) if self.__item is None: @@ -615,15 +619,15 @@ def _getitem_fromeval(self): # set the action based on a set_(action_name) attribute # value: Value of the set_(action_name) attribute def update(self, value): - _, _, _issue = self.__value.set(value) + _, _, _issue, _ = self.__value.set(value) _issue = {self._name: {'issue': _issue, 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} return _issue # Complete action - # item_state: state item to read from - def complete(self, item_state, evals_items=None): + # state: state (item) to read from + def complete(self, state, evals_items=None, use=None): self.__item, self.__status, self.__mindelta, self.__value, _issue = self.check_complete( - item_state, self.__item, self.__status, self.__mindelta, self.__value, "set", evals_items) + state, self.__item, self.__status, self.__mindelta, self.__value, "set", evals_items, use) self._action_status = _issue return _issue @@ -643,8 +647,6 @@ def write_to_logger(self): self._log_debug("item: {0}", self.__item.property.path) else: self._log_debug("item is not defined! 
Check log file.") - if self.__status is not None: - self._log_debug("status: {0}", self.__status.property.path) self.__mindelta.write_to_logger() self.__value.write_to_logger() @@ -675,7 +677,7 @@ def real_execute(self, state, actionname: str, namevar: str = "", repeat_text: s if value is None: self._log_debug("{0}: Value is None", actionname) - pat = "(?:[^,\(]*)\'(.*?)\'" + pat = r"(?:[^,(]*)\'(.*?)\'" self.update_webif_actionstatus(state, re.findall(pat, actionname)[0], 'False', 'Value is None') return @@ -706,10 +708,11 @@ def _execute_set_add_remove(self, state, actionname, namevar, repeat_text, item, self._log_decrease_indent() self._log_debug("{0}: Set '{1}' to '{2}'{3}", actionname, item.property.path, value, repeat_text) source = self.set_source(current_condition, previous_condition, previousstate_condition) - pat = "(?:[^,\(]*)\'(.*?)\'" + pat = r"(?:[^,(]*)\'(.*?)\'" self.update_webif_actionstatus(state, re.findall(pat, actionname)[0], 'True') # noinspection PyCallingNonCallable item(value, caller=self._caller, source=source) + self.__item = self.__eval_item def get(self): orig_item = self.__item @@ -723,7 +726,7 @@ def get(self): item = str(self.__item.property.path) else: item = None - except Exception as ex: + except Exception: item = None try: val = self.__value.get() @@ -733,18 +736,19 @@ def get(self): value = None except Exception: value = None + self.__item = orig_item mindelta = self.__mindelta.get() if mindelta is None: result = {'function': str(self._function), 'item': item, 'item_from_eval': item_from_eval, - 'value': value, 'conditionset': str(self.conditionset.get()), - 'previousconditionset': str(self.previousconditionset.get()), - 'previousstate_conditionset': str(self.previousstate_conditionset.get()), 'actionstatus': {}} + 'value': value, 'conditionset': self.conditionset.get(), + 'previousconditionset': self.previousconditionset.get(), + 'previousstate_conditionset': self.previousstate_conditionset.get(), 'actionstatus': {}} else: result = {'function': str(self._function), 'item': item, 'item_from_eval': item_from_eval, - 'value': value, 'conditionset': str(self.conditionset.get()), - 'previousconditionset': str(self.previousconditionset.get()), - 'previousstate_conditionset': str(self.previousstate_conditionset.get()), 'actionstatus': {}, - 'delta': str(self.__delta), 'mindelta': str(mindelta)} + 'value': value, 'conditionset': self.conditionset.get(), + 'previousconditionset': self.previousconditionset.get(), + 'previousstate_conditionset': self.previousstate_conditionset.get(), 'actionstatus': {}, + 'delta': str(self.__delta), 'mindelta': str(mindelta)} return result @@ -770,8 +774,8 @@ def update(self, value): return _issue # Complete action - # item_state: state item to read from - def complete(self, item_state, evals_items=None): + # state: state (item) to read from + def complete(self, state, evals_items=None, use=None): self._scheduler_name = "{}-SeByAttrDelayTimer".format(self.__byattr) _issue = {self._name: {'issue': None, 'attribute': self.__byattr, 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} @@ -791,14 +795,14 @@ def real_execute(self, state, actionname: str, namevar: str = "", repeat_text: s self._log_info("{0}: Setting values by attribute '{1}'.{2}", actionname, self.__byattr, repeat_text) self.update_webif_actionstatus(state, self._name, 'True') source = self.set_source(current_condition, previous_condition, previousstate_condition) - for item in self.itemsApi.find_items(self.__byattr): + for item in 
self._sh.find_items(self.__byattr): self._log_info("\t{0} = {1}", item.property.path, item.conf[self.__byattr]) item(item.conf[self.__byattr], caller=self._caller, source=source) def get(self): result = {'function': str(self._function), 'byattr': str(self.__byattr), - 'conditionset': str(self.conditionset.get()), 'previousconditionset': str(self.previousconditionset.get()), - 'previousstate_conditionset': str(self.previousstate_conditionset.get()), 'actionstatus': {}} + 'conditionset': self.conditionset.get(), 'previousconditionset': self.previousconditionset.get(), + 'previousstate_conditionset': self.previousstate_conditionset.get(), 'actionstatus': {}} return result @@ -822,14 +826,14 @@ def update(self, value): logic, value = StateEngineTools.partition_strip(value, ":") self.__logic = logic value = None if value == "" else value - _, _, _issue = self.__value.set(value) + _, _, _issue, _ = self.__value.set(value) _issue = {self._name: {'issue': _issue, 'logic': self.__logic, 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} return _issue # Complete action - # item_state: state item to read from - def complete(self, item_state, evals_items=None): + # state: state (item) to read from + def complete(self, state, evals_items=None, use=None): self._scheduler_name = "{}-SeLogicDelayTimer".format(self.__logic) _issue = {self._name: {'issue': None, 'logic': self.__logic, 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} @@ -869,11 +873,12 @@ def get(self): except Exception: value = None result = {'function': str(self._function), 'logic': str(self.__logic), - 'value': value, - 'conditionset': str(self.conditionset.get()), 'previousconditionset': str(self.previousconditionset.get()), - 'previousstate_conditionset': str(self.previousstate_conditionset.get()), 'actionstatus': {}} + 'value': value, + 'conditionset': self.conditionset.get(), 'previousconditionset': self.previousconditionset.get(), + 'previousstate_conditionset': self.previousstate_conditionset.get(), 'actionstatus': {}} return result + # Class representing a single "se_run" action class SeActionRun(SeActionBase): # Initialize the action @@ -902,8 +907,8 @@ def update(self, value): return _issue # Complete action - # item_state: state item to read from - def complete(self, item_state, evals_items=None): + # state: state (item) to read from + def complete(self, state, evals_items=None, use=None): self._scheduler_name = "{}-SeRunDelayTimer".format(StateEngineTools.get_eval_name(self.__eval)) _issue = {self._name: {'issue': None, 'eval': StateEngineTools.get_eval_name(self.__eval), 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} @@ -943,7 +948,7 @@ def real_execute(self, state, actionname: str, namevar: str = "", repeat_text: s self._log_decrease_indent() text = "{0}: Problem evaluating '{1}': {2}." self.update_webif_actionstatus(state, self._name, 'False', 'Problem evaluating: {}'.format(ex)) - self._log_error(text.format(actionname, StateEngineTools.get_eval_name(self.__eval), ex)) + self._log_error(text, actionname, StateEngineTools.get_eval_name(self.__eval), ex) else: try: if returnvalue: @@ -962,12 +967,12 @@ def real_execute(self, state, actionname: str, namevar: str = "", repeat_text: s self._log_decrease_indent() self.update_webif_actionstatus(state, self._name, 'False', 'Problem calling: {}'.format(ex)) text = "{0}: Problem calling '{0}': {1}." 
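[Editor's aside, not part of the diff] The change that follows, like the matching 'Problem evaluating' change a few lines earlier, stops pre-rendering the message with str.format() and instead hands the format string plus its arguments to the StateEngine log helper, which renders them itself. A minimal, hypothetical stand-in for such a helper, only to illustrate the call style:

def _log_error_sketch(text, *args):
    # Sketch only: placeholders are rendered when the message is actually emitted.
    message = text.format(*args) if args else text
    print("ERROR:", message)

_log_error_sketch("{0}: Problem evaluating '{1}': {2}.", "myAction", "my_eval", "division by zero")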
- self._log_error(text.format(actionname, StateEngineTools.get_eval_name(self.__eval), ex)) + self._log_error(text, actionname, StateEngineTools.get_eval_name(self.__eval), ex) def get(self): result = {'function': str(self._function), 'eval': str(self.__eval), - 'conditionset': str(self.conditionset.get()), 'previousconditionset': str(self.previousconditionset.get()), - 'previousstate_conditionset': str(self.previousstate_conditionset.get()), 'actionstatus': {}} + 'conditionset': self.conditionset.get(), 'previousconditionset': self.previousconditionset.get(), + 'previousstate_conditionset': self.previousstate_conditionset.get(), 'actionstatus': {}} return result @@ -979,8 +984,10 @@ class SeActionForceItem(SeActionBase): def __init__(self, abitem, name: str): super().__init__(abitem, name) self.__item = None + self.__eval_item = None self.__status = None self.__value = StateEngineValue.SeValue(self._abitem, "value") + self.__delta = 0 self.__mindelta = StateEngineValue.SeValue(self._abitem, "mindelta") self._function = "force set" @@ -990,15 +997,15 @@ def __repr__(self): # set the action based on a set_(action_name) attribute # value: Value of the set_(action_name) attribute def update(self, value): - _, _, _issue = self.__value.set(value) + _, _, _issue, _ = self.__value.set(value) _issue = {self._name: {'issue': _issue, 'issueorigin': [{'state': 'unknown', 'action': self._function}]}} return _issue # Complete action - # item_state: state item to read from - def complete(self, item_state, evals_items=None): + # state: state (item) to read from + def complete(self, state, evals_items=None, use=None): self.__item, self.__status, self.__mindelta, self.__value, _issue = self.check_complete( - item_state, self.__item, self.__status, self.__mindelta, self.__value, "force", evals_items) + state, self.__item, self.__status, self.__mindelta, self.__value, "force", evals_items, use) self._action_status = _issue return _issue @@ -1045,6 +1052,7 @@ def _can_execute(self, state): def _getitem_fromeval(self): if self.__item is None: return + self.__eval_item = self.__item self.__item, self.__value, self.__mindelta, _issue = self.check_getitem_fromeval(self.__item, self.__value, self.__mindelta) if self.__item is None: @@ -1061,7 +1069,8 @@ def real_execute(self, state, actionname: str, namevar: str = "", repeat_text: s if value is None: self._log_debug("{0}: Value is None", actionname) - self.update_webif_actionstatus(state, self._name, 'False', 'Value is None') + pat = r"(?:[^,(]*)\'(.*?)\'" + self.update_webif_actionstatus(state, re.findall(pat, actionname)[0], 'False', 'Value is None') return if returnvalue: @@ -1070,12 +1079,19 @@ def real_execute(self, state, actionname: str, namevar: str = "", repeat_text: s if not self.__mindelta.is_empty(): mindelta = self.__mindelta.get() - # noinspection PyCallingNonCallable - delta = float(abs(self.__item() - value)) + if self.__status is not None: + # noinspection PyCallingNonCallable + delta = float(abs(self.__status() - value)) + additionaltext = "of statusitem " + else: + delta = float(abs(self.__item() - value)) + additionaltext = "" + + self.__delta = delta if delta < mindelta: + text = "{0}: Not setting '{1}' to '{2}' because delta {3}'{4:.2}' is lower than mindelta '{5}'" + self._log_debug(text, actionname, self.__item.property.path, value, additionaltext, delta, mindelta) self.update_webif_actionstatus(state, self._name, 'False') - text = "{0}: Not setting '{1}' to '{2}' because delta '{3:.2}' is lower than mindelta '{4}'" - self._log_debug(text, 
actionname, self.__item.property.path, value, delta, mindelta) return source = self.set_source(current_condition, previous_condition, previousstate_condition) # Set to different value first ("force") @@ -1103,6 +1119,7 @@ def real_execute(self, state, actionname: str, namevar: str = "", repeat_text: s self.update_webif_actionstatus(state, self._name, 'True') # noinspection PyCallingNonCallable self.__item(value, caller=self._caller, source=source) + self.__item = self.__eval_item def get(self): orig_item = self.__item @@ -1126,9 +1143,19 @@ def get(self): value = None except Exception: value = None - result = {'function': str(self._function), 'item': item, 'item_from_eval': item_from_eval, 'value': value, - 'conditionset': str(self.conditionset.get()), 'previousconditionset': str(self.previousconditionset.get()), - 'previousstate_conditionset': str(self.previousstate_conditionset.get()), 'actionstatus': {}} + self.__item = orig_item + mindelta = self.__mindelta.get() + if mindelta is None: + result = {'function': str(self._function), 'item': item, 'item_from_eval': item_from_eval, + 'value': value, 'conditionset': self.conditionset.get(), + 'previousconditionset': self.previousconditionset.get(), + 'previousstate_conditionset': self.previousstate_conditionset.get(), 'actionstatus': {}} + else: + result = {'function': str(self._function), 'item': item, 'item_from_eval': item_from_eval, + 'value': value, 'conditionset': self.conditionset.get(), + 'previousconditionset': self.previousconditionset.get(), + 'previousstate_conditionset': self.previousstate_conditionset.get(), 'actionstatus': {}, + 'delta': str(self.__delta), 'mindelta': str(mindelta)} return result @@ -1161,8 +1188,8 @@ def update(self, value): return _issue # Complete action - # item_state: state item to read from - def complete(self, item_state, evals_items=None): + # state: state (item) to read from + def complete(self, state, evals_items=None, use=None): if isinstance(self.__value, list): item = self.__value[0].property.path else: @@ -1308,9 +1335,9 @@ def get(self): except Exception: pass result = {'function': str(self._function), 'special': str(self.__special), - 'value': str(value_result), 'conditionset': str(self.conditionset.get()), - 'previousconditionset': str(self.previousconditionset.get()), - 'previousstate_conditionset': str(self.previousstate_conditionset.get()), 'actionstatus': {}} + 'value': str(value_result), 'conditionset': self.conditionset.get(), + 'previousconditionset': self.previousconditionset.get(), + 'previousstate_conditionset': self.previousstate_conditionset.get(), 'actionstatus': {}} return result @@ -1356,9 +1383,9 @@ def get(self): except Exception: value = None result = {'function': str(self._function), 'item': item, - 'value': value, 'conditionset': str(self.conditionset.get()), - 'previousconditionset': str(self.previousconditionset.get()), - 'previousstate_conditionset': str(self.previousstate_conditionset.get()), 'actionstatus': {}} + 'value': value, 'conditionset': self.conditionset.get(), + 'previousconditionset': self.previousconditionset.get(), + 'previousstate_conditionset': self.previousstate_conditionset.get(), 'actionstatus': {}} return result @@ -1410,9 +1437,9 @@ def get(self): except Exception: value = None result = {'function': str(self._function), 'item': item, - 'value': value, 'conditionset': str(self.conditionset.get()), - 'previousconditionset': str(self.previousconditionset.get()), - 'previousstate_conditionset': str(self.previousstate_conditionset.get()), 'actionstatus': 
{}} + 'value': value, 'conditionset': self.conditionset.get(), + 'previousconditionset': self.previousconditionset.get(), + 'previousstate_conditionset': self.previousstate_conditionset.get(), 'actionstatus': {}} return result @@ -1466,9 +1493,9 @@ def get(self): except Exception: value = None result = {'function': str(self._function), 'item': item, - 'value': value, 'conditionset': str(self.conditionset.get()), - 'previousconditionset': str(self.previousconditionset.get()), - 'previousstate_conditionset': str(self.previousstate_conditionset.get()), 'actionstatus': {}} + 'value': value, 'conditionset': self.conditionset.get(), + 'previousconditionset': self.previousconditionset.get(), + 'previousstate_conditionset': self.previousstate_conditionset.get(), 'actionstatus': {}} return result @@ -1520,7 +1547,7 @@ def get(self): except Exception: value = None result = {'function': str(self._function), 'item': item, - 'value': value, 'conditionset': str(self.conditionset.get()), - 'previousconditionset': str(self.previousconditionset.get()), - 'previousstate_conditionset': str(self.previousstate_conditionset.get()), 'actionstatus': {}} + 'value': value, 'conditionset': self.conditionset.get(), + 'previousconditionset': self.previousconditionset.get(), + 'previousstate_conditionset': self.previousstate_conditionset.get(), 'actionstatus': {}} return result diff --git a/stateengine/StateEngineActions.py b/stateengine/StateEngineActions.py index 6aa9976f2..89bef33bd 100755 --- a/stateengine/StateEngineActions.py +++ b/stateengine/StateEngineActions.py @@ -21,7 +21,6 @@ from . import StateEngineAction from . import StateEngineTools -import ast import threading import queue @@ -49,16 +48,16 @@ def __init__(self, abitem): def __repr__(self): return "SeActions, count {}".format(self.count()) - def dict_actions(self, type, state): + def dict_actions(self, action_type, state): result = {} for name in self.__actions: - self._abitem._initactionname = name + self._abitem.initactionname = name result.update({name: self.__actions[name].get()}) try: - result[name].update({'actionstatus': self._abitem.webif_infos[state][type][name].get('actionstatus')}) + result[name].update({'actionstatus': self._abitem.webif_infos[state][action_type][name].get('actionstatus')}) except Exception: pass - self._abitem._initactionname = None + self._abitem.initactionname = None return result def reset(self): @@ -77,6 +76,14 @@ def update(self, attribute, value): _count = 0 _issue = None try: + if func == "se_action": # and name not in self.__actions: + _issue = self.__handle_combined_action_attribute(name, value) + _count += 1 + return _count, _issue + elif isinstance(value, str): + value = ":".join(map(str.strip, value.split(":"))) + if value[:1] == '[' and value[-1:] == ']': + value = StateEngineTools.convert_str_to_list(value, False) if func == "se_delay": # set delay if name not in self.__actions: @@ -149,9 +156,6 @@ def update(self, attribute, value): else: _issue = self.__actions[name].update_order(value) return _count, _issue - elif func == "se_action": # and name not in self.__actions: - _issue = self.__handle_combined_action_attribute(name, value) - _count += 1 else: _issue_list = [] _ensure_action, _issue = self.__ensure_action_exists(func, name) @@ -165,9 +169,9 @@ def update(self, attribute, value): _count += 1 _issue = StateEngineTools.flatten_list(_issue_list) except ValueError as ex: + _issue = {name: {'issue': ex, 'issueorigin': [{'state': 'unknown', 'action': self.__actions[name].function}], 'ignore': True}} if 
name in self.__actions: del self.__actions[name] - _issue = {name: {'issue': ex, 'issueorigin': [{'state': 'unknown', 'action': self.__actions[name].function}]}} self._log_warning("Ignoring action {0} because: {1}", attribute, ex) return _count, _issue @@ -194,13 +198,14 @@ def __check_force_setting(self, name, value, function): "because parameter 'force' is 'False'!", name) _returnfunction = "set" return _issue, _returnfunction + def __check_mode_setting(self, name, value, function, action): if value is not None: possible_mode_list = ['first', 'last', 'all'] _issue = None # Parameter mode is supported only for type "remove" - if not "remove" in function: - _issue = {name: {'issue': ['Parameter mode not supported for this function'], 'attribute': 'mode', + if "remove" not in function: + _issue = {name: {'issue': ['Parameter mode only supported for remove function'], 'attribute': 'mode', 'issueorigin': [{'state': 'unknown', 'action': function}]}} self._log_warning("Attribute 'se_action_{0}': Parameter 'mode' not supported for function '{1}'", name, function) @@ -312,39 +317,54 @@ def __ensure_action_exists(self, func, name): return True, _issue_list def __handle_combined_action_attribute(self, name, value_list): + def remove_action(ex): + if name in self.__actions: + del self.__actions[name] + _issue = {name: {'issue': [ex], 'issueorigin': [{'state': 'unknown', 'action': parameter['function']}], 'ignore': True}} + _issue_list.append(_issue) + self._log_warning("Ignoring action {0} because: {1}", name, ex) + + parameter = {'function': None, 'force': None, 'repeat': None, 'delay': 0, 'order': None, 'conditionset': None, + 'previousconditionset': None, 'previousstate_conditionset': None, 'mode': None, 'instanteval': None} + _issue = None + _issue_list = [] # value_list needs to be string or list if isinstance(value_list, str): value_list = [value_list, ] elif not isinstance(value_list, list): - raise ValueError("Attribute 'se_action_{0}': Value must be a string or a list!".format(name)) + remove_action("Value must be a string or a list!") + return _issue_list # parse parameters - parameter = {'function': None, 'force': None, 'repeat': None, 'delay': 0, 'order': None, 'conditionset': None, - 'previousconditionset': None, 'previousstate_conditionset': None, 'mode': None, 'instanteval': None} for entry in value_list: - if isinstance(entry, dict): - entry = list("{!s}:{!s}".format(k, v) for (k, v) in entry.items())[0] - key, val = StateEngineTools.partition_strip(entry, ":") - val = ":".join(map(str.strip, val.split(":"))) - if val[:1] == '[' and val[-1:] == ']': - val = ast.literal_eval(val) - if key == "function": - parameter[key] = StateEngineTools.cast_str(val) - elif key == "force": - parameter[key] = StateEngineTools.cast_bool(val) - else: - parameter[key] = val + try: + if isinstance(entry, dict): + entry = list("{!s}:{!s}".format(k, v) for (k, v) in entry.items())[0] + key, val = StateEngineTools.partition_strip(entry, ":") + val = ":".join(map(str.strip, val.split(":"))) + if val[:1] == '[' and val[-1:] == ']': + val = StateEngineTools.convert_str_to_list(val, False) + if key == "function": + parameter[key] = StateEngineTools.cast_str(val) + elif key == "force": + parameter[key] = StateEngineTools.cast_bool(val) + else: + parameter[key] = val + except Exception as ex: + remove_action("Problem with entry {} for action {}: {}".format(entry, name, ex)) + if _issue_list: + return _issue_list parameter['action'] = name - _issue_list = [] + # function given and valid? 
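# Example (hypothetical item configuration, for illustration only): a combined
# attribute such as
#   se_action_myaction:
#     - 'function: set'
#     - 'to: value:true'
#     - 'delay: 30'
# arrives here as a list of "key: value" strings, so after the parsing loop
# above the parameter dict looks roughly like
#   {'function': 'set', 'to': 'value:true', 'delay': '30', 'force': None, ...}
# and the checks below only have to validate the 'function' entry.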
if parameter['function'] is None: - raise ValueError("Attribute 'se_action_{0}: Parameter 'function' must be set!".format(name)) + remove_action("Attribute 'se_action_{0}: Parameter 'function' must be set!".format(name)) + return _issue_list if parameter['function'] not in ('set', 'force', 'run', 'byattr', 'trigger', 'special', 'add', 'remove', 'removeall', 'removefirst', 'removelast'): - raise ValueError("Attribute 'se_action_{0}: Invalid value '{1}' for parameter " - "'function'!".format(name, parameter['function'])) + remove_action("Attribute 'se_action_{0}: Invalid value '{1}' for parameter 'function'!".format(name, parameter['function'])) + return _issue_list - _issue = None _issue, parameter['function'] = self.__check_force_setting(name, parameter['force'], parameter['function']) if _issue: _issue_list.append(_issue) @@ -354,7 +374,6 @@ def __handle_combined_action_attribute(self, name, value_list): if _action: self.__actions[name] = _action # create action based on function - exists = False try: if parameter['function'] == "set": _action_exists, _issue = self.__ensure_action_exists("se_set", name) @@ -363,7 +382,6 @@ def __handle_combined_action_attribute(self, name, value_list): if _action_exists: self.__raise_missing_parameter_error(parameter, 'to') self.__actions[name].update(parameter['to']) - exists = True elif parameter['function'] == "force": _action_exists, _issue = self.__ensure_action_exists("se_force", name) if _issue: @@ -371,7 +389,6 @@ def __handle_combined_action_attribute(self, name, value_list): if _action_exists: self.__raise_missing_parameter_error(parameter, 'to') self.__actions[name].update(parameter['to']) - exists = True elif parameter['function'] == "run": _action_exists, _issue = self.__ensure_action_exists("se_run", name) if _issue: @@ -379,7 +396,6 @@ def __handle_combined_action_attribute(self, name, value_list): if _action_exists: self.__raise_missing_parameter_error(parameter, 'eval') self.__actions[name].update(parameter['eval']) - exists = True elif parameter['function'] == "byattr": _action_exists, _issue = self.__ensure_action_exists("se_byattr", name) if _issue: @@ -387,7 +403,6 @@ def __handle_combined_action_attribute(self, name, value_list): if _action_exists: self.__raise_missing_parameter_error(parameter, 'attribute') self.__actions[name].update(parameter['attribute']) - exists = True elif parameter['function'] == "trigger": _action_exists, _issue = self.__ensure_action_exists("se_trigger", name) if _issue: @@ -398,7 +413,6 @@ def __handle_combined_action_attribute(self, name, value_list): self.__actions[name].update(parameter['logic'] + ':' + parameter['value']) else: self.__actions[name].update(parameter['logic']) - exists = True elif parameter['function'] == "special": _action_exists, _issue = self.__ensure_action_exists("se_special", name) if _issue: @@ -406,7 +420,6 @@ def __handle_combined_action_attribute(self, name, value_list): if _action_exists: self.__raise_missing_parameter_error(parameter, 'value') self.__actions[name].update(parameter['value']) - exists = True elif parameter['function'] == "add": _action_exists, _issue = self.__ensure_action_exists("se_add", name) if _issue: @@ -414,7 +427,6 @@ def __handle_combined_action_attribute(self, name, value_list): if _action_exists: self.__raise_missing_parameter_error(parameter, 'value') self.__actions[name].update(parameter['value']) - exists = True elif parameter['function'] == "remove": _action_exists, _issue = self.__ensure_action_exists("se_remove", name) if _issue: @@ -422,7 
+434,6 @@ def __handle_combined_action_attribute(self, name, value_list): if _action_exists: self.__raise_missing_parameter_error(parameter, 'value') self.__actions[name].update(parameter['value']) - exists = True elif parameter['function'] == "removeall": _action_exists, _issue = self.__ensure_action_exists("se_removeall", name) if _issue: @@ -430,7 +441,6 @@ def __handle_combined_action_attribute(self, name, value_list): if _action_exists: self.__raise_missing_parameter_error(parameter, 'value') self.__actions[name].update(parameter['value']) - exists = True elif parameter['function'] == "removefirst": _action_exists, _issue = self.__ensure_action_exists("se_removefirst", name) if _issue: @@ -438,7 +448,6 @@ def __handle_combined_action_attribute(self, name, value_list): if _action_exists: self.__raise_missing_parameter_error(parameter, 'value') self.__actions[name].update(parameter['value']) - exists = True elif parameter['function'] == "removelast": _action_exists, _issue = self.__ensure_action_exists("se_removelast", name) if _issue: @@ -446,55 +455,49 @@ def __handle_combined_action_attribute(self, name, value_list): if _action_exists: self.__raise_missing_parameter_error(parameter, 'value') self.__actions[name].update(parameter['value']) - exists = True except ValueError as ex: - exists = False - if name in self.__actions: - del self.__actions[name] - _issue = {name: {'issue': ex, 'issueorigin': [{'state': 'unknown', 'action': parameter['function']}]}} - _issue_list.append(_issue) - self._log_warning("Ignoring action {0} because: {1}", name, ex) + remove_action(ex) + return _issue_list # add additional parameters - if exists: - if parameter['instanteval'] is not None: - _issue = self.__actions[name].update_instanteval(parameter['instanteval']) - if _issue: - _issue_list.append(_issue) - if parameter['repeat'] is not None: - _issue = self.__actions[name].update_repeat(parameter['repeat']) - if _issue: - _issue_list.append(_issue) - if parameter['delay'] != 0: - _issue = self.__actions[name].update_delay(parameter['delay']) - if _issue: - _issue_list.append(_issue) - if parameter['order'] is not None: - _issue = self.__actions[name].update_order(parameter['order']) - if _issue: - _issue_list.append(_issue) - if parameter['conditionset'] is not None: - _issue = self.__actions[name].update_conditionset(parameter['conditionset']) - if _issue: - _issue_list.append(_issue) - if parameter['previousconditionset'] is not None: - _issue = self.__actions[name].update_previousconditionset(parameter['previousconditionset']) - if _issue: - _issue_list.append(_issue) - if parameter['previousstate_conditionset'] is not None: - _issue = self.__actions[name].update_previousstate_conditionset(parameter['previousstate_conditionset']) - if _issue: - _issue_list.append(_issue) - if parameter['mode'] is not None: - _val, _issue = self.__actions[name].update_mode(parameter['mode']) - if _issue: - _issue_list.append(_issue) - _issue, _action = self.__check_mode_setting(name, _val, parameter['function'], self.__actions[name]) - if _issue: - _issue_list.append(_issue) - if _action: - self.__actions[name] = _action + if parameter['instanteval'] is not None: + _issue = self.__actions[name].update_instanteval(parameter['instanteval']) + if _issue: + _issue_list.append(_issue) + if parameter['repeat'] is not None: + _issue = self.__actions[name].update_repeat(parameter['repeat']) + if _issue: + _issue_list.append(_issue) + if parameter['delay'] != 0: + _issue = self.__actions[name].update_delay(parameter['delay']) 
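# Sketch of the pattern used for these optional parameters (hypothetical helper,
# assuming each update_* method returns an issue dict or None): failures are
# collected in _issue_list instead of aborting the whole action.
def apply_optional_parameters(updates):
    # updates: iterable of (update_callable, value), e.g. (action.update_repeat, repeat)
    issues = []
    for update, value in updates:
        if value is None:
            continue
        issue = update(value)
        if issue:
            issues.append(issue)
    return issues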
+ if _issue: + _issue_list.append(_issue) + if parameter['order'] is not None: + _issue = self.__actions[name].update_order(parameter['order']) + if _issue: + _issue_list.append(_issue) + if parameter['conditionset'] is not None: + _issue = self.__actions[name].update_conditionset(parameter['conditionset']) + if _issue: + _issue_list.append(_issue) + if parameter['previousconditionset'] is not None: + _issue = self.__actions[name].update_previousconditionset(parameter['previousconditionset']) + if _issue: + _issue_list.append(_issue) + if parameter['previousstate_conditionset'] is not None: + _issue = self.__actions[name].update_previousstate_conditionset(parameter['previousstate_conditionset']) + if _issue: + _issue_list.append(_issue) + if parameter['mode'] is not None: + _val, _issue = self.__actions[name].update_mode(parameter['mode']) + if _issue: + _issue_list.append(_issue) + _issue, _action = self.__check_mode_setting(name, _val, parameter['function'], self.__actions[name]) + if _issue: + _issue_list.append(_issue) + if _action: + self.__actions[name] = _action return _issue_list # noinspection PyMethodMayBeStatic @@ -504,15 +507,17 @@ def __raise_missing_parameter_error(self, parameter, param_name): "function '{2}'!".format(parameter['action'], param_name, parameter['function'])) # Check the actions optimize and complete them - # item_state: item to read from - def complete(self, item_state, evals_items=None): + # state: state (item) to read from + def complete(self, state, evals_items=None, use=None): _status = {} + if use is None: + use = state.use.get() for name in self.__actions: try: - _status.update(self.__actions[name].complete(item_state, evals_items)) + _status.update(self.__actions[name].complete(state, evals_items, use)) except ValueError as ex: - _status.update({name: {'issue': ex, 'issueorigin': {'state': item_state.property.path, 'action': 'unknown'}}}) - raise ValueError("State '{0}', Action '{1}': {2}".format(item_state.property.path, name, ex)) + _status.update({name: {'issue': ex, 'issueorigin': {'state': state.id, 'action': 'unknown'}}}) + raise ValueError("State '{0}', Action '{1}': {2}".format(state.id, name, ex)) return _status def set(self, value): @@ -573,7 +578,7 @@ def write_to_logger(self): # noinspection PyProtectedMember self._log_info("Action '{0}':", action.name) self._log_increase_indent() - self._abitem._initactionname = action.name + self._abitem.initactionname = action.name action.write_to_logger() - self._abitem._initactionname = None + self._abitem.initactionname = None self._log_decrease_indent() diff --git a/stateengine/StateEngineCliCommands.py b/stateengine/StateEngineCliCommands.py index 0c82d6c78..e3eb363d5 100755 --- a/stateengine/StateEngineCliCommands.py +++ b/stateengine/StateEngineCliCommands.py @@ -23,7 +23,6 @@ # noinspection PyUnresolvedReferences from lib.model.smartplugin import SmartPlugin from lib.plugin import Plugins -from bin.smarthome import VERSION class SeCliCommands: @@ -42,7 +41,8 @@ def __init__(self, smarthome, items, logger): self.logger.info("StateEngine: Additional CLI commands not registered because CLI plugin is too old") else: cli.commands.add_command("se_list", self.cli_list, "StateEngine", "se_list: list StateEngine items") - cli.commands.add_command("se_detail", self.cli_detail, "StateEngine", "se_detail [seItem]: show details on StateEngine item [seItem]") + cli.commands.add_command("se_detail", self.cli_detail, "StateEngine", + "se_detail [seItem]: show details on StateEngine item [seItem]") 
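# Usage note (based on the help strings above): once registered, both commands
# can be called from a SmartHomeNG CLI session, e.g.
#   se_list
#   se_detail my.stateengine.item
# where "my.stateengine.item" is a placeholder for an item using the
# StateEngine plugin.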
self.logger.info("StateEngine: Two additional CLI commands registered") except AttributeError as err: self.logger.error("StateEngine: Additional CLI commands not registered because error occured.") diff --git a/stateengine/StateEngineCondition.py b/stateengine/StateEngineCondition.py index ce3dd0e75..abfe71745 100755 --- a/stateengine/StateEngineCondition.py +++ b/stateengine/StateEngineCondition.py @@ -63,17 +63,24 @@ def __init__(self, abitem, name: str): def __repr__(self): return "SeCondition 'item': {}, 'status': {}, 'eval': {}, " \ - "'status_eval': {}, 'value': {}".format(self.__item, self.__status, self.__eval, self.__status_eval, self.__value) + "'status_eval': {}, 'value': {}".format(self.__item, self.__status, self.__eval, + self.__status_eval, self.__value) - def check_items(self, check, value=None, item_state=None): + def check_items(self, check, value=None, state=None, use=None): item_issue, status_issue, eval_issue, status_eval_issue = None, None, None, None item_value, status_value, eval_value, status_eval_value = None, None, None, None + if state and use is None: + use = state.use.get() + if check == "attribute": + _orig_value = value + else: + _orig_value = None if check == "se_item" or (check == "attribute" and self.__item is None and self.__eval is None): if value is None: - value = StateEngineTools.find_attribute(self._sh, item_state, "se_item_" + self.__name) - if value is not None: + value = StateEngineTools.find_attribute(self._sh, state, "se_item_" + self.__name, 0, use) + if isinstance(value, str): match = re.match(r'^(.*):', value) - if isinstance(value, str) and value.startswith("eval:"): + if value.startswith("eval:"): _, _, value = value.partition(":") self.__eval = value self.__item = None @@ -87,9 +94,11 @@ def check_items(self, check, value=None, item_state=None): self.__item = value item_value = value if check == "se_status" or (check == "attribute" and self.__status is None and self.__status_eval is None): + if check == "attribute": + value = _orig_value if value is None: - value = StateEngineTools.find_attribute(self._sh, item_state, "se_status_" + self.__name) - if value is not None: + value = StateEngineTools.find_attribute(self._sh, state, "se_status_" + self.__name, 0, use) + if isinstance(value, str): match = re.match(r'^(.*):', value) if isinstance(value, str) and value.startswith("eval:"): _, _, value = value.partition(":") @@ -107,8 +116,8 @@ def check_items(self, check, value=None, item_state=None): status_value = value if check == "se_eval" or (check == "attribute" and self.__eval is None): if value is None: - value = StateEngineTools.find_attribute(self._sh, item_state, "se_eval_" + self.__name) - if value is not None: + value = StateEngineTools.find_attribute(self._sh, state, "se_eval_" + self.__name, 0, use) + if isinstance(value, str): match = re.match(r'^(.*):', value) if value.startswith("eval:"): _, _, value = value.partition("eval:") @@ -120,9 +129,11 @@ def check_items(self, check, value=None, item_state=None): self.__eval = value eval_value = value if check == "se_status_eval" or (check == "attribute" and self.__status_eval is None): + if check == "attribute": + value = _orig_value if value is None: - value = StateEngineTools.find_attribute(self._sh, item_state, "se_status_eval_" + self.__name) - if value is not None: + value = StateEngineTools.find_attribute(self._sh, state, "se_status_eval_" + self.__name, 0, use) + if isinstance(value, str): match = re.match(r'^(.*):', value) if value.startswith("eval:"): _, _, value = 
value.partition("eval:") @@ -214,9 +225,9 @@ def get(self): return result # Complete condition (do some checks, cast value, min and max based on item or eval data types) - # item_state: item to read from + # state: state (item) to read from # abitem_object: Related SeItem instance for later determination of current age and current delay - def complete(self, item_state): + def complete(self, state, use): # check if it is possible to complete this condition if self.__min.is_empty() and self.__max.is_empty() and self.__value.is_empty() \ and self.__agemin.is_empty() and self.__agemax.is_empty() \ @@ -275,8 +286,7 @@ def complete(self, item_state): self.__eval = self._abitem.get_update_original_caller elif self.__name == "original_source": self.__eval = self._abitem.get_update_original_source - - self.check_items("attribute", None, item_state) + self.check_items("attribute", None, state, use) # now we should have either 'item' or '(status)eval' set. If not, raise ValueError if all(item is None for item in [self.__item, self.__status, self.__eval, self.__status_eval]): @@ -301,8 +311,9 @@ def complete(self, item_state): elif self.__name in ("weekday", "sun_azimut", "sun_altitude", "age", "delay", "random", "month"): self.__cast_all(StateEngineTools.cast_num) elif self.__name in ( - "laststate", "laststate_id", "laststate_name", "lastconditionset", "lastconditionset_id", "lastconditionset_name", - "previousstate", "previousstate_name", "previousstate_id", "previousconditionset", "previousconditionset_id", "previousconditionset_name", + "laststate", "laststate_id", "laststate_name", "lastconditionset", "lastconditionset_id", + "lastconditionset_name", "previousstate", "previousstate_name", "previousstate_id", + "previousconditionset", "previousconditionset_id", "previousconditionset_name", "previousstate_conditionset", "previousstate_conditionset_id", "previousstate_conditionset_name", "trigger_item", "trigger_caller", "trigger_source", "trigger_dest", "original_item", "original_caller", "original_source"): @@ -430,12 +441,8 @@ def __convert(convert_value, convert_current): self._log_develop("Ignoring value None for conversion") return convert_value, convert_current _oldvalue = convert_value - try: - if isinstance(convert_value, re._pattern_type): - return convert_value, convert_current - except Exception: - if isinstance(convert_value, re.Pattern): - return convert_value, convert_current + if isinstance(convert_value, re.Pattern): + return convert_value, convert_current if isinstance(convert_current, bool): self.__value.set_cast(StateEngineTools.cast_bool) convert_value = StateEngineTools.cast_bool(convert_value) @@ -454,7 +461,7 @@ def __convert(convert_value, convert_current): else: self.__value.set_cast(StateEngineTools.cast_str) convert_value = StateEngineTools.cast_str(convert_value) - convert_current = StateEngineTools.cast_str(convert_value) + convert_current = StateEngineTools.cast_str(convert_current) if not type(_oldvalue) == type(convert_value): self._log_debug("Value {} was type {} and therefore not the same" " type as item value {}. 
It got converted to {}.", @@ -469,11 +476,15 @@ def __convert(convert_value, convert_current): self.__updatedbynegate if valuetype == "updatedby" else\ self.__triggeredbynegate if valuetype == "triggeredby" else\ self.__negate - + _key_current = ['{}'.format(state.id), 'conditionsets', '{}'.format( + self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), + 'current', '{}'.format(valuetype)] + _key_match = ['{}'.format(state.id), 'conditionsets', '{}'.format( + self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), + 'match', '{}'.format(valuetype)] if isinstance(value, list): text = "Condition '{0}': {1}={2} negate={3} current={4}" - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'current', '{}'.format(valuetype)] - self._abitem.update_webif(_key, str(current)) + self._abitem.update_webif(_key_current, str(current)) self._log_info(text, self.__name, valuetype, value, negate, current) self._log_increase_indent() for i, element in enumerate(value): @@ -481,40 +492,31 @@ def __convert(convert_value, convert_current): regex_check = False if valuetype == "value" and type(element) != type(current) and current is not None: element, current = __convert(element, current) - try: - if isinstance(element, re._pattern_type): - regex_result = element.fullmatch(str(current)) - regex_check = True - except Exception: - if isinstance(element, re.Pattern): - regex_result = element.fullmatch(str(current)) - regex_check = True + if isinstance(element, re.Pattern): + regex_result = element.fullmatch(str(current)) + regex_check = True if negate: if (regex_result is not None and regex_check is True)\ or (current == element and regex_check is False): self._log_debug("{0} found but negated -> not matching", element) - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', '{}'.format(valuetype)] - self._abitem.update_webif(_key, 'no') + self._abitem.update_webif(_key_match, 'no') return False else: if (regex_result is not None and regex_check is True)\ or (current == element and regex_check is False): self._log_debug("{0} found -> matching", element) - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', '{}'.format(valuetype)] - self._abitem.update_webif(_key, 'yes') + self._abitem.update_webif(_key_match, 'yes') return True if regex_check is True: self._log_debug("Regex '{}' result: {}, element {}", element, regex_result) if negate: self._log_debug("{0} not in list -> matching", current) - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', '{}'.format(valuetype)] - self._abitem.update_webif(_key, 'yes') + self._abitem.update_webif(_key_match, 'yes') return True else: self._log_debug("{0} not in list -> not matching", current) - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', '{}'.format(valuetype)] - self._abitem.update_webif(_key, 'no') + self._abitem.update_webif(_key_match, 'no') return False else: regex_result = None @@ -523,39 +525,36 @@ def __convert(convert_value, convert_current): if valuetype == "value" and type(value) != type(current) and current is not None: 
value, current = __convert(value, current) text = "Condition '{0}': {1}={2} negate={3} current={4}" - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'current', valuetype] - self._abitem.update_webif(_key, str(current)) + self._abitem.update_webif(_key_current, str(current)) self._log_info(text, self.__name, valuetype, value, negate, current) self._log_increase_indent() - try: - if isinstance(value, re._pattern_type): - regex_result = value.fullmatch(str(current)) - regex_check = True - except Exception: - if isinstance(value, re.Pattern): - regex_result = value.fullmatch(str(current)) - regex_check = True + if isinstance(value, re.Pattern): + regex_result = value.fullmatch(str(current)) + regex_check = True if negate: if (regex_result is None and regex_check is True)\ or (current != value and regex_check is False): self._log_debug("not OK but negated -> matching") - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', '{}'.format(valuetype)] - self._abitem.update_webif(_key, 'yes') + self._abitem.update_webif(_key_match, 'yes') return True else: if (regex_result is not None and regex_check is True)\ or (current == value and regex_check is False): self._log_debug("OK -> matching") - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', '{}'.format(valuetype)] - self._abitem.update_webif(_key, 'yes') + self._abitem.update_webif(_key_match, 'yes') return True self._log_debug("not OK -> not matching") - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', '{}'.format(valuetype)] - self._abitem.update_webif(_key, 'no') + self._abitem.update_webif(_key_match, 'no') return False # Check if value conditions match def __check_value(self, state): + _key_current = ['{}'.format(state.id), 'conditionsets', '{}'.format( + self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), + 'current', 'value'] + _key_match = ['{}'.format(state.id), 'conditionsets', '{}'.format( + self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), + 'match', 'value'] try: cond_min_max = self.__min.is_empty() and self.__max.is_empty() if not self.__value.is_empty(): @@ -568,18 +567,11 @@ def __check_value(self, state): min_get_value = self.__min.get() max_get_value = self.__max.get() current = self.__get_current() - try: - if isinstance(min_get_value, re._pattern_type) or isinstance(max_get_value, re._pattern_type): - self._log_warning("You can not use regular expression with min/max -> ignoring") - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'value'] - self._abitem.update_webif(_key, 'You can not use regular expression with min or max') - return True - except Exception: - if isinstance(min_get_value, re.Pattern) or isinstance(max_get_value, re.Pattern): - self._log_warning("You can not use regular expression with min/max -> ignoring") - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'value'] - self._abitem.update_webif(_key, 'You can not use regular expression with min or max') - 
return True + + if isinstance(min_get_value, re.Pattern) or isinstance(max_get_value, re.Pattern): + self._log_warning("You can not use regular expression with min/max -> ignoring") + self._abitem.update_webif(_key_match, 'You can not use regular expression with min or max') + return True min_value = [min_get_value] if not isinstance(min_get_value, list) else min_get_value max_value = [max_get_value] if not isinstance(max_get_value, list) else max_get_value min_value = StateEngineTools.flatten_list(min_value) @@ -588,8 +580,7 @@ def __check_value(self, state): min_value = min_value + [None] * abs(diff_len) if diff_len < 0 else min_value max_value = max_value + [None] * diff_len if diff_len > 0 else max_value text = "Condition '{0}': min={1} max={2} negate={3} current={4}" - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'current', 'value'] - self._abitem.update_webif(_key, str(current)) + self._abitem.update_webif(_key_current, str(current)) self._log_info(text, self.__name, min_value, max_value, self.__negate, current) if diff_len != 0: self._log_debug("Min and max are always evaluated as valuepairs. " @@ -597,56 +588,53 @@ def __check_value(self, state): self._log_increase_indent() _notmatching = 0 for i, _ in enumerate(min_value): - min = None if min_value[i] == 'novalue' else min_value[i] - max = None if max_value[i] == 'novalue' else max_value[i] - self._log_debug("Checking minvalue {} ({}) and maxvalue {} ({}) against current {} ({})", min, type(min), max, type(max), current, type(current)) - if min is not None and max is not None and min > max: - min, max = max, min + _min = None if min_value[i] == 'novalue' else min_value[i] + _max = None if max_value[i] == 'novalue' else max_value[i] + self._log_debug("Checking minvalue {} ({}) and maxvalue {} ({}) against current {} ({})", + _min, type(_min), _max, type(_max), current, type(current)) + if _min is not None and _max is not None and _min > _max: + _min, _max = _max, _min self._log_warning("Condition {}: min must not be greater than max! 
" - "Values got switched: min is now {}, max is now {}", self.__name, min, max) - if min is None and max is None: + "Values got switched: min is now {}, max is now {}", + self.__name, _min, _max) + if _min is None and _max is None: self._log_debug("no limit given -> matching") - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'value'] - self._abitem.update_webif(_key, 'yes') + self._abitem.update_webif(_key_match, 'yes') return True if not self.__negate: - if min is not None and current < min: + if _min is not None and current < _min: self._log_debug("too low -> not matching") _notmatching += 1 - elif max is not None and current > max: + elif _max is not None and current > _max: self._log_debug("too high -> not matching") _notmatching += 1 else: self._log_debug("given limits ok -> matching") - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'value'] - self._abitem.update_webif(_key, 'yes') + self._abitem.update_webif(_key_match, 'yes') return True else: - if min is not None and current > min and (max is None or current < max): + if _min is not None and current > _min and (_max is None or current < _max): self._log_debug("not lower than min -> not matching") _notmatching += 1 - elif max is not None and current < max and (min is None or current > min): + elif _max is not None and current < _max and (_min is None or current > _min): self._log_debug("not higher than max -> not matching") _notmatching += 1 else: self._log_debug("given limits ok -> matching") - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'value'] - self._abitem.update_webif(_key, 'yes') + self._abitem.update_webif(_key_match, 'yes') return True if _notmatching == len(min_value): - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'value'] - self._abitem.update_webif(_key, 'no') + self._abitem.update_webif(_key_match, 'no') return False else: self._log_debug("given limits ok -> matching") - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'value'] - self._abitem.update_webif(_key, 'yes') + self._abitem.update_webif(_key_match, 'yes') return True elif self.__value.is_empty() and cond_min_max: @@ -655,14 +643,12 @@ def __check_value(self, state): " evalutions. 
Min {}, max {}, value {}", self.__name, self.__min.get(), self.__max.get(), self.__value.get()) self._log_increase_indent() - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'value'] - self._abitem.update_webif(_key, 'Neither value nor min/max given.') + self._abitem.update_webif(_key_match, 'Neither value nor min/max given.') return True except Exception as ex: self._log_warning("Problem checking value: {}", ex) - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'value'] - self._abitem.update_webif(_key, 'Problem checking value: {}'.format(ex)) + self._abitem.update_webif(_key_match, 'Problem checking value: {}'.format(ex)) finally: self._log_decrease_indent() @@ -748,7 +734,8 @@ def __check_age(self, state): try: current = self.__get_current(eval_type='age') except Exception as ex: - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'age'] + _key = ['{}'.format(state.id), 'conditionsets', '{}'.format( + self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'age'] self._abitem.update_webif(_key, 'Not possible to get age from eval {} ' 'or status_eval {}'.format(self.__eval, self.__status_eval)) self._log_warning("Age of '{0}': Not possible to get age from eval {1} or status_eval {2}! " @@ -767,7 +754,9 @@ def __check_age(self, state): agemin = agemin + [None] * abs(diff_len) if diff_len < 0 else agemin agemax = agemax + [None] * diff_len if diff_len > 0 else agemax text = "Age of '{0}': min={1} max={2} negate={3} current={4}" - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'current', 'age'] + _key = ['{}'.format(state.id), 'conditionsets', '{}'.format( + self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), + 'current', 'age'] self._abitem.update_webif(_key, str(current)) self._log_info(text, self.__name, agemin, agemax, self.__agenegate, current) if diff_len != 0: @@ -775,46 +764,45 @@ def __check_age(self, state): " If needed you can also provide 'novalue' as a list value") self._log_increase_indent() _notmatching = 0 + _key = ['{}'.format(state.id), 'conditionsets', '{}'.format( + self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), + 'match', 'age'] for i, _ in enumerate(agemin): - min = None if agemin[i] == 'novalue' else agemin[i] - max = None if agemax[i] == 'novalue' else agemax[i] - self._log_debug("Testing valuepair min {} and max {}", min, max) + _min = None if agemin[i] == 'novalue' else agemin[i] + _max = None if agemax[i] == 'novalue' else agemax[i] + self._log_debug("Testing valuepair min {} and max {}", _min, _max) if not self.__agenegate: - if min is not None and current < min: + if _min is not None and current < _min: self._log_debug("too young -> not matching") _notmatching += 1 - elif max is not None and current > max: + elif _max is not None and current > _max: self._log_debug("too old -> not matching") _notmatching += 1 else: self._log_debug("given limits ok -> matching") - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'age'] self._abitem.update_webif(_key, 'yes') 
return True else: - if min is not None and current > min and (max is None or current < max): + if _min is not None and current > _min and (_max is None or current < _max): self._log_debug("not younger than min -> not matching") _notmatching += 1 - elif max is not None and current < max and (min is None or current > min): + elif _max is not None and current < _max and (_min is None or current > _min): self._log_debug("not older than max -> not matching") _notmatching += 1 else: self._log_debug("given limits ok -> matching") - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'age'] self._abitem.update_webif(_key, 'yes') return True if _notmatching == len(agemin): - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'age'] self._abitem.update_webif(_key, 'no') return False else: self._log_debug("given limits ok -> matching") - _key = ['{}'.format(state.id), 'conditionsets', '{}'.format(self._abitem.get_variable('current.conditionset_name')), '{}'.format(self.__name), 'match', 'age'] self._abitem.update_webif(_key, 'yes') return True finally: @@ -824,9 +812,10 @@ def __check_age(self, state): def __get_current(self, eval_type='value'): def check_eval(eval_or_status_eval): if isinstance(eval_or_status_eval, str): + # noinspection PyUnusedLocal sh = self._sh - shtime = self._shtime # noinspection PyUnusedLocal + shtime = self._shtime if "stateengine_eval" in eval_or_status_eval or "se_eval" in eval_or_status_eval: # noinspection PyUnusedLocal stateengine_eval = se_eval = StateEngineEval.SeEval(self._abitem) @@ -850,26 +839,26 @@ def check_eval(eval_or_status_eval): if self.__status is not None: # noinspection PyUnusedLocal - self._log_debug("Trying to get {} of status item {}", eval_type, self.__status) + self._log_debug("Trying to get {} of status item {}", eval_type, self.__status.property.path) return self.__status.property.last_change_age if eval_type == 'age' else\ - self.__status.property.last_change_by if eval_type == 'changedby' else\ - self.__status.property.last_update_by if eval_type == 'updatedby' else\ - self.__status.property.last_trigger_by if eval_type == 'triggeredby' else\ - self.__status.property.value - elif self.__item is not None: - # noinspection PyUnusedLocal - self._log_debug("Trying to get {} of item {}", eval_type, self.__item) - return self.__item.property.last_change_age if eval_type == 'age' else\ - self.__item.property.last_change_by if eval_type == 'changedby' else\ - self.__item.property.last_update_by if eval_type == 'updatedby' else\ - self.__item.property.last_trigger_by if eval_type == 'triggeredby' else\ - self.__item.property.value - if self.__status_eval is not None: + self.__status.property.last_change_by if eval_type == 'changedby' else\ + self.__status.property.last_update_by if eval_type == 'updatedby' else\ + self.__status.property.last_trigger_by if eval_type == 'triggeredby' else\ + self.__status.property.value + elif self.__status_eval is not None: self._log_debug("Trying to get {} of statuseval {}", eval_type, self.__status_eval) return_value = check_eval(self.__status_eval) return return_value + elif self.__item is not None: + # noinspection PyUnusedLocal + self._log_debug("Trying to get {} of item {}", eval_type, self.__item.property.path) + return self.__item.property.last_change_age if eval_type == 'age' else\ + 
self.__item.property.last_change_by if eval_type == 'changedby' else\ + self.__item.property.last_update_by if eval_type == 'updatedby' else\ + self.__item.property.last_trigger_by if eval_type == 'triggeredby' else\ + self.__item.property.value elif self.__eval is not None: - self._log_debug("Trying to get {} of statuseval {}", eval_type, self.__eval) + self._log_debug("Trying to get {} of eval {}", eval_type, self.__eval) return_value = check_eval(self.__eval) return return_value diff --git a/stateengine/StateEngineConditionSet.py b/stateengine/StateEngineConditionSet.py index 8b9d42c7f..ed4e4b6d0 100755 --- a/stateengine/StateEngineConditionSet.py +++ b/stateengine/StateEngineConditionSet.py @@ -36,6 +36,10 @@ def name(self): def id(self): return self.__id + @property + def path(self): + return self.__id + # List of conditions that are part of this condition set @property def conditions(self): @@ -89,6 +93,9 @@ def update(self, item, grandparent_item): if item is not None: for attribute in item.conf: func, name = StateEngineTools.partition_strip(attribute, "_") + if name.startswith("eval_"): + _, name = StateEngineTools.partition_strip(name, "_") + func = f"{func}_eval" if name == "": continue try: @@ -97,6 +104,7 @@ def update(self, item, grandparent_item): self.__conditions[name] = StateEngineCondition.SeCondition(self._abitem, name) issue = self.__conditions[name].set(func, item.conf[attribute]) self.__conditions.move_to_end(name, last=True) + if issue not in [[], None, [None]]: self.__unused_attributes.update({name: {'attribute': attribute, 'issue': issue}}) elif name not in self.__used_attributes.keys(): @@ -107,9 +115,14 @@ def update(self, item, grandparent_item): raise ValueError("Condition {0} error: {1}".format(name, ex)) # Update item from grandparent_item + if grandparent_item is None: + return self.__unused_attributes, self.__used_attributes for attribute in grandparent_item.conf: func, name = StateEngineTools.partition_strip(attribute, "_") + if name.startswith("eval_"): + _, name = StateEngineTools.partition_strip(name, "_") + func = f"{func}_eval" if name == "": continue cond1 = name not in self.__used_attributes.keys() @@ -135,21 +148,22 @@ def update(self, item, grandparent_item): return self.__unused_attributes, self.__used_attributes # Check the condition set, optimize and complete it - # item_state: item to read from - def complete(self, item_state): + # state: state (item) to read from + def complete(self, state, use): conditions_to_remove = [] # try to complete conditions + for name in self.conditions: try: - if not self.__conditions[name].complete(item_state): + if not self.__conditions[name].complete(state, use): conditions_to_remove.append(name) continue except ValueError as ex: self._abitem.update_attributes(self.__unused_attributes, self.__used_attributes) - self._abitem.update_issues('state', {item_state.property.path: {'issue': ex, 'issueorigin': - [{'conditionset': self.name, 'condition': name}]}}) + self._abitem.update_issues('state', {state.id: {'issue': ex, 'issueorigin': + [{'conditionset': self.name, 'condition': name}]}}) text = "State '{0}', Condition Set '{1}', Condition '{2}' Error: {3}" - raise ValueError(text.format(item_state.property.path, self.name, name, ex)) + raise ValueError(text.format(state.id, self.name, name, ex)) # Remove incomplete conditions for name in conditions_to_remove: @@ -177,14 +191,14 @@ def all_conditions_matching(self, state): try: self._log_info("Check condition set '{0}'", self.__name) self._log_increase_indent() - 
self.__previousconditionset_set(self._abitem.get_variable('current.conditionset_id'), self._abitem.get_variable('current.conditionset_name')) + self.__previousconditionset_set(self._abitem.get_variable('current.conditionset_id'), + self._abitem.get_variable('current.conditionset_name')) self.__currentconditionset_set(self.__id.property.path, self.__name) for name in self.__conditions: if not self.__conditions[name].check(state): self.__currentconditionset_set('', '') return False - #self._abitem.previousconditionset_set(self._abitem.get_variable('previous.conditionset_id'), self._abitem.get_variable('previous.conditionset_name')) self._abitem.lastconditionset_set(self.__id.property.path, self.__name) return True finally: diff --git a/stateengine/StateEngineConditionSets.py b/stateengine/StateEngineConditionSets.py index e7dc7b7e7..c8dbf3612 100755 --- a/stateengine/StateEngineConditionSets.py +++ b/stateengine/StateEngineConditionSets.py @@ -71,10 +71,12 @@ def update(self, name, item, grandparent_item): return self.__condition_sets[name].unused_attributes, self.__condition_sets[name].used_attributes # Check the condition sets, optimize and complete them - # item_state: item to read from - def complete(self, item_state): + # state: item (item) to read from + def complete(self, state, use=None): + if use is None: + use = state.use.get() for name in self.__condition_sets: - self.__condition_sets[name].complete(item_state) + self.__condition_sets[name].complete(state, use) # Write all condition sets to logger def write_to_logger(self): diff --git a/stateengine/StateEngineEval.py b/stateengine/StateEngineEval.py index dea0a5bb3..55abe6db6 100755 --- a/stateengine/StateEngineEval.py +++ b/stateengine/StateEngineEval.py @@ -66,7 +66,8 @@ def remap(_value, _minoutput): _lamella_open_value = StateEngineDefaults.lamella_open_value _lamella_text = " (based on lamella open value of {0})".format(_lamella_open_value) value = remap(90 - altitude, _lamella_open_value) + offset - self._log_debug("Blinds at right angle to the sun at {0}° with an offset of {1}°{2}", value, offset, _lamella_text) + self._log_debug("Blinds at right angle to the sun at {0}° with an offset of {1}°{2}", + value, offset, _lamella_text) self._log_decrease_indent() self._eval_lock.release() @@ -94,8 +95,8 @@ def get_variable(self, varname): self._eval_lock.acquire() self._log_debug("Executing method 'get_variable({0})'", varname) try: - if self._abitem._initactionname and varname == 'current.action_name': - returnvalue = self._abitem._initactionname + if self._abitem.initactionname and varname == 'current.action_name': + returnvalue = self._abitem.initactionname self._log_debug("Return '{}' for variable {} during init", returnvalue, varname) else: returnvalue = self._abitem.get_variable(varname) @@ -115,8 +116,8 @@ def get_relative_itemid(self, subitem_id): self._eval_lock.acquire() self._log_debug("Executing method 'get_relative_itemid({0})'", subitem_id) try: - if self._abitem._initstate and subitem_id == '..state_name': - returnvalue = self._abitem.return_item(self._abitem._initstate.id)[0].property.path + if self._abitem.initstate and subitem_id == '..state_name': + returnvalue = self._abitem.return_item(self._abitem.initstate.id)[0].property.path self._log_debug("Return item path '{0}' during init", returnvalue) else: returnvalue = self._abitem.return_item(subitem_id)[0].property.path @@ -136,8 +137,8 @@ def get_relative_item(self, subitem_id): self._eval_lock.acquire() self._log_debug("Executing method 
'get_relative_item({0})'", subitem_id) try: - if self._abitem._initstate and subitem_id == '..state_name': - returnvalue, issue = self._abitem.return_item(self._abitem._initstate.id) + if self._abitem.initstate and subitem_id == '..state_name': + returnvalue, issue = self._abitem.return_item(self._abitem.initstate.id) self._log_debug("Return item '{0}' during init", returnvalue) else: returnvalue, issue = self._abitem.return_item(subitem_id) @@ -158,14 +159,16 @@ def get_relative_itemvalue(self, subitem_id): returnvalue = [] self._log_debug("Executing method 'get_relative_itemvalue({0})'", subitem_id) try: - if self._abitem._initstate and subitem_id == '..state_name': - returnvalue = self._abitem._initstate.text + if self._abitem.initstate and subitem_id == '..state_name': + returnvalue = self._abitem.initstate.text self._log_debug("Return item value '{0}' during init", returnvalue) else: item, issue = self._abitem.return_item(subitem_id) returnvalue = item.property.value returnvalue = StateEngineTools.convert_str_to_list(returnvalue) - self._log_debug("Return item value '{0}' for item {1}", returnvalue, subitem_id) + issue = f" Issue: {issue}" if issue not in [[], None, [None]] else "" + self._log_debug("Return item value '{0}' for item {1}.{2}", + returnvalue, subitem_id, issue) except Exception as ex: self._log_warning("Problem evaluating value of '{0}': {1}", subitem_id, ex) finally: @@ -182,16 +185,17 @@ def get_relative_itemproperty(self, subitem_id, prop): self._eval_lock.acquire() self._log_debug("Executing method 'get_relative_itemproperty({0}, {1})'", subitem_id, prop) try: - item, issue = self._abitem.return_item(subitem_id) + item, _ = self._abitem.return_item(subitem_id) except Exception as ex: - self._log_warning("Problem evaluating property of {0} - relative item might not exist. Error: {1}", subitem_id, ex) + self._log_warning("Problem evaluating property of {0} - relative item might not exist. 
Error: {1}", + subitem_id, ex) self._eval_lock.release() return try: - if self._abitem._initstate and subitem_id == '..state_name': - returnvalue = getattr(self._abitem.return_item(self._abitem._initstate.id)[0].property, prop) + if self._abitem.initstate and subitem_id == '..state_name': + returnvalue = getattr(self._abitem.return_item(self._abitem.initstate.id)[0].property, prop) self._log_debug("Return item property '{0}' from {1}: {2} during init", prop, - self._abitem.return_item(self._abitem._initstate.id)[0].property.path, returnvalue) + self._abitem.return_item(self._abitem.initstate.id)[0].property.path, returnvalue) else: returnvalue = getattr(item.property, prop) if prop == "value": @@ -221,6 +225,7 @@ def get_attribute_value(self, item, attrib): def get_attributevalue(self, item, attrib): self._eval_lock.acquire() self._log_debug("Executing method 'get_attributevalue({0}, {1})'", item, attrib) + issue = None if ":" in item: var_type, item = StateEngineTools.partition_strip(item, ":") if var_type == "var": @@ -228,13 +233,14 @@ def get_attributevalue(self, item, attrib): else: item, issue = self._abitem.return_item(item) try: - if self._abitem._initstate and item == '..state_name': - returnvalue = self._abitem.return_item(self._abitem._initstate.id).conf[attrib] - self._log_debug("Return item attribute '{0}' from {1}: {2} during init", - attrib, self._abitem.return_item(self._abitem._initstate.id)[0].property.path, returnvalue) + if self._abitem.initstate and item == '..state_name': + returnvalue, issue = self._abitem.return_item(self._abitem.initstate.id).conf[attrib] + self._log_debug("Return item attribute '{0}' from {1}: {2} during init. Issue {3}", attrib, + self._abitem.return_item(self._abitem.initstate.id)[0].property.path, returnvalue, issue) else: returnvalue = item.conf[attrib] - self._log_debug("Return item attribute {0} from {1}: {2}", attrib, item.property.path, returnvalue) + self._log_debug("Return item attribute {0} from {1}: {2}. Issue {3}", + attrib, item.property.path, returnvalue, issue) except Exception as ex: returnvalue = None self._log_warning("Problem evaluating attribute {0} of {1} - attribute might not exist. " diff --git a/stateengine/StateEngineFunctions.py b/stateengine/StateEngineFunctions.py index a8df0f13e..ae5c2d7b1 100755 --- a/stateengine/StateEngineFunctions.py +++ b/stateengine/StateEngineFunctions.py @@ -25,7 +25,6 @@ from . import StateEngineLogger from . import StateEngineTools from . 
import StateEngineDefaults -from lib.item import Items from ast import literal_eval @@ -45,7 +44,6 @@ def __init__(self, smarthome=None, logger=None): self.__locks = {} self.__global_struct = {} self.__ab_alive = False - self.itemsApi = Items.get_instance() def __repr__(self): return "SeFunctions" @@ -99,7 +97,7 @@ def check_include_exclude(entry_type): elog.decrease_indent() return None - item = self.itemsApi.return_item(item_id) + item = self.__sh.return_item(item_id) if item is None: self.logger.error("manual_item_update_eval: item {0} not found!".format(item_id)) @@ -113,7 +111,7 @@ def check_include_exclude(entry_type): if "se_manual_logitem" in item.conf: elog_item_id = item.conf["se_manual_logitem"] - elog_item = self.itemsApi.return_item(elog_item_id) + elog_item = self.__sh.return_item(elog_item_id) if elog_item is None: self.logger.error("manual_item_update_item: se_manual_logitem {0} not found!".format(elog_item_id)) elog = StateEngineLogger.SeLoggerDummy() @@ -128,7 +126,7 @@ def check_include_exclude(entry_type): retval_trigger = not item() elog.info("Current value of item {0} is {1}", item_id, retval_no_trigger) - original_caller, original_source = StateEngineTools.get_original_caller(elog, caller, source) + original_caller, original_source = StateEngineTools.get_original_caller(self.__sh, elog, caller, source) elog.info("get_caller({0}, {1}): original trigger by {2}:{3}", caller, source, original_caller, original_source) original = "{}:{}".format(original_caller, original_source) diff --git a/stateengine/StateEngineItem.py b/stateengine/StateEngineItem.py index bc7c33166..42202d905 100755 --- a/stateengine/StateEngineItem.py +++ b/stateengine/StateEngineItem.py @@ -22,7 +22,6 @@ import datetime from collections import OrderedDict, defaultdict -import lib.item.item from . import StateEngineTools from .StateEngineLogger import SeLogger from . import StateEngineState @@ -33,14 +32,13 @@ from . import StateEngineStructs from . 
import StateEngineEval -from lib.item import Items from lib.shtime import Shtime from lib.item.item import Item +from lib.item.items import Items import copy import threading import queue import re -from ast import literal_eval # Class representing a blind item @@ -51,6 +49,10 @@ class SeItem: def id(self): return self.__id + @property + def path(self): + return self.__id + @property def variables(self): return self.__variables @@ -117,7 +119,7 @@ def laststate(self): @property def laststate_releasedby(self): _returnvalue = None if self.__laststate_item_id is None \ - else self.__release_info.get(self.__laststate_item_id.property.value) + else self.__release_info.get(self.__laststate_item_id.property.value) return _returnvalue @property @@ -180,7 +182,7 @@ def ab_alive(self, value): def __init__(self, smarthome, item, se_plugin): self.__item = item self.__logger = SeLogger.create(self.__item) - self.itemsApi = Items.get_instance() + self.__logging_off = False self.update_lock = threading.Lock() self.__ab_alive = False self.__queue = queue.Queue() @@ -201,8 +203,8 @@ def __init__(self, smarthome, item, se_plugin): self.__log_level = StateEngineValue.SeValue(self, "Log Level", False, "num") - _default_log_level = SeLogger.default_log_level.get() - _returnvalue, _returntype, _using_default, _issue = self.__log_level.set_from_attr(self.__item, "se_log_level", + _default_log_level = self.__logger.default_log_level.get() + _returnvalue, _returntype, _using_default, _issue, _ = self.__log_level.set_from_attr(self.__item, "se_log_level", _default_log_level) self.__using_default_log_level = _using_default _returnvalue = self.__log_level.get() @@ -210,11 +212,11 @@ def __init__(self, smarthome, item, se_plugin): _returnvalue = _returnvalue[0] self.__logger.log_level_as_num = 2 - _startup_log_level = SeLogger.startup_log_level.get() + _startup_log_level = self.__logger.startup_log_level.get() if _startup_log_level > 0: base = self.__sh.get_basedir() - SeLogger.manage_logdirectory(base, SeLogger.log_directory, True) + self.__logger.manage_logdirectory(base, self.__logger.log_directory, True) self.__logger.log_level_as_num = _startup_log_level self.__logger.header("") self.__logger.info("Set log level to startup log level {}", _startup_log_level) @@ -293,7 +295,8 @@ def __init__(self, smarthome, item, se_plugin): self.__previousstate_conditionset_internal_name = "" if self.__previousstate_conditionset_item_name is None else \ self.__previousstate_conditionset_item_name.property.value self.__config_issues.update(_issue) - filtered_dict = {key: value for key, value in self.__config_issues.items() if value.get('issue') not in [[], [None], None]} + filtered_dict = {key: value for key, value in self.__config_issues.items() if + value.get('issue') not in [[], [None], None]} self.__config_issues = filtered_dict self.__states = [] @@ -310,8 +313,8 @@ def __init__(self, smarthome, item, se_plugin): self.__repeat_actions = StateEngineValue.SeValue(self, "Repeat actions if state is not changed", False, "bool") self.__repeat_actions.set_from_attr(self.__item, "se_repeat_actions", True) self.__first_run = None - self._initstate = None - self._initactionname = None + self.initstate = None + self.initactionname = None self.__update_trigger_item = None self.__update_trigger_caller = None self.__update_trigger_source = None @@ -360,7 +363,6 @@ def __init__(self, smarthome, item, se_plugin): self.__logger.error("Issue finishing states because {}", ex) return - def __repr__(self): return self.__id @@ -416,7 +418,7 @@ def 
update_leave_action(self, default_instant_leaveaction): default_instant_leaveaction_value = default_instant_leaveaction.get() self.__default_instant_leaveaction = default_instant_leaveaction - _returnvalue_leave, _returntype_leave, _using_default_leave, _issue = self.__instant_leaveaction.set_from_attr( + _returnvalue_leave, _returntype_leave, _using_default_leave, _issue, _ = self.__instant_leaveaction.set_from_attr( self.__item, "se_instant_leaveaction", default_instant_leaveaction) if len(_returnvalue_leave) > 1: @@ -465,22 +467,29 @@ def run_queue(self): StateEngineDefaults.plugin_identification) return _current_log_level = self.__log_level.get() - _default_log_level = SeLogger.default_log_level.get() + _default_log_level = self.__logger.default_log_level.get() if _current_log_level <= -1: self.__using_default_log_level = True - value = SeLogger.default_log_level.get() + value = self.__logger.default_log_level.get() else: value = _current_log_level self.__using_default_log_level = False self.__logger.log_level_as_num = value - + additional_text = ", currently using default" if self.__using_default_log_level is True else "" if _current_log_level > 0: base = self.__sh.get_basedir() - SeLogger.manage_logdirectory(base, SeLogger.log_directory, True) - additional_text = ", currently using default" if self.__using_default_log_level is True else "" + self.__logger.manage_logdirectory(base, self.__logger.log_directory, True) + self.__logging_off = False + elif self.__logging_off is False: + self.__logger.log_level_as_num = 1 + self.__logger.info("Logging turned off! Current log level {} ({}), default {}{}", + _current_log_level, type(self.__logger.log_level), _default_log_level, additional_text) + self.__logger.log_level_as_num = value + self.__logging_off = True + self.__logger.info("Current log level {} ({}), default {}{}", - _current_log_level, type(self.__logger.log_level), _default_log_level, additional_text) + _current_log_level, type(self.__logger.log_level), _default_log_level, additional_text) _instant_leaveaction = self.__instant_leaveaction.get() _default_instant_leaveaction_value = self.__default_instant_leaveaction.get() if _instant_leaveaction <= -1: @@ -538,8 +547,8 @@ def run_queue(self): source, dest) # Find out what initially caused the update to trigger if the caller is "Eval" - orig_caller, orig_source, orig_item = StateEngineTools.get_original_caller(self.__logger, caller, - source, item) + orig_caller, orig_source, orig_item = StateEngineTools.get_original_caller(self.__sh, self.__logger, + caller, source, item) if orig_item is None: orig_item = item if orig_caller != caller: @@ -574,8 +583,8 @@ def run_queue(self): if last_state is not None: self.__logger.info("Last state: {0} ('{1}')", last_state.id, last_state.name) - _last_conditionset_id = self.__lastconditionset_internal_id #self.__lastconditionset_get_id() - _last_conditionset_name = self.__lastconditionset_internal_name # self.__lastconditionset_get_name() + _last_conditionset_id = self.__lastconditionset_internal_id # self.__lastconditionset_get_id() + _last_conditionset_name = self.__lastconditionset_internal_name # self.__lastconditionset_get_name() if _last_conditionset_id not in ['', None]: self.__logger.info("Last Conditionset: {0} ('{1}')", _last_conditionset_id, _last_conditionset_name) else: @@ -608,6 +617,8 @@ def run_queue(self): evaluated_instant_leaveaction = True else: evaluated_instant_leaveaction = False + _previousstate_conditionset_id = '' + _previousstate_conditionset_name = '' for state in 
self.__states: if not self.__ab_alive: self.__logger.debug("StateEngine Plugin not running (anymore). Stop state evaluation.") @@ -656,6 +667,7 @@ def run_queue(self): last_state.run_stay(self.__repeat_actions.get()) if self.update_lock.locked(): self.update_lock.release() + self.__logger.decrease_indent(50) self.__logger.debug("State evaluation finished") self.__logger.info("State evaluation queue empty.") self.__handle_releasedby(new_state, last_state, _instant_leaveaction) @@ -669,7 +681,6 @@ def run_queue(self): "State is a copy and therefore just releasing {}. Skipping state actions, running leave actions " "of last state, then retriggering.", new_state.is_copy_for.id) if last_state is not None and self.__ab_alive: - #self.lastconditionset_set(_original_conditionset_id, _original_conditionset_name) self.__logger.info("Leaving {0} ('{1}'). Condition set was: {2}.", last_state.id, last_state.name, _original_conditionset_id) self.__update_check_can_enter(last_state, _instant_leaveaction, False) @@ -685,7 +696,7 @@ def run_queue(self): if self.update_lock.locked(): self.update_lock.release() - self.update_state(self.__item, "Released_by Retrigger", state.id) + self.update_state(self.__item, "Released_by Retrigger") return _last_conditionset_id = self.__lastconditionset_internal_id @@ -753,6 +764,7 @@ def run_queue(self): self.__logger.debug("State evaluation finished") all_released_by = self.__handle_releasedby(new_state, last_state, _instant_leaveaction) + self.__logger.decrease_indent(50) self.__logger.info("State evaluation queue empty.") if new_state: self.__logger.develop("States {}, Current state released by {}", self.__states, all_released_by.get(new_state)) @@ -795,12 +807,12 @@ def __update_can_release(self, can_release, new_state=None): _stateindex = list(state_dict.keys()).index(state.id) for e in release_list: _valueindex = list(state_dict.keys()).index(e) if e in state_dict else -1 - self.__logger.develop("Testing entry in canrelease {}, state {} stateindex {}, "\ + self.__logger.develop("Testing entry in canrelease {}, state {} stateindex {}, " "valueindex {}", e, state.id, _stateindex, _valueindex) if e == state.id: self.__logger.info("Value in se_released_by must not be identical to state. Ignoring {}", e) elif _stateindex < _valueindex and not state.is_copy_for: - self.__logger.info("Value {} in se_released_by must have lower priority "\ + self.__logger.info("Value {} in se_released_by must have lower priority " "than state. Ignoring {}", state.id, e) else: can_release_list.append(e) @@ -888,7 +900,7 @@ def update_can_release_list(): self.__logger.develop("Entry {} defined by {} is a copy, skipping", e, releasedby[i]) continue _entryindex = list(state_dict.keys()).index(e) if e in state_dict else -1 - self.__logger.develop("Testing if entry {} should become a state copy. "\ + self.__logger.develop("Testing if entry {} should become a state copy. " "stateindex {}, entryindex {}", e, _stateindex, _entryindex) if e == new_state.id: self.__logger.warning("Value in se_released_by must no be identical to state. 
Ignoring {}", @@ -909,7 +921,7 @@ def update_can_release_list(): can_enter = self.__update_check_can_enter(relevant_state, instant_leaveaction) self.__logger.log_level_as_num = current_log_level if relevant_state == last_state: - self.__logger.debug("Possible release state {} = last state {}, "\ + self.__logger.debug("Possible release state {} = last state {}, " "not copying", relevant_state.id, last_state.id) elif can_enter: self.__logger.debug("Relevant state {} could enter, not copying", relevant_state.id) @@ -953,17 +965,14 @@ def _nested_test(dic, keys): def update_action_status(self, action_status): def combine_dicts(dict1, dict2): combined_dict = dict1.copy() - for key, value in dict2.items(): if key in combined_dict: for k, v in combined_dict.items(): v['issueorigin'].extend( [item for item in v['issueorigin'] if item not in combined_dict[k]['issueorigin']]) v['issue'].extend([item for item in v['issue'] if item not in combined_dict[k]['issue']]) - else: combined_dict[key] = value - return combined_dict combined_dict = combine_dicts(action_status, self.__action_status) @@ -971,13 +980,29 @@ def combine_dicts(dict1, dict2): def update_issues(self, issue_type, issues): def combine_dicts(dict1, dict2): + def update_list(existing, new_entries): + # Ensure existing is a list + if not isinstance(existing, list): + existing = [existing] + if not isinstance(new_entries, list): + new_entries = [new_entries] + # Append new entries to the list if they don't exist + for entry in new_entries: + if entry not in existing: + existing.append(entry) + return existing + combined_dict = dict1.copy() for key, value in dict2.items(): - if key in combined_dict and combined_dict[key].get('issueorigin'): - combined_dict[key]['issueorigin'].extend(value['issueorigin']) - else: + if key not in combined_dict: combined_dict[key] = value + continue + combined_entry = combined_dict[key] + if 'issue' in value: + combined_entry['issue'] = update_list(combined_entry.get('issue', []), value['issue']) + if 'issueorigin' in value: + combined_entry['issueorigin'] = update_list(combined_entry.get('issueorigin', []), value['issueorigin']) return combined_dict @@ -1009,7 +1034,7 @@ def update_attributes(self, unused_attributes, used_attributes): combined_entries = [{'state': state, 'conditionset': ', '.join(conditionsets)} for state, conditionsets in combined_dict.items()] combined_unused_dict[key]['issueorigin'] = combined_entries - except Exception as ex: + except Exception: pass self.__unused_attributes = combined_unused_dict @@ -1023,19 +1048,56 @@ def update_attributes(self, unused_attributes, used_attributes): self.__used_attributes = combined_dict def __log_issues(self, issue_type): + def print_readable_dict(data): + for key, value in data.items(): + if isinstance(value, list): + formatted_entries = [] + for item in value: + if isinstance(item, dict): + for sub_key, sub_value in item.items(): + if isinstance(sub_value, list): + formatted_entries.append(f"{sub_key}: {', '.join(sub_value)}") + else: + formatted_entries.append(f"{sub_key}: {sub_value}") + else: + formatted_entries.append(item) + if formatted_entries: + self.__logger.info("- {}: {}", key, ', '.join(formatted_entries)) + else: + self.__logger.info("- {}: {}", key, value) def list_issues(v): _issuelist = StateEngineTools.flatten_list(v.get('issue')) if isinstance(_issuelist, list) and len(_issuelist) > 1: self.__logger.info("has the following issues:") self.__logger.increase_indent() for e in _issuelist: - self.__logger.info("- {}", e) + if isinstance(e, 
dict): + print_readable_dict(e) + else: + self.__logger.info("- {}", e) self.__logger.decrease_indent() elif isinstance(_issuelist, list) and len(_issuelist) == 1: - self.__logger.info("has the following issue: {}", _issuelist[0]) + if isinstance(_issuelist[0], dict): + self.__logger.info("has the following issues:") + self.__logger.increase_indent() + print_readable_dict(_issuelist[0]) + self.__logger.decrease_indent() + else: + self.__logger.info("has the following issue: {}", _issuelist[0]) else: - self.__logger.info("has the following issue: {}", _issuelist) + if isinstance(_issuelist, dict): + self.__logger.info("has the following issues:") + self.__logger.increase_indent() + print_readable_dict(_issuelist) + self.__logger.decrease_indent() + else: + self.__logger.info("has the following issue: {}", _issuelist) + if "ignore" in v: + self.__logger.info("It will be ignored") + warn_unused = "" + warn_issues = "" + warn = "" if issue_type == 'actions': to_check = self.__action_status.items() warn = ', '.join(key for key in self.__action_status.keys()) @@ -1068,7 +1130,6 @@ def list_issues(v): self.__logger.increase_indent() for entry, value in to_check: if 'issue' in value: - origin_text = '' origin_list = value.get('issueorigin') or [] if issue_type == 'states': self.__logger.info("State {} is ignored because", entry) @@ -1076,7 +1137,11 @@ def list_issues(v): if value.get('attribute'): self.__logger.info("Attribute {}", value.get('attribute')) self.__logger.increase_indent() - self.__logger.info("defined in state {}", entry) + if value.get('origin'): + origin = value.get('origin') + else: + origin = "state" + self.__logger.info("defined in {} {}", origin, entry) self.__logger.decrease_indent() list_issues(value) else: @@ -1084,7 +1149,9 @@ def list_issues(v): self.__logger.info("") continue elif issue_type == 'structs': - self.__logger.info("Struct {} has an issue: {}", entry, value.get('issue')) + self.__logger.info("Struct {} ", entry) + #self.__logger.info("") + list_issues(value) self.__logger.info("") continue else: @@ -1196,7 +1263,7 @@ def __initialize_state(self, item_state, _statecount): _issue = _state.update_order(_statecount) if _issue: self.__config_issues.update({item_state.property.path: - {'issue': _issue, 'attribute': 'se_stateorder'}}) + {'issue': _issue, 'attribute': 'se_stateorder'}}) self.__logger.error("Issue with state {0} while setting order: {1}", item_state.property.path, _issue) self.__states.append(_state) @@ -1234,7 +1301,8 @@ def update_state(self, item, caller=None, source=None, dest=None): return self.__queue.put(["stateevaluation", item, caller, source, dest]) if not self.update_lock.locked(): - self.__logger.debug("Run queue to update state. Item: {}, caller: {}, source: {}", item.property.path, caller, source) + self.__logger.debug("Run queue to update state. 
Item: {}, caller: {}, source: {}", item.property.path, + caller, source) self.run_queue() # check if state can be entered after setting state-specific variables @@ -1573,7 +1641,7 @@ def process_returnvalue(value): if v == state.id: _returnvalue_issue = "State {} defined by {} in se_released_by attribute of state {} " \ "must not be identical.".format(v, _returnvalue[i], state.id) - elif _valueindex == - 1: #not any(value == test.id for test in self.__states): + elif _valueindex == - 1: # not any(value == test.id for test in self.__states): _returnvalue_issue = "State {} defined by {} in se_released_by attribute of state {} " \ "does currently not exist.".format(v, _returnvalue[i], state.id) elif _valueindex < _stateindex: @@ -1602,7 +1670,7 @@ def process_returnvalue(value): _returnvalue_issue_list.append(_returnvalue_issue) elif _valueindex < _stateindex: _returnvalue_issue = "State {} defined by {} in se_released_by " \ - "attribute of state {} must be lower priority "\ + "attribute of state {} must be lower priority " \ "than actual state.".format(match, _returnvalue[i], state.id) self.__logger.warning("{} Removing it.", _returnvalue_issue) if _returnvalue_issue not in _returnvalue_issue_list: @@ -1617,7 +1685,7 @@ def process_returnvalue(value): _returnvalue_issue = _returnvalue_issue_list if not matches: - _returnvalue_issue = "No states match regex {} defined in "\ + _returnvalue_issue = "No states match regex {} defined in " \ "se_released_by attribute of state {}.".format(value, state.id) self.__logger.warning("{} Removing it.", _returnvalue_issue) elif _returntype[i] == 'eval': @@ -1654,7 +1722,7 @@ def process_returnvalue(value): v_list.append(v) _converted_typelist.append(_returntype[i]) else: - _returnvalue_issue = "Found invalid definition in se_released_by attribute "\ + _returnvalue_issue = "Found invalid definition in se_released_by attribute " \ "of state {}, original {}.".format(state.id, v, original_value) self.__logger.warning("{} Removing it.", _returnvalue_issue) _converted_evaluatedlist.append(v_list) @@ -1753,16 +1821,17 @@ def write_to_log(self): self.__logger.debug("Item 'Previousstate condition Id': {0}", self.__previousstate_conditionset_item_id.property.path) if self.__previousstate_conditionset_item_name is not None: self.__logger.debug("Item 'Previousstate condition Name': {0}", - self.__previousstate_conditionset_item_name.property.path) + self.__previousstate_conditionset_item_name.property.path) self.__init_releasedby() for state in self.__states: # log states state.write_to_log() - self._initstate = None + self.initstate = None - filtered_dict = {key: value for key, value in self.__config_issues.items() if value.get('issue') not in [[], [None], None]} + filtered_dict = {key: value for key, value in self.__config_issues.items() if + value.get('issue') not in [[], [None], None]} self.__config_issues = filtered_dict # endregion @@ -1926,20 +1995,23 @@ def return_item(self, item_id): if isinstance(item_id, (StateEngineStruct.SeStruct, self.__itemClass)): return item_id, None if isinstance(item_id, StateEngineState.SeState): - return self.itemsApi.return_item(item_id.id), None + return self.__sh.return_item(item_id.id), None if item_id is None: _issue = "item_id is None" return None, [_issue] + if item_id == Items.get_instance(): + _issue = "'{0}' is not a valid item.".format(item_id) + self.__logger.warning("{0} Check your item config!", _issue, item_id) + return None, [_issue] if not isinstance(item_id, str): - _issue = "'{0}' is not defined as 
string.".format(item_id) - self.__logger.info("{0} Check your item config!", _issue, item_id) + _issue = "'{0}' is not defined as string, cannot find item.".format(item_id) + self.__logger.warning("{0} Check your item config!", _issue, item_id) return None, [_issue] item_id = item_id.strip() if item_id.startswith("struct:"): item = None _, item_id = StateEngineTools.partition_strip(item_id, ":") try: - # self.__logger.debug("Creating struct for id {}".format(item_id)) item = StateEngineStructs.create(self, item_id) except Exception as e: _issue = "Struct {} creation failed. Error: {}".format(item_id, e) @@ -1959,11 +2031,12 @@ def return_item(self, item_id): _, _, item = item.partition(":") return item, None elif match: - _issue = "Item '{0}' has to be defined as an item path or eval expression without {}.".format(match.group(1), item_id) + _issue = ("Item '{0}' has to be defined as an item path " + "or eval expression without {}.").format(match.group(1), item_id) self.__logger.warning(_issue) return None, [_issue] else: - item = self.itemsApi.return_item(item_id) + item = self.__sh.return_item(item_id) if item is None: _issue = "Item '{0}' not found.".format(item_id) self.__logger.warning(_issue) @@ -1986,7 +2059,7 @@ def return_item(self, item_id): rel_item_id = item_id[parent_level:] if rel_item_id != "": result += "." + rel_item_id - item = self.itemsApi.return_item(result) + item = self.__sh.return_item(result) if item is None: _issue = "Determined item '{0}' does not exist.".format(item_id) self.__logger.warning(_issue) diff --git a/stateengine/StateEngineLogger.py b/stateengine/StateEngineLogger.py index 07eabac00..a398a6b15 100755 --- a/stateengine/StateEngineLogger.py +++ b/stateengine/StateEngineLogger.py @@ -92,7 +92,6 @@ def manage_logdirectory(base, log_directory, create=True): os.makedirs(log_directory) return log_directory - # Remove old log files (by scheduler) @staticmethod def remove_old_logfiles(): @@ -142,7 +141,6 @@ def __init__(self, item, manual=False): self.__filename = "" self.update_logfile() - # Update name logfile if required def update_logfile(self): if self.__date == datetime.datetime.today() and self.__filename is not None: diff --git a/stateengine/StateEngineState.py b/stateengine/StateEngineState.py index 8efc61aff..c8986e21b 100755 --- a/stateengine/StateEngineState.py +++ b/stateengine/StateEngineState.py @@ -38,6 +38,14 @@ class SeState(StateEngineTools.SeItemChild): def id(self): return self.__id + @property + def path(self): + return self.__id + + @property + def use(self): + return self.__use + @property def state_item(self): return self.__item @@ -84,7 +92,7 @@ def releasedby(self): @releasedby.setter def releasedby(self, value): - self.__releasedby.set(value, "", True, None, False) + self.__releasedby.set(value, "", True, False) @property def order(self): @@ -92,7 +100,7 @@ def order(self): @order.setter def order(self, value): - self.__order.set(value, "", True, None, False) + self.__order.set(value, "", True, False) @property def can_release(self): @@ -100,7 +108,7 @@ def can_release(self): @can_release.setter def can_release(self, value): - self.__can_release.set(value, "", True, None, False) + self.__can_release.set(value, "", True, False) @property def has_released(self): @@ -108,7 +116,7 @@ def has_released(self): @has_released.setter def has_released(self, value): - self.__has_released.set(value, "", True, None, False) + self.__has_released.set(value, "", True, False) @property def was_releasedby(self): @@ -116,7 +124,7 @@ def 
was_releasedby(self): @was_releasedby.setter def was_releasedby(self, value): - self.__was_releasedby.set(value, "", True, None, False) + self.__was_releasedby.set(value, "", True, False) @property def is_copy_for(self): @@ -130,7 +138,7 @@ def is_copy_for(self, value): webif_id = None _key_copy = ['{}'.format(self.id), 'is_copy_for'] self._abitem.update_webif(_key_copy, webif_id) - self.__is_copy_for.set(value, "", True, None, False) + self.__is_copy_for.set(value, "", True, False) # Constructor # abitem: parent SeItem instance @@ -158,6 +166,8 @@ def __init__(self, abitem, item_state): self.__used_attributes = {} self.__action_status = {} self.__use_done = [] + self.__use_list = [] + self.__use_ignore_list = [] self.__conditions = StateEngineConditionSets.SeConditionSets(self._abitem) self.__actions_enter_or_stay = StateEngineActions.SeActions(self._abitem) self.__actions_enter = StateEngineActions.SeActions(self._abitem) @@ -166,12 +176,13 @@ def __init__(self, abitem, item_state): self.__order = StateEngineValue.SeValue(self._abitem, "State Order", False, "num") self._log_increase_indent() try: - self.__fill(self.__item, 0) + self.__initialize_se_use(self, 0) + self.__fill(self, 0, "reinit") finally: self._log_decrease_indent() def __repr__(self): - return "SeState item: {}, id {}.".format(self.__item, self.__id) + return "SeState item: {}, id {}".format(self.__item, self.__id) # Check conditions if state can be entered # returns: True = At least one enter condition set is fulfilled, False = No enter condition set is fulfilled @@ -192,7 +203,7 @@ def can_enter(self): # log state data def write_to_log(self): - self._abitem._initstate = self + self._abitem.initstate = self self._log_info("State {0}:", self.id) self._log_increase_indent() self.update_name(self.__item) @@ -265,9 +276,9 @@ def update_order(self, value=None): elif len(value) == 1: value = value[0] if value is None and "se_stateorder" in self.__item.conf: - _, _, _, _issue = self.__order.set_from_attr(self.__item, "se_stateorder") + _, _, _, _issue, _ = self.__order.set_from_attr(self.__item, "se_stateorder") elif value is not None: - _, _, _issue = self.__order.set(value, "", True, None, False) + _, _, _issue, _ = self.__order.set(value, "", True, False) else: _issue = [None] @@ -324,7 +335,6 @@ def run_leave(self, allow_item_repeat: bool): for elem in self._abitem.webif_infos: _key_leave = ['{}'.format(elem), 'leave'] self._abitem.update_webif(_key_leave, False) - #self._log_debug('set leave for {} to false', elem) self.__actions_leave.execute(False, allow_item_repeat, self) self._log_decrease_indent(50) self._log_increase_indent() @@ -341,25 +351,25 @@ def refill(self): self._log_debug("State {}: se_use attribute including item or eval " "- updating state conditions and actions", self.__name) self._log_increase_indent() - self.__fill(self.__item, 0, "reinit") + self.__fill(self, 0, "reinit") self._log_decrease_indent() def update_releasedby_internal(self, states=None): if states == []: - _returnvalue, _returntype, _issue = self.__releasedby.set([None], "", True, None, False) + _returnvalue, _returntype, _issue, _ = self.__releasedby.set([None], "", True, False) elif states: self._log_develop("Setting releasedby to {}", states) - _returnvalue, _returntype, _issue = self.__releasedby.set(states, "", True, None, False) + _returnvalue, _returntype, _issue, _ = self.__releasedby.set(states, "", True, False) self._log_develop("returnvalue {}", _returnvalue) else: - _returnvalue, _returntype, _, _issue = 
self.__releasedby.set_from_attr(self.__item, "se_released_by") + _returnvalue, _returntype, _, _issue, _ = self.__releasedby.set_from_attr(self.__item, "se_released_by") return _returnvalue, _returntype, _issue def update_can_release_internal(self, states): if states == []: - _returnvalue, _returntype, _issue = self.__can_release.set([None], "", True, None, False) + _returnvalue, _returntype, _issue, _ = self.__can_release.set([None], "", True, False) elif states: - _returnvalue, _returntype, _issue = self.__can_release.set(states, "", True, None, False) + _returnvalue, _returntype, _issue, _ = self.__can_release.set(states, "", True, False) else: _returnvalue, _returntype, _issue = [None], [None], None return _returnvalue, _returntype, _issue @@ -375,33 +385,177 @@ def update_name(self, item_state, recursion_depth=0): elif self.__text.is_empty() and recursion_depth == 0: self.__text.set("value:" + self.__name) self.__name = self.text + self._log_develop("Updated name of state {} to {}.", item_state, self.__name) return self.__name - def __fill_list(self, item_states, recursion_depth, se_use=None): + def __fill_list(self, item_states, recursion_depth, se_use=None, use=None): for i, element in enumerate(item_states): if element == self.state_item: self._log_info("Use element {} is same as current state - Ignoring.", element) elif element is not None and element not in self.__use_done: + if isinstance(se_use, list): + se_use = se_use[i] try: - _use = se_use[i] + se_use = element.property.path except Exception: - _use = element - self.__fill(element, recursion_depth, _use) + se_use = element self.__use_done.append(element) + self.__fill(element, recursion_depth, se_use, use) + + + def __initialize_se_use(self, state, recursion_depth): + # Import data from other item if attribute "use" is found + if isinstance(state, SeState): + item_state = state.state_item + state_type = "state" + elif isinstance(state, Item): + item_state = state + state_type = "item" + elif isinstance(state, list): + for item in state: + item_state = item + self.__initialize_se_use(item_state, recursion_depth + 1) + else: + item_state = state + state_type = "struct" + if recursion_depth > 5: + self._log_error("{0}/{1}: too many levels of 'use'", self.id, item_state) + return + if "se_use" in item_state.conf: + _returnvalue, _returntype, _, _issue, _origvalue = self.__use.set_from_attr( + item_state, "se_use", None, True, None, + self.__use_list + self.__use_ignore_list) + _configvalue = copy(_returnvalue) + _configvalue = [_configvalue] if not isinstance(_configvalue, list) else _configvalue + _configorigvalue = copy(_origvalue) + _configorigvalue = [_configorigvalue] if not isinstance(_configorigvalue, list) else _configorigvalue + self._abitem.update_issues('config', {state.id: {'issue': _issue, 'attribute': 'se_use'}}) + _use = self.__use.get() + if self.__use.is_empty() or _use is None: + _issue = "se_use {} is set up in a wrong way".format(_use) + self._abitem.update_issues('config', {state.id: {'issue': _issue, 'attribute': 'se_use', 'origin': state_type}}) + self._log_warning("{} - ignoring.", _issue) + else: + _use = [_use] if not isinstance(_use, list) else _use + _returntype = [_returntype] if not isinstance(_returntype, list) else _returntype + cleaned_use_list = [] + for i, element in enumerate(_use): + try: + _name = element.id + except Exception: + _name = element + _fill = True + _path = None + if isinstance(element, StateEngineStruct.SeStruct): + _path = element.property.path + text1 = "Reading struct {0}. 
It is{1} a valid struct for the state configuration.{2}" + _fill = element.property.valid_se_use + valid1 = " NOT" if _fill is False else "" + valid2 = " Ignoring." if _fill is False else "" + self._log_info(text1, _path, valid1, valid2) + if _fill is False: + _issue = "Not valid. Ensure it is addressed by .rules.." + self._abitem.update_issues('struct', {_path: {'issue': _issue}}) + self.__use_ignore_list.append(_path) + elif _configvalue and _configvalue[i] not in cleaned_use_list: + cleaned_use_list.append(_configvalue[i]) + elif isinstance(element, self.__itemClass): + _path = element.property.path + if element.return_parent() == Items.get_instance(): + valid1 = " most likely NOT" + valid3 = "" + valid2 = ", because it has no parent item!" + else: + valid2 = "" + valid1 = " NOT" if _fill is False else " most likely" + valid3 = " Ignoring." if _fill is False else "" + text1 = "Reading Item {0}. It is{1} a valid item for the state configuration{2}.{3}" + self._log_info(text1, _path, valid1, valid2, valid3) + if _fill is False: + _issue = "Item {} is not a valid item for the state configuration.".format(_path) + self._abitem.update_issues('config', + {state.id: {'issue': _issue, 'attribute': 'se_use', 'origin': state_type}}) + self.__use_ignore_list.append(_path) + elif _configorigvalue and _configorigvalue[i] not in cleaned_use_list: + cleaned_use_list.append(_configorigvalue[i]) + if _returntype[i] == 'value': + _issues = self.__use.get_issues() + for item in _issues.get('cast_item'): + if (_configorigvalue[i] is not None and isinstance(_configorigvalue[i], str) and + (StateEngineTools.partition_strip(_configorigvalue[i], ":")[1] in item or + _configorigvalue[i] in item)): + _issue_list = [item for key, value in _issues.items() if value for item in value] + self._log_warning("se_use {} points to invalid item. Ignoring.", _configorigvalue[i]) + self._abitem.update_issues('config', {state.id: + {'issue': _issue_list, + 'attribute': 'se_use', 'origin': state_type}}) + self.__use_ignore_list.append(_configorigvalue[i]) + _path = None + elif _returntype[i] in ['item', 'eval']: + _path = _configvalue[i] + _issues = self.__use.get_issues() + for list_key in ['cast_item', 'eval', 'item']: + if list_key in _issues: + for item in _issues[list_key]: + if (_path is not None and isinstance(_path, str) and + StateEngineTools.partition_strip(_path, ":")[1] in item): + + _issue_list = [item for key, value in _issues.items() if value for item in value] + self._log_warning("se_use {} defined by invalid item/eval. Ignoring.", _path) + self._abitem.update_issues('config', {state.id: + {'issue': _issue_list, + 'attribute': 'se_use', 'origin': state_type}}) + self.__use_ignore_list.append(_path) + _path = None + if _path is None: + pass + + elif _path is not None and _configorigvalue[i] not in cleaned_use_list: + self._log_info("se_use {} defined by item/eval {}. Even if current result is not valid, " + "entry will be re-evaluated on next state evaluation. element: {}", _path, _configorigvalue[i], element) + cleaned_use_list.append(_configorigvalue[i]) + #self.__use_done.append(_path) + if _path is None: + pass + elif element == self.state_item: + self._log_info("Use element {} is same as current state - Ignoring.", _name) + self.__use_ignore_list.append(element) + elif _fill and element is not None and _configorigvalue[i] not in self.__use_list: + + if isinstance(_name, list): + self._log_develop( + "Adding list element {} to state fill function. path is {}, name is {}. 
configvalue {}", + element, _path, _name, _configorigvalue[i]) + self.__use_list.append(_configorigvalue[i]) + for item in _name: + self.__initialize_se_use(item, recursion_depth + 1) + else: + self._log_develop( + "Adding element {} to state fill function. path is {}, name is {}.", + _configorigvalue[i], _path, _name) + self.__use_list.append(_configorigvalue[i]) + self.__initialize_se_use(element, recursion_depth + 1) + elif _fill and element is not None and _configorigvalue[i] in self.__use_list: + self._log_debug("Ignoring element {} as it is already added. cleaned use {}", element, cleaned_use_list) + self.__use_list.extend(cleaned_use_list) + seen = set() + self.__use_list = [x for x in self.__use_list if not (x in seen or seen.add(x))] + self.__use.set(self.__use_list) # Read configuration from item and populate data in class # item_state: item to read from # recursion_depth: current recursion_depth (recursion is canceled after five levels) # se_use: If se_use Attribute is used or not - def __fill(self, item_state, recursion_depth, se_use=None): - def update_unused(used_attributes, type, name): + def __fill(self, state, recursion_depth, se_use=None, use=None): + + def update_unused(used_attributes, attrib_type, attrib_name): #filtered_dict = {key: value for key, value in self.__unused_attributes.items() if key not in used_attributes} #self.__unused_attributes = copy(filtered_dict) - for item, nested_dict in self.__unused_attributes.items(): - if item in used_attributes.keys(): - used_attributes[item].update({type: name}) - used_attributes[item].update(nested_dict) + for nested_entry, nested_dict in self.__unused_attributes.items(): + if nested_entry in used_attributes.keys(): + used_attributes[nested_entry].update({attrib_type: attrib_name}) + used_attributes[nested_entry].update(nested_dict) self.__used_attributes.update(used_attributes) def update_action_status(action_status, actiontype): @@ -449,97 +603,38 @@ def update_action_status(action_status, actiontype): self.__action_status = filtered_dict #self._log_develop("Updated action status: {}, updated used {}", self.__action_status, self.__used_attributes) + if isinstance(state, SeState): + item_state = state.state_item + else: + item_state = state + self._log_develop("Fill state {} type {}, called by {}, recursion {}", item_state, type(item_state), se_use, recursion_depth) if se_use == "reinit": - self._log_develop("Resetting conditions and actions at re-init") + self._log_develop("Resetting conditions and actions at re-init use is {}", use) self.__conditions.reset() self.__actions_enter_or_stay.reset() self.__actions_enter.reset() self.__actions_stay.reset() self.__actions_leave.reset() self.__use_done = [] - if recursion_depth > 5: - self._log_error("{0}/{1}: too many levels of 'use'", self.id, item_state.property.path) - return - # Import data from other item if attribute "use" is found - if "se_use" in item_state.conf: - _returnvalue, _returntype, _, _issue = self.__use.set_from_attr(item_state, "se_use") - _configvalue = copy(_returnvalue) - _configvalue = [_configvalue] if not isinstance(_configvalue, list) else _configvalue - self._abitem.update_issues('config', {item_state.property.path: {'issue': _issue, 'attribute': 'se_use'}}) - _use = self.__use.get() - if self.__use.is_empty() or _use is None: - _issue = "se_use {} is set up in a wrong way".format(_use) - self._abitem.update_issues('config', - {item_state.property.path: {'issue': _issue, 'attribute': 'se_use'}}) - self._log_warning("{} - ignoring.", _issue) - else: 
- _use = [_use] if not isinstance(_use, list) else _use - _returntype = [_returntype] if not isinstance(_returntype, list) else _returntype - cleaned_use_list = [] - for i, element in enumerate(_use): - try: - _name = element.property.path - except Exception: - _name = element - _fill = True - _path = None - if isinstance(element, StateEngineStruct.SeStruct): - _path = element.property.path - text1 = "Reading struct {0}. It is{1} a valid struct for the state configuration.{2}" - _fill = element.property.valid_se_use - valid1 = " NOT" if _fill is False else "" - valid2 = " Ignoring." if _fill is False else "" - self._log_info(text1, _path, valid1, valid2) - if _fill is False: - _issue = "Not valid. Ensure it is addressed by .rules.." - self._abitem.update_issues('struct', {_path: {'issue': _issue}}) - elif _configvalue and _configvalue[i] not in cleaned_use_list: - cleaned_use_list.append(_configvalue[i]) - elif isinstance(element, self.__itemClass): - _path = element.property.path - text1 = "Reading Item {0}. It is{1} a valid item for the state configuration.{2}" - valid1 = " NOT" if _fill is False else " most likely" - valid2 = " Ignoring." if _fill is False else "" - self._log_info(text1, _path, valid1, valid2) - if _fill is False: - _issue = "Item {} is not a valid item for the state configuration.".format(_path) - self._abitem.update_issues('config', - {item_state.property.path: {'issue': _issue, - 'attribute': 'se_use'}}) - elif _configvalue and _configvalue[i] not in cleaned_use_list: - cleaned_use_list.append(_configvalue[i]) - if _returntype[i] in ['item', 'eval']: - _path = _configvalue[i] - self._log_info("se_use {} defined by item/eval. Even if current result is not valid, " - "entry will be re-evaluated on next state evaluation.", _path) - if _path is not None and _path not in cleaned_use_list: - cleaned_use_list.append(_path) - self.__use_done.append(_path) - if _path is None: - pass - elif element == self.state_item: - self._log_info("Use element {} is same as current state - Ignoring.", _name) - elif _fill and element is not None and element not in self.__use_done: - self._log_develop("Adding element {} to state fill function.", _name) - if isinstance(_name, list): - self.__fill_list(element, recursion_depth + 1, _name) - else: - self.__use_done.append(element) - self.__fill(element, recursion_depth + 1, _name) - self.__use.set(cleaned_use_list) + use = self.__use.get() + if use is not None: + use = use if isinstance(use, list) else [use] + use = [u for u in use if u is not None] + use = StateEngineTools.flatten_list(use) + self.__fill_list(use, recursion_depth, se_use, use) # Get action sets and condition sets + self._log_develop("Use is {}", use) parent_item = item_state.return_parent() + if parent_item == Items.get_instance(): + parent_item = None child_items = item_state.return_children() _conditioncount = 0 - _enter_actioncount = 0 - _enter_stay_actioncount = 0 - _leave_actioncount = 0 - _stay_actioncount = 0 - _actioncount = 0 + _action_counts = {"enter": 0, "stay": 0, "enter_or_stay": 0, "leave": 0} _unused_attributes = {} _used_attributes = {} + _action_status = {} # first check all conditions for child_item in child_items: child_name = StateEngineTools.get_last_part_of_item_id(child_item) @@ -560,7 +655,7 @@ def update_action_status(action_status, actiontype): except ValueError as ex: raise ValueError("Condition {0} error: {1}".format(child_name, ex)) - if _conditioncount == 0: + if _conditioncount == 0 and parent_item: for attribute in parent_item.conf: func, name 
= StateEngineTools.partition_strip(attribute, "_") cond1 = name and name not in self.__used_attributes @@ -574,84 +669,72 @@ def update_action_status(action_status, actiontype): for child_item in child_items: child_name = StateEngineTools.get_last_part_of_item_id(child_item) try: - if child_name == "on_enter": - _actioncount += 1 + action_mapping = { + "on_enter": ("enter", self.__actions_enter), + "on_stay": ("stay", self.__actions_stay), + "on_enter_or_stay": ("enter_or_stay", self.__actions_enter_or_stay), + "on_leave": ("leave", self.__actions_leave) + } + + if child_name in action_mapping: + action_name, action_method = action_mapping[child_name] for attribute in child_item.conf: - _enter_actioncount += 1 - _, _action_status = self.__actions_enter.update(attribute, child_item.conf[attribute]) + self._log_develop("Filling state with {} action named {}", child_name, attribute) + _action_counts[action_name] += 1 + _, _action_status = action_method.update(attribute, child_item.conf[attribute]) if _action_status: - update_action_status(_action_status, 'enter') - self._abitem.update_action_status(self.__action_status) - update_unused(_used_attributes, 'action', child_name) - elif child_name == "on_stay": - _actioncount += 1 - for attribute in child_item.conf: - _stay_actioncount += 1 - _, _action_status = self.__actions_stay.update(attribute, child_item.conf[attribute]) - if _action_status: - update_action_status(_action_status, 'stay') - self._abitem.update_action_status(self.__action_status) - update_unused(_used_attributes, 'action', child_name) - elif child_name == "on_enter_or_stay": - _actioncount += 1 - for attribute in child_item.conf: - _enter_stay_actioncount += 1 - _, _action_status = self.__actions_enter_or_stay.update(attribute, child_item.conf[attribute]) - if _action_status: - update_action_status(_action_status, 'enter_or_stay') - self._abitem.update_action_status(self.__action_status) - update_unused(_used_attributes, 'action', child_name) - elif child_name == "on_leave": - _actioncount += 1 - for attribute in child_item.conf: - _leave_actioncount += 1 - _, _action_status = self.__actions_leave.update(attribute, child_item.conf[attribute]) - if _action_status: - update_action_status(_action_status, 'leave') + update_action_status(_action_status, action_name) self._abitem.update_action_status(self.__action_status) update_unused(_used_attributes, 'action', child_name) + except ValueError as ex: raise ValueError("Condition {0} check for actions error: {1}".format(child_name, ex)) + self._abitem.update_attributes(self.__unused_attributes, self.__used_attributes) # Actions defined directly in the item go to "enter_or_stay" for attribute in item_state.conf: _result = self.__actions_enter_or_stay.update(attribute, item_state.conf[attribute]) - _enter_stay_actioncount += _result[0] if _result else 0 + _action_counts["enter_or_stay"] += _result[0] if _result else 0 _action_status = _result[1] if _action_status: update_action_status(_action_status, 'enter_or_stay') self._abitem.update_action_status(self.__action_status) - _total_actioncount = _enter_actioncount + _stay_actioncount + _enter_stay_actioncount + _leave_actioncount + _total_actioncount = _action_counts["enter"] + _action_counts["stay"] + _action_counts["enter_or_stay"] + _action_counts["leave"] self.update_name(item_state, recursion_depth) # Complete condition sets and actions at the end + if recursion_depth == 0: - self.__conditions.complete(item_state) - _action_status = self.__actions_enter.complete(item_state, 
self.__conditions.evals_items) + self.__conditions.complete(self, use) + _action_status = self.__actions_enter.complete(self, self.__conditions.evals_items, use) if _action_status: update_action_status(_action_status, 'enter') self._abitem.update_action_status(self.__action_status) - _action_status = self.__actions_stay.complete(item_state, self.__conditions.evals_items) + _action_status = self.__actions_stay.complete(self, self.__conditions.evals_items, use) if _action_status: update_action_status(_action_status, 'stay') self._abitem.update_action_status(self.__action_status) - _action_status = self.__actions_enter_or_stay.complete(item_state, self.__conditions.evals_items) + _action_status = self.__actions_enter_or_stay.complete(self, self.__conditions.evals_items, use) if _action_status: update_action_status(_action_status, 'enter_or_stay') self._abitem.update_action_status(self.__action_status) - _action_status = self.__actions_leave.complete(item_state, self.__conditions.evals_items) + _action_status = self.__actions_leave.complete(self, self.__conditions.evals_items, use) if _action_status: update_action_status(_action_status, 'leave') self._abitem.update_action_status(self.__action_status) self._abitem.update_action_status(self.__action_status) self._abitem.update_attributes(self.__unused_attributes, self.__used_attributes) _summary = "{} on_enter, {} on_stay , {} on_enter_or_stay, {} on_leave" + if self.__action_status: + _ignore_list = [entry for entry in self.__action_status if self.__action_status[entry].get('ignore') is True] + if _ignore_list: + self._log_info("Ignored {} action(s) due to errors: {}", len(_ignore_list), _ignore_list) if se_use is not None: self._log_debug("Added {} action(s) based on se_use {}. " + _summary, _total_actioncount, se_use, - _enter_actioncount, _stay_actioncount, _enter_stay_actioncount, _leave_actioncount) + _action_counts["enter"], _action_counts["stay"], _action_counts["enter_or_stay"], _action_counts["leave"]) self._log_debug("Added {} condition set(s) based on se_use: {}", _conditioncount, se_use) else: self._log_debug("Added {} action(s) based on item configuration: " + _summary, _total_actioncount, - _enter_actioncount, _stay_actioncount, _enter_stay_actioncount, _leave_actioncount) + _action_counts["enter"], _action_counts["stay"], _action_counts["enter_or_stay"], _action_counts["leave"]) self._log_debug("Added {} condition set(s) based on item configuration", _conditioncount) diff --git a/stateengine/StateEngineStruct.py b/stateengine/StateEngineStruct.py index 4c6edb69e..51a958efa 100755 --- a/stateengine/StateEngineStruct.py +++ b/stateengine/StateEngineStruct.py @@ -47,6 +47,10 @@ def conf(self): def id(self): return self.struct_path + @property + def path(self): + return self.struct_path + def return_children(self): for child in self._conf.keys(): yield child @@ -61,7 +65,7 @@ def __init__(self, abitem, struct_path, global_struct): self._conf = {} self._full_conf = {} self._struct = None - self._global_struct = global_struct # copy.deepcopy(self.itemsApi.return_struct_definitions()) + self._global_struct = global_struct # copy.deepcopy(self.itemsApi.return_struct_definitions()) self._struct_rest = None self._children_structs = [] self._parent_struct = None @@ -103,7 +107,7 @@ def get(self): raise NotImplementedError("Class {} doesn't implement get()".format(self.__class__.__name__)) -# Class representing struct child +# Class representing struct class SeStructMain(SeStruct): # Initialize the action # abitem: parent SeItem instance @@ 
-160,7 +164,7 @@ def get(self): self.create_children() self.valid_se_use = True if "se_use" in self._full_conf else self.valid_se_use else: - _issue = "Item '{}' does not exist".format( self._struct_rest) + _issue = "Item '{}' does not exist".format(self._struct_rest) self._abitem.update_issues('struct', {self.struct_path: {'issue': _issue}}) self._log_error("{} in struct {}", _issue, self._struct) except Exception as ex: @@ -184,7 +188,8 @@ def __repr__(self): def get(self): try: - self._conf = self.dict_get(self._global_struct.get(self._struct) or {}, self._struct_rest, self._global_struct.get(self._struct) or {}) + self._conf = self.dict_get(self._global_struct.get(self._struct) or {}, + self._struct_rest, self._global_struct.get(self._struct) or {}) except Exception: self._conf = {} @@ -199,12 +204,14 @@ def __init__(self, abitem, struct_path, global_struct): #self._log_debug("Struct path {} for {}", self.struct_path, __class__.__name__) def __repr__(self): - return "SeStructParent {}".format(self.struct_path, self._conf) + return "SeStructParent {}".format(self.struct_path) def get(self): try: parent_name = self.struct_path.split(".")[-2] - _temp_dict = self.dict_get(self._global_struct.get(self._struct) or {}, parent_name, self._global_struct.get(self._struct) or {}) + self.struct_path = self.struct_path.rsplit('.', 1)[0] + _temp_dict = self.dict_get(self._global_struct.get(self._struct) or {}, parent_name, + self._global_struct.get(self._struct) or {}) _temp_dict = collections.OrderedDict( {key: value for (key, value) in _temp_dict.items() if not isinstance(value, collections.abc.Mapping)}) self._conf = _temp_dict diff --git a/stateengine/StateEngineTools.py b/stateengine/StateEngineTools.py index f47861f2a..a29968b0a 100755 --- a/stateengine/StateEngineTools.py +++ b/stateengine/StateEngineTools.py @@ -22,12 +22,9 @@ from . import StateEngineLogger import datetime from ast import literal_eval -from lib.item import Items -from lib.item.item import Item import re +from lib.item.items import Items -itemsApi = Items.get_instance() -__itemClass = Item # General class for everything that is below the SeItem Class # This class provides some general stuff: @@ -124,7 +121,7 @@ def parse_relative(evalstr, begintag, endtags): rel = rest[:rest.find(endtag)] rest = rest[rest.find(endtag)+len(endtag):] if 'property' in endtag: - rest1 = re.split('([ +\-*/])', rest, 1) + rest1 = re.split('([- +*/])', rest, 1) rest = ''.join(rest1[1:]) pref += "se_eval.get_relative_itemproperty('{}', '{}')".format(rel, rest1[0]) elif '()' in endtag: @@ -260,25 +257,42 @@ def cast_time(value): # smarthome: instance of smarthome.py base class # base_item: base item to search in # attribute: name of attribute to find -def find_attribute(smarthome, base_item, attribute, recursion_depth=0): +def find_attribute(smarthome, state, attribute, recursion_depth=0, use=None): + if isinstance(state, list): + for element in state: + result = find_attribute(smarthome, element, attribute, recursion_depth) + if result is not None: + return result + return None + # 1: parent of given item could have attribute - parent_item = base_item.return_parent() try: - _parent_conf = parent_item.conf - if parent_item is not None and attribute in _parent_conf: - return parent_item.conf[attribute] + # if state is state object, get the item and se_use information + base_item = state.state_item + if use is None: + use = state.use.get() except Exception: - return None + # if state is a standard item (e.g. 
evaluated by se_use, just take it as it is + base_item = state + use = None + parent_item = base_item.return_parent() + if parent_item == Items.get_instance(): + pass + else: + try: + _parent_conf = parent_item.conf + if parent_item is not None and attribute in _parent_conf: + return parent_item.conf[attribute] + except Exception: + return None - # 2: if item has attribute "se_use", get the item to use and search this item for required attribute - if "se_use" in base_item.conf: + # 2: if state has attribute "se_use", get the item to use and search this item for required attribute + if use is not None: if recursion_depth > 5: return None - use_item = itemsApi.return_item(base_item.conf.get("se_use")) - if use_item is not None: - result = find_attribute(smarthome, use_item, attribute, recursion_depth + 1) - if result is not None: - return result + result = find_attribute(smarthome, use, attribute, recursion_depth + 1) + if result is not None: + return result # 3: nothing found return None @@ -289,8 +303,8 @@ def find_attribute(smarthome, base_item, attribute, recursion_depth=0): # splitchar: where to split # returns: Parts before and after split, whitespaces stripped def partition_strip(value, splitchar): - if isinstance(value, list): - raise ValueError("You can not use list entries!") + if not isinstance(value, str): + raise ValueError("value has to be a string!") elif value.startswith("se_") and splitchar == "_": part1, __, part2 = value[3:].partition(splitchar) return "se_" + part1.strip(), part2.strip() @@ -302,8 +316,8 @@ def partition_strip(value, splitchar): # return list representation of string # value: list as string # returns: list or original value -def convert_str_to_list(value): - if isinstance(value, str) and ("," in value and value.startswith("[")): +def convert_str_to_list(value, force=True): + if isinstance(value, str) and (value[:1] == '[' and value[-1:] == ']'): value = value.strip("[]") if isinstance(value, str) and "," in value: try: @@ -314,11 +328,12 @@ def convert_str_to_list(value): return literal_eval(formatted_str) except Exception as ex: raise ValueError("Problem converting string to list: {}".format(ex)) - elif isinstance(value, list): + elif isinstance(value, list) or force is False: return value else: return [value] + # return dict representation of string # value: OrderedDict as string # returns: OrderedDict or original value @@ -341,6 +356,7 @@ def convert_str_to_dict(value): except Exception as ex: raise ValueError("Problem converting string to OrderedDict: {}".format(ex)) + # return string representation of eval function # eval_func: eval function # returns: string representation @@ -368,7 +384,9 @@ def get_eval_name(eval_func): # source: source # item: item being updated # eval_type: update or change -def get_original_caller(elog, caller, source, item=None, eval_keyword=['Eval'], eval_type='update'): +def get_original_caller(smarthome, elog, caller, source, item=None, eval_keyword=None, eval_type='update'): + if eval_keyword is None: + eval_keyword = ['Eval'] original_caller = caller original_item = item if isinstance(source, str): @@ -376,7 +394,7 @@ def get_original_caller(elog, caller, source, item=None, eval_keyword=['Eval'], else: original_source = "None" while partition_strip(original_caller, ":")[0] in eval_keyword: - original_item = itemsApi.return_item(original_source) + original_item = smarthome.return_item(original_source) if original_item is None: elog.info("get_caller({0}, {1}): original item not found", caller, source) break diff --git 
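One detail worth noting in the `get_original_caller` change above is the switch from `eval_keyword=['Eval']` to `eval_keyword=None`, with the list created inside the function body. A short generic sketch of why that matters (not the plugin's code): a mutable default argument is created once at definition time and then shared between all calls.

```python
# Mutable default arguments are evaluated once and shared across calls,
# which is why the signature above now defaults to None and builds the
# list inside the function.
def keywords_shared(keywords=['Eval']):   # anti-pattern: one shared list
    keywords.append('Init')
    return keywords

def keywords_fresh(keywords=None):        # pattern used in the change above
    if keywords is None:
        keywords = ['Eval']
    keywords.append('Init')
    return keywords

print(keywords_shared())  # ['Eval', 'Init']
print(keywords_shared())  # ['Eval', 'Init', 'Init']  <- state leaked between calls
print(keywords_fresh())   # ['Eval', 'Init']
print(keywords_fresh())   # ['Eval', 'Init']          <- fresh list every call
```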
a/stateengine/StateEngineValue.py b/stateengine/StateEngineValue.py index 11774967c..bfc2c87b8 100755 --- a/stateengine/StateEngineValue.py +++ b/stateengine/StateEngineValue.py @@ -54,11 +54,13 @@ def __init__(self, abitem, name, allow_value_list=False, value_type=None): self.__varname = None self.__template = None self.__issues = [] + self.__get_issues = {'cast_item': [], 'eval': [], 'regex': [], 'struct': [], 'var': [], 'item': []} self._additional_sources = [] self.itemsApi = Items.get_instance() self.__itemClass = Item self.__listorder = [] self.__type_listorder = [] + self.__orig_listorder = [] self.__valid_valuetypes = ["value", "regex", "eval", "var", "item", "template", "struct"] if value_type == "str": self.__cast_func = StateEngineTools.cast_str @@ -83,18 +85,32 @@ def is_empty(self): return self.__value is None and self.__item is None and self.__eval is None and \ self.__varname is None and self.__regex is None and self.__struct is None + def get_issues(self): + return self.__get_issues + # Set value directly from attribute # item: item containing the attribute # attribute_name: name of attribute to use # default_value: default value to be used if item contains no such attribute - def set_from_attr(self, item, attribute_name, default_value=None, reset=True, attr_type=None): + def set_from_attr(self, item, attribute_name, default_value=None, reset=True, attr_type=None, ignore=None): value = copy.deepcopy(item.conf.get(attribute_name)) if value is not None: _using_default = False + if isinstance(value, list): + if not ignore: + seen = set() + else: + ignore = ignore if isinstance(ignore, list) else [ignore] + seen = set(ignore) + self._log_develop("Ignoring values {}", ignore) + value = [x for x in value if not (x in seen or seen.add(x))] + elif value == ignore: + self._log_develop("Not setting value {} as it should be ignored", value) + return None, None, False, None, None self._log_develop("Processing value {0} from attribute name {1}, reset {2}, type {3}", value, attribute_name, reset, attr_type) elif default_value is None: - return None, None, False, None + return None, None, False, None, None else: value = default_value _using_default = True @@ -119,9 +135,9 @@ def set_from_attr(self, item, attribute_name, default_value=None, reset=True, at if value is not None: self._log_develop("Setting value {0}, attribute name {1}, reset {2}, type {3}", value, attribute_name, reset, attr_type) - _returnvalue, _returntype, _issue = self.set(value, attribute_name, reset, item) - self._log_develop("Set from attribute returnvalue {}, returntype {}, issue {}", _returnvalue, _returntype, _issue) - return _returnvalue, _returntype, _using_default, _issue + _returnvalue, _returntype, _issue, _origvalue = self.set(value, attribute_name, reset) + self._log_develop("Set from attribute returnvalue {}, returntype {}, issue {}, original {}", _returnvalue, _returntype, _issue, _origvalue) + return _returnvalue, _returntype, _using_default, _issue, _origvalue def _set_additional(self, _additional_sources): for _use in _additional_sources: @@ -140,11 +156,12 @@ def __resetvalue(self): self._additional_sources = [] self.__listorder = [] self.__type_listorder = [] + self.__orig_listorder = [] # Set value # value: string indicating value or source of value # name: name of object ("time" is being handled differently) - def set(self, value, name="", reset=True, item=None, copyvalue=True): + def set(self, value, name="", reset=True, copyvalue=True): if copyvalue is True: value = copy.copy(value) if reset: @@ 
-180,6 +197,7 @@ def set(self, value, name="", reset=True, item=None, copyvalue=True): _issue, self.__valid_valuetypes, field_value[i]) source[i] = "value" self.__type_listorder.append(source[i]) + self.__orig_listorder.append(val) if source[i] == "value": self.__listorder[i] = value[i] if source[i] == "template": @@ -256,6 +274,7 @@ def set(self, value, name="", reset=True, item=None, copyvalue=True): if source == "value": self.__listorder = [field_value] self.__type_listorder.append(source) + self.__orig_listorder.append(value) else: source = "value" field_value = value @@ -283,8 +302,8 @@ def set(self, value, name="", reset=True, item=None, copyvalue=True): self._log_warning(_issue) s = None try: - cond1 = s.lstrip('-').replace('.','',1).isdigit() - cond2 = field_value[i].lstrip('-').replace('.','',1).isdigit() + cond1 = s.lstrip('-').replace('.', '', 1).isdigit() + cond2 = field_value[i].lstrip('-').replace('.', '', 1).isdigit() except Exception: cond1 = False cond2 = False @@ -297,7 +316,7 @@ def set(self, value, name="", reset=True, item=None, copyvalue=True): self.__value = [] if self.__value is None else [self.__value] if not isinstance(self.__value, list) else self.__value if s == "value": - cond3 = isinstance(field_value[i], str) and field_value[i].lstrip('-').replace('.','',1).isdigit() + cond3 = isinstance(field_value[i], str) and field_value[i].lstrip('-').replace('.', '', 1).isdigit() if cond3: field_value[i] = ast.literal_eval(field_value[i]) elif isinstance(field_value[i], str) and field_value[i].lower() in ['true', 'yes']: @@ -315,6 +334,9 @@ def set(self, value, name="", reset=True, item=None, copyvalue=True): if s == "item": _item, _issue = self._abitem.return_item(field_value[i]) if _issue not in [[], None, [None], self.__issues]: + _issue_dict = {field_value[i]: _issue} + if _issue_dict not in self.__get_issues['item']: + self.__get_issues['item'].append(_issue_dict) self.__issues.append(_issue) self.__item.append(None if s != "item" else self.__absolute_item(_item, field_value[i])) self.__eval = [] if self.__eval is None else [self.__eval] if not isinstance(self.__eval, list) else self.__eval @@ -349,6 +371,9 @@ def set(self, value, name="", reset=True, item=None, copyvalue=True): if source == "item": _item, _issue = self._abitem.return_item(field_value) if _issue not in [[], None, [None], self.__issues]: + _issue_dict = {field_value: _issue} + if _issue_dict not in self.__get_issues['item']: + self.__get_issues['item'].append(_issue_dict) self.__issues.append(_issue) self.__item = None if source != "item" else self.__absolute_item(_item, field_value) self.__eval = None if source != "eval" else field_value @@ -359,7 +384,7 @@ def set(self, value, name="", reset=True, item=None, copyvalue=True): if isinstance(field_value, list) and not self.__allow_value_list: raise ValueError("{0}: value_in is not allowed, problem with {1}. 
Allowed = {2}".format( self.__name, field_value, self.__allow_value_list)) - cond3 = isinstance(field_value, str) and field_value.lstrip('-').replace('.','',1).isdigit() + cond3 = isinstance(field_value, str) and field_value.lstrip('-').replace('.', '', 1).isdigit() if cond3: field_value = ast.literal_eval(field_value) elif isinstance(field_value, str) and field_value.lower() in ['true', 'yes']: @@ -374,15 +399,19 @@ def set(self, value, name="", reset=True, item=None, copyvalue=True): self.__issues = StateEngineTools.flatten_list(self.__issues) self.__listorder = StateEngineTools.flatten_list(self.__listorder) self.__type_listorder = StateEngineTools.flatten_list(self.__type_listorder) + self.__orig_listorder = StateEngineTools.flatten_list(self.__orig_listorder) del value - return self.__listorder, self.__type_listorder, self.__issues + return self.__listorder, self.__type_listorder, self.__issues, self.__orig_listorder # Set cast function # cast_func: cast function def set_cast(self, cast_func): self.__cast_func = cast_func self.__value, _issue = self.__do_cast(self.__value) - return [_issue] + if _issue: + return [_issue] + else: + return [] # determine and return value def get(self, default=None, originalorder=True): @@ -479,7 +508,7 @@ def write_to_logger(self): if self.__eval is not None: self._log_debug("{0} from eval: {1}", self.__name, self.__eval) _original_listorder = self.__listorder.copy() - self._log_debug("Currently eval results in {}", self.__get_eval()) + self._log_debug("Currently eval results in {}. ", self.__get_eval()) self.__listorder = _original_listorder if self.__varname is not None: if isinstance(self.__varname, list): @@ -518,14 +547,31 @@ def get_text(self, prefix=None, suffix=None): def cast_item(self, value): try: _returnvalue, _issue = self._abitem.return_item(value) + if _issue not in [[], None, [None]]: + _issue_dict = {str(value): _issue[0]} + else: + _issue_dict = {} + if isinstance(_returnvalue, str): + try: + _returnvalue = eval(_returnvalue) + except Exception: + _issue = "Got string {0} while casting item {1}".format(_returnvalue, value) + _issue_dict = {str(value): _issue} + self._log_error(_issue) + if _issue_dict and _issue_dict not in self.__get_issues['cast_item']: + self.__get_issues['cast_item'].append(_issue_dict) return _returnvalue except Exception as ex: - self._log_error("Can't cast {0} to item/struct! {1}".format(value, ex)) + _issue = "Can't cast {0} to item/struct! 
{1}".format(value, ex) + _issue_dict = {str(value): _issue} + if _issue_dict not in self.__get_issues['cast_item']: + self.__get_issues['cast_item'].append(_issue_dict) + self._log_error(_issue) return value - def __update_item_listorder(self, value, newvalue, id=None): + def __update_item_listorder(self, value, newvalue, item_id=None): if value is None: - _id_value = "item:{}".format(id) + _id_value = "item:{}".format(item_id) self.__listorder[self.__listorder.index(_id_value)] = newvalue if value in self.__listorder: self.__listorder[self.__listorder.index(value)] = newvalue @@ -533,31 +579,31 @@ def __update_item_listorder(self, value, newvalue, id=None): _item_value = "item:{}".format(value.property.path) if _item_value in self.__listorder: self.__listorder[self.__listorder.index(_item_value)] = newvalue - if id: - _item_value = "item:{}".format(id) + if item_id: + _item_value = "item:{}".format(item_id) if _item_value in self.__listorder: self.__listorder[self.__listorder.index(_item_value)] = "item:{}".format(newvalue.property.path) self._log_develop("Updated relative declaration {} with absolute item path {}. Listorder is now: {}", _item_value, newvalue.property.path, self.__listorder) - def __absolute_item(self, value, id=None): + def __absolute_item(self, value, item_id=None): if value is None: - self.__update_item_listorder(value, value, id) + self.__update_item_listorder(value, value, item_id) elif isinstance(value, list): valuelist = [] for i, element in enumerate(value): element = self.cast_item(element) - self.__update_item_listorder(value, element, id[i]) + self.__update_item_listorder(value, element, item_id[i]) value = valuelist else: _newvalue = self.cast_item(value) - self.__update_item_listorder(value, _newvalue, id) + self.__update_item_listorder(value, _newvalue, item_id) value = _newvalue return value # Cast given value, if cast-function is set # value: value to cast - def __do_cast(self, value, id=None): + def __do_cast(self, value, item_id=None): _issue = None if value is not None and self.__cast_func is not None: try: @@ -574,7 +620,7 @@ def __do_cast(self, value, id=None): if element in self.__listorder: self.__listorder[self.__listorder.index(element)] = _newvalue if isinstance(element, self.__itemClass): - self.__update_item_listorder(value, _newvalue, id[i]) + self.__update_item_listorder(value, _newvalue, item_id[i]) if isinstance(element, StateEngineStruct.SeStruct): _item_value = "struct:{}".format(element.property.path) @@ -587,7 +633,7 @@ def __do_cast(self, value, id=None): if value in self.__listorder: self.__listorder[self.__listorder.index(value)] = _newvalue if isinstance(value, self.__itemClass): - self.__update_item_listorder(value, _newvalue, id) + self.__update_item_listorder(value, _newvalue, item_id) if isinstance(value, StateEngineStruct.SeStruct): _item_value = "struct:{}".format(value.property.path) @@ -628,29 +674,40 @@ def __get_from_struct(self): for val in self.__struct: if val is not None: _newvalue, _issue = self.__do_cast(val) + _issue_dict = {val: _issue} + if _issue not in [[], None, [None]] and _issue_dict not in self.__get_issues['struct']: + self.__get_issues['struct'].append(_issue_dict) values.append(_newvalue) if 'struct:{}'.format(val.property.path) in self.__listorder: self.__listorder[self.__listorder.index('struct:{}'.format(val.property.path))] = _newvalue else: if self.__struct is not None: _newvalue, _issue = self.__do_cast(self.__struct) + _issue_dict = {self.__struct: _issue} + if _issue not in [[], None, [None]] and 
_issue_dict not in self.__get_issues['struct']: + self.__get_issues['struct'].append(_issue_dict) if 'struct:{}'.format(self.__regex) in self.__listorder: self.__listorder[self.__listorder.index('struct:{}'.format(self.__struct))] = _newvalue values = _newvalue + if values: return values try: _newvalue, _issue = self.__do_cast(self.__struct) + _issue_dict = {_newvalue: _issue} + if _issue not in [[], None, [None]] and _issue_dict not in self.__get_issues['struct']: + self.__get_issues['struct'].append(_issue_dict) if 'struct:{}'.format(self.__struct) in self.__listorder: self.__listorder[self.__listorder.index('struct:{}'.format(self.__struct))] = _newvalue values = _newvalue except Exception as ex: values = self.__struct _issue = "Problem while getting from struct '{0}': {1}.".format(values, ex) - #self.__issues.append(_issue) + _issue_dict = {values: _issue} + if _issue_dict not in self.__get_issues['struct']: + self.__get_issues['struct'].append(_issue_dict) self._log_info(_issue) - return values # Determine value by regular expression @@ -669,7 +726,6 @@ def __get_from_regex(self): values = _newvalue if values is not None: return values - try: _newvalue = re.compile(self.__regex, re.IGNORECASE) if 'regex:{}'.format(self.__regex) in self.__listorder: @@ -678,7 +734,9 @@ def __get_from_regex(self): except Exception as ex: values = self.__regex _issue = "Problem while creating regex '{0}': {1}.".format(values, ex) - #self.__issues.append(_issue) + _issue_dict = {values: _issue} + if _issue_dict not in self.__get_issues['regex']: + self.__get_issues['regex'].append(_issue_dict) self._log_info(_issue) return values @@ -696,6 +754,9 @@ def __get_eval(self): self._log_increase_indent() try: _newvalue, _issue = self.__do_cast(eval(self.__eval)) + _issue_dict = {StateEngineTools.get_eval_name(self.__eval): _issue} + if _issue not in [[], None, [None]] and _issue_dict not in self.__get_issues['eval']: + self.__get_issues['eval'].append(_issue_dict) if 'eval:{}'.format(self.__eval) in self.__listorder: self.__listorder[self.__listorder.index('eval:{}'.format(self.__eval))] = [_newvalue] values = _newvalue @@ -704,8 +765,11 @@ def __get_eval(self): self._log_increase_indent() except Exception as ex: self._log_decrease_indent() - _issue = "Problem evaluating '{0}': {1}.".format(StateEngineTools.get_eval_name(self.__eval), ex) - #self.__issues.append(_issue) + _name = StateEngineTools.get_eval_name(self.__eval) + _issue = "Problem evaluating '{0}': {1}.".format(_name, ex) + _issue_dict = {_name: _issue} + if _issue_dict not in self.__get_issues['eval']: + self.__get_issues['eval'].append(_issue_dict) self._log_warning(_issue) self._log_increase_indent() values = None @@ -719,7 +783,7 @@ def __get_eval(self): val = val.replace("\n", "") except Exception: pass - self._log_debug("Checking eval from list: {0}.", val) + self._log_debug("Checking eval {0} from list {1}.", val, self.__eval) self._log_increase_indent() if isinstance(val, str): if "stateengine_eval" in val or "se_eval" in val: @@ -727,6 +791,9 @@ def __get_eval(self): stateengine_eval = se_eval = StateEngineEval.SeEval(self._abitem) try: _newvalue, _issue = self.__do_cast(eval(val)) + _issue_dict = {val: _issue} + if _issue not in [[], None, [None]] and _issue_dict not in self.__get_issues['eval']: + self.__get_issues['eval'].append(_issue_dict) if 'eval:{}'.format(val) in self.__listorder: self.__listorder[self.__listorder.index('eval:{}'.format(val))] = [_newvalue] value = _newvalue @@ -737,13 +804,18 @@ def __get_eval(self): 
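Several of the StateEngineValue hunks above repeat the same pattern: build a `{key: message}` dict and append it to a per-category list in `__get_issues` only if it is not already present. A small stand-alone sketch of that duplicate-free issue registry; the function name and category keys are taken from the hunks, everything else is illustrative:

```python
# Duplicate-free issue registry, grouped by source of the problem.
get_issues = {'cast_item': [], 'eval': [], 'regex': [], 'struct': [], 'var': [], 'item': []}

def record_issue(category: str, key, message) -> None:
    """Store {key: message} under the category unless it is already recorded."""
    if message in ([], None, [None]):
        return  # nothing to record
    entry = {str(key): message}
    if entry not in get_issues[category]:
        get_issues[category].append(entry)

record_issue('eval', 'sh.some.item() > 5', "Problem evaluating: name 'sh' is not defined")
record_issue('eval', 'sh.some.item() > 5', "Problem evaluating: name 'sh' is not defined")  # skipped
record_issue('item', 'my.missing.item', 'Item not found')
print(get_issues['eval'])  # one entry, despite being recorded twice
```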
self._log_decrease_indent() _issue = "Problem evaluating from list '{0}': {1}.".format( StateEngineTools.get_eval_name(val), ex) - #self.__issues.append(_issue) + _issue_dict = {val: _issue} + if _issue_dict not in self.__get_issues['eval']: + self.__get_issues['eval'].append(_issue_dict) self._log_warning(_issue) self._log_increase_indent() value = None else: try: _newvalue, _issue = self.__do_cast(val()) + _issue_dict = {str(val): _issue} + if _issue not in [[], None, [None]] and _issue_dict not in self.__get_issues['eval']: + self.__get_issues['eval'].append(_issue_dict) if 'eval:{}'.format(val) in self.__listorder: self.__listorder[self.__listorder.index('eval:{}'.format(val))] = [_newvalue] value = _newvalue @@ -751,7 +823,9 @@ def __get_eval(self): self._log_decrease_indent() _issue = "Problem evaluating '{0}': {1}.".format( StateEngineTools.get_eval_name(val), ex) - #self.__issues.append(_issue) + _issue_dict = {str(val): _issue} + if _issue_dict not in self.__get_issues['eval']: + self.__get_issues['eval'].append(_issue_dict) self._log_info(_issue) value = None if value is not None: @@ -762,6 +836,9 @@ def __get_eval(self): try: self._log_increase_indent() _newvalue, _issue = self.__do_cast(self.__eval()) + _issue_dict = {_newvalue: _issue} + if _issue not in [[], None, [None]] and _issue_dict not in self.__get_issues['eval']: + self.__get_issues['eval'].append(_issue_dict) if 'eval:{}'.format(self.__eval) in self.__listorder: self.__listorder[self.__listorder.index('eval:{}'.format(self.__eval))] = [_newvalue] values = _newvalue @@ -770,17 +847,18 @@ def __get_eval(self): self._log_increase_indent() except Exception as ex: self._log_decrease_indent() - _issue = "Problem evaluating '{0}': {1}.".format(StateEngineTools.get_eval_name(self.__eval), ex) - #self.__issues.append(_issue) + _name = StateEngineTools.get_eval_name(self.__eval) + _issue = "Problem evaluating '{0}': {1}.".format(_name, ex) self._log_warning(_issue) self._log_increase_indent() + _issue_dict = {_name: _issue} + if _issue_dict not in self.__get_issues['eval']: + self.__get_issues['eval'].append(_issue_dict) return None - return values # Determine value from item def __get_from_item(self): - _issue_list = [] if isinstance(self.__item, list): values = [] for val in self.__item: @@ -797,8 +875,9 @@ def __get_from_item(self): for entry in checked_entry: _newvalue, _issue = self.__do_cast(entry) - if _issue not in [[], None, [None], _issue_list]: - _issue_list.append(_issue) + _issue_dict = {entry: _issue} + if _issue not in [[], None, [None]] and _issue_dict not in self.__get_issues['item']: + self.__get_issues['item'].append(_issue_dict) if _newvalue is not None: _new_values.append(_newvalue) @@ -812,6 +891,7 @@ def __get_from_item(self): index = self.__listorder.index(search_item) self.__listorder[index] = _new_values values.append(_new_values) + if values is not None: return values else: @@ -826,8 +906,9 @@ def __get_from_item(self): _new_values = [] for entry in checked_entry: _newvalue, _issue = self.__do_cast(entry) - if _issue not in [[], None, [None], _issue_list]: - _issue_list.append(_issue) + _issue_dict = {entry: _issue} + if _issue not in [[], None, [None]] and _issue_dict not in self.__get_issues['item']: + self.__get_issues['item'].append(_issue_dict) if _newvalue is not None: _new_values.append(_newvalue) _new_values = _new_values[0] if len(_new_values) == 1 else None if len(_new_values) == 0 else [_new_values] @@ -852,12 +933,11 @@ def __get_from_item(self): except Exception as ex: values = 
self.__item _issue = "Problem while reading item path '{0}': {1}.".format(values, ex) - if _issue not in _issue_list: - _issue_list.append(_issue) self._log_info(_issue) _newvalue, _issue = self.__do_cast(values) - if _issue not in [[], None, [None], _issue_list]: - _issue_list.append(_issue) + _issue_dict = {_newvalue: _issue} + if _issue not in [[], None, [None]] and _issue_dict not in self.__get_issues['item']: + self.__get_issues['item'].append(_issue_dict) return _newvalue # Determine value from variable @@ -867,10 +947,12 @@ def update_value(varname): new_value, _issue = self.__do_cast(value) new_value = 'var:{}'.format(varname) if new_value == '' else new_value if isinstance(new_value, str) and 'Unknown variable' in new_value: - issue = "There is a problem with your variable {}".format(new_value) - #self.__issues.append(issue) - self._log_warning(issue) + _issue = "There is a problem with your variable {}".format(new_value) + self._log_warning(_issue) new_value = '' + _issue_dict = {varname: _issue} + if _issue not in [[], None, [None]] and _issue_dict not in self.__get_issues['var']: + self.__get_issues['var'].append(_issue_dict) self._log_debug("Checking variable '{0}', value {1} from list {2}", varname, new_value, self.__listorder) if 'var:{}'.format(varname) in self.__listorder: diff --git a/stateengine/StateEngineWebif.py b/stateengine/StateEngineWebif.py index a40a5a300..3181788b2 100755 --- a/stateengine/StateEngineWebif.py +++ b/stateengine/StateEngineWebif.py @@ -33,7 +33,7 @@ class WebInterface(StateEngineTools.SeItemChild): # Constructor # abitem: parent SeItem instance - def __init__(self, smarthome, abitem): + def __init__(self, abitem): super().__init__(abitem) if not REQUIRED_PACKAGE_IMPORTED: @@ -57,23 +57,34 @@ def __init__(self, smarthome, abitem): self.__conditionset_count = 0 def __repr__(self): - return "WebInterface item: {}, id {}.".format(self.__states, self.__name) if REQUIRED_PACKAGE_IMPORTED else "None" + return "WebInterface item: {}, id {}".format(self.__states, self.__name) if REQUIRED_PACKAGE_IMPORTED else "None" def _actionlabel(self, state, label_type, conditionset, previousconditionset, previousstate_conditionset): # Check if conditions for action are met or not # action_dict: abitem[state]['on_enter'/'on_stay'/'on_enter_or_stay'/'on_leave'].get(action) # condition_to_meet: 'conditionset'/'previousconditionset''previousstate_conditionset' # conditionset: name of conditionset that should get checked + def _strip_regex(regex_list): + pattern_strings = [] + for item in regex_list: + if isinstance(item, re.Pattern): + pattern_strings.append(item.pattern) + else: + pattern_strings.append(str(item)) + return str(pattern_strings) + def _check_webif_conditions(action_dict, condition_to_meet: str, conditionset: str): _condition_check = action_dict.get(condition_to_meet) _condition_check = StateEngineTools.flatten_list(_condition_check) _condition_necessary = 1 if _condition_check != 'None' else 0 _condition_check = _condition_check if isinstance(_condition_check, list) else [_condition_check] _condition_count = 0 + _condition = False for cond in _condition_check: try: - _cond = re.compile(cond) - _matching = _cond.fullmatch(conditionset) + if isinstance(cond, str): + cond = re.compile(cond) + _matching = cond.fullmatch(conditionset) except Exception: _matching = True _condition_count += 1 if _matching else 0 @@ -95,36 +106,37 @@ def _check_webif_conditions(action_dict, condition_to_meet: str, conditionset: s _success = None _issue = None _repeat = 
action_dict.get('repeat') - _delay = action_dict.get('delay') or 0 - _delta = action_dict.get('delta') or 0 - _mindelta = action_dict.get('mindelta') or 0 + _delay = int(float(action_dict.get('delay') or 0)) + _delta = action_dict.get('delta') or '0' + _mindelta = action_dict.get('mindelta') or '0' condition_necessary = 0 condition_met = True condition_count = 0 count, condition1, condition_to_meet, necessary = _check_webif_conditions(action_dict, 'conditionset', conditionset) condition_count += count - condition_necessary += necessary + condition_necessary += min(1, necessary) count, condition2, previouscondition_to_meet, necessary = _check_webif_conditions(action_dict, 'previousconditionset', previousconditionset) condition_count += count - condition_necessary += necessary + condition_necessary += min(1, necessary) count, condition3, previousstate_condition_to_meet, necessary = _check_webif_conditions(action_dict, 'previousstate_conditionset', previousstate_conditionset) condition_count += count - condition_necessary += necessary + condition_necessary += min(1, necessary) if condition_count < condition_necessary: condition_met = False cond1 = conditionset in ['', self.__active_conditionset] and state == self.__active_state cond2 = self.__states[state]['conditionsets'].get(conditionset) is not None cond_delta = float(_delta) < float(_mindelta) - fontcolor = "white" if cond1 and cond2 and (cond_delta or\ - (not condition_met or (_repeat is False and originaltype == 'actions_stay')))\ - else "#5c5646" if _delay > 0 else "darkred" if _delay < 0 \ - else "#303030" if not condition_met or _issue else "black" - condition_info = condition_to_meet if condition1 is False\ - else previouscondition_to_meet if condition2 is False\ - else previousstate_condition_to_meet if condition3 is False\ - else "" + fontcolor = "white" if cond1 and cond2 and ( + cond_delta or + (not condition_met or (_repeat is False and originaltype == 'actions_stay'))) \ + else "#5c5646" if _delay > 0 else "darkred" if _delay < 0 \ + else "#303030" if not condition_met or _issue else "black" + condition_info = _strip_regex(condition_to_meet) if condition1 is False \ + else _strip_regex(previouscondition_to_meet) if condition2 is False \ + else _strip_regex(previousstate_condition_to_meet) if condition3 is False \ + else "" if _issue: if tooltip_count > 0: action_tooltip += ' ' @@ -140,7 +152,7 @@ def _check_webif_conditions(action_dict, condition_to_meet: str, conditionset: s else " (delta {} < {})".format(_delta, _mindelta) if cond_delta and cond1 and cond2\ else "" action1 = action_dict.get('function') - if action1 == 'set': + if action1 in ['set', 'force set']: action2 = str(action_dict.get('item')) value_check = str(action_dict.get('value')) value_check = '""' if value_check == "" else value_check @@ -219,7 +231,10 @@ def _conditionlabel(self, state, conditionset, i): if condition not in conditions_done: current_clean = ", ".join(f"{k} = {v}" for k, v in current.items()) text = " Current {}".format(current_clean) if current and len(current) > 0 else " Not evaluated." - conditionlist += '
{}:{}
'.format(condition.upper(), text) + conditionlist += ('' + '' + '
{}:{}
').format(condition.upper(), text) conditions_done.append(condition) conditionlist += '' info_status = str(condition_dict.get('status') or '') @@ -292,7 +307,7 @@ def _conditionlabel(self, state, conditionset, i): and condition_dict.get('updatedbynegate') == 'True')\ else "updated by" if not updatedby_none and compare == "updatedby"\ else "not triggered by" if (not triggeredby_none and compare == "triggeredby" - and condition_dict.get('triggeredbynegate') == 'True')\ + and condition_dict.get('triggeredbynegate') == 'True')\ else "triggered by" if not triggeredby_none and compare == "triggeredby"\ else "!=" if (not value_none and compare == "value" and condition_dict.get('negate') == 'True')\ @@ -372,7 +387,6 @@ def drawgraph(self, filename): previous_state = '' previous_conditionset = '' previousconditionset = '' - previousstate = '' previousstate_conditionset = '' for i, state in enumerate(self.__states): #self._log_debug('Adding state for webif {}', self.__states[state]) @@ -433,6 +447,12 @@ def drawgraph(self, filename): actions_enter_or_stay = self.__states[state].get('actions_enter_or_stay') or [] actions_stay = self.__states[state].get('actions_stay') or [] actions_leave = self.__states[state].get('actions_leave') or [] + action_tooltip_count_enter = 0 + action_tooltip_count_stay = 0 + action_tooltip_count_leave = 0 + action_tooltip_enter = "" + action_tooltip_stay = "" + action_tooltip_leave = "" for j, conditionset in enumerate(self.__states[state]['conditionsets']): if len(actions_enter) > 0 or len(actions_enter_or_stay) > 0: diff --git a/stateengine/__init__.py b/stateengine/__init__.py index 1b879799e..bfa0c5c71 100755 --- a/stateengine/__init__.py +++ b/stateengine/__init__.py @@ -33,7 +33,6 @@ import os import copy from lib.model.smartplugin import * -from lib.item import Items from .webif import WebInterface from datetime import datetime @@ -48,14 +47,13 @@ class StateEngine(SmartPlugin): - PLUGIN_VERSION = '2.0.0' + PLUGIN_VERSION = '2.1.0' # Constructor # noinspection PyUnusedLocal,PyMissingConstructor def __init__(self, sh): super().__init__() StateEngineDefaults.logger = self.logger - self.itemsApi = Items.get_instance() self._items = self.abitems = {} self.mod_http = None self.__sh = sh @@ -63,9 +61,9 @@ def __init__(self, sh): self.__cli = None self.vis_enabled = self._test_visualization() if not self.vis_enabled: - self.logger.warning(f'StateEngine is missing the PyDotPlus package, WebIf visualization is disabled') + self.logger.warning(f'StateEngine is missing the PyDotPlus package or GraphViz, WebIf visualization is disabled') self.init_webinterface(WebInterface) - self.get_sh().stateengine_plugin_functions = StateEngineFunctions.SeFunctions(self.get_sh(), self.logger) + self.__sh.stateengine_plugin_functions = StateEngineFunctions.SeFunctions(self.__sh, self.logger) try: log_level = self.get_parameter_value("log_level") startup_log_level = self.get_parameter_value("startup_log_level") @@ -100,8 +98,8 @@ def __init__(self, sh): StateEngineDefaults.plugin_version = self.PLUGIN_VERSION StateEngineDefaults.write_to_log(self.logger) - StateEngineCurrent.init(self.get_sh()) - base = self.get_sh().get_basedir() + StateEngineCurrent.init(self.__sh) + base = self.__sh.get_basedir() log_directory = SeLogger.manage_logdirectory(base, log_directory, False) SeLogger.log_directory = log_directory @@ -109,7 +107,6 @@ def __init__(self, sh): text = "StateEngine extended logging is active. Logging to '{0}' with log level {1}." 
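The web interface hunks above now accept both plain strings and pre-compiled `re.Pattern` objects when checking whether a condition set matches, compiling strings on demand. A hedged sketch of that check; `count_matching` and its arguments are illustrative names, not the plugin API, while the permissive error fallback mirrors the hunk:

```python
import re

# Condition-set entries may be strings or pre-compiled re.Pattern objects;
# strings are compiled on demand, and errors fall back to "matching".
def count_matching(conditions, conditionset: str) -> int:
    matches = 0
    for cond in conditions:
        try:
            if isinstance(cond, str):
                cond = re.compile(cond)
            matching = cond.fullmatch(conditionset) is not None
        except Exception:
            matching = True
        matches += 1 if matching else 0
    return matches

conditions = ["enter_(.*)_test", re.compile("enter_abend")]
print(count_matching(conditions, "enter_abend"))       # 1
print(count_matching(conditions, "enter_night_test"))  # 1
```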
self.logger.info(text.format(log_directory, log_level)) - if log_maxage > 0: self.logger.info("StateEngine extended log files will be deleted after {0} days.".format(log_maxage)) cron = ['init', '30 0 * *'] @@ -124,14 +121,6 @@ def __init__(self, sh): # noinspection PyMethodMayBeStatic def parse_item(self, item): item.expand_relativepathes('se_manual_logitem', '', '') - try: - item.expand_relativepathes('se_item_*', '', '') - except Exception: - pass - try: - item.expand_relativepathes('se_status_*', '', '') - except Exception: - pass if self.has_iattr(item.conf, "se_manual_include") or self.has_iattr(item.conf, "se_manual_exclude"): item._eval = "sh.stateengine_plugin_functions.manual_item_update_eval('" + item.property.path + "', caller, source)" elif self.has_iattr(item.conf, "se_manual_invert"): @@ -141,12 +130,12 @@ def parse_item(self, item): # Initialization of plugin def run(self): # Initialize - StateEngineStructs.global_struct = copy.deepcopy(self.itemsApi.return_struct_definitions()) + StateEngineStructs.global_struct = copy.deepcopy(self.__sh.items.return_struct_definitions()) self.logger.info("Init StateEngine items") - for item in self.itemsApi.find_items("se_plugin"): + for item in self.__sh.find_items("se_plugin"): if item.conf["se_plugin"] == "active": try: - abitem = StateEngineItem.SeItem(self.get_sh(), item, self) + abitem = StateEngineItem.SeItem(self.__sh, item, self) abitem.ab_alive = True abitem.update_leave_action(self.__default_instant_leaveaction) abitem.write_to_log() @@ -161,9 +150,9 @@ def run(self): else: self.logger.info("StateEngine deactivated because no items have been found.") - self.__cli = StateEngineCliCommands.SeCliCommands(self.get_sh(), self._items, self.logger) + self.__cli = StateEngineCliCommands.SeCliCommands(self.__sh, self._items, self.logger) self.alive = True - self.get_sh().stateengine_plugin_functions.ab_alive = True + self.__sh.stateengine_plugin_functions.ab_alive = True # Stopping of plugin def stop(self): @@ -176,7 +165,7 @@ def stop(self): self._items[item].remove_all_schedulers() self.alive = False - self.get_sh().stateengine_plugin_functions.ab_alive = False + self.__sh.stateengine_plugin_functions.ab_alive = False self.logger.debug("stop method finished") # Determine if caller/source are contained in changed_by list @@ -184,7 +173,7 @@ def stop(self): # source: Source to check # changed_by: List of callers/source (element format :) to check against def is_changed_by(self, caller, source, changed_by): - original_caller, original_source = StateEngineTools.get_original_caller(self.logger, caller, source) + original_caller, original_source = StateEngineTools.get_original_caller(self.__sh, self.logger, caller, source) for entry in changed_by: entry_caller, __, entry_source = entry.partition(":") if (entry_caller == original_caller or entry_caller == "*") and ( @@ -197,7 +186,7 @@ def is_changed_by(self, caller, source, changed_by): # source: Source to check # changed_by: List of callers/source (element format :) to check against def not_changed_by(self, caller, source, changed_by): - original_caller, original_source = StateEngineTools.get_original_caller(self.logger, caller, source) + original_caller, original_source = StateEngineTools.get_original_caller(self.__sh, self.logger, caller, source) for entry in changed_by: entry_caller, __, entry_source = entry.partition(":") if (entry_caller == original_caller or entry_caller == "*") and ( @@ -221,7 +210,7 @@ def get_items(self): def get_graph(self, abitem, graphtype='link'): if 
isinstance(abitem, str): abitem = self._items[abitem] - webif = StateEngineWebif.WebInterface(self.__sh, abitem) + webif = StateEngineWebif.WebInterface(abitem) try: os.makedirs(self.path_join(self.get_plugin_dir(), 'webif/static/img/visualisations/')) except OSError: @@ -261,21 +250,20 @@ def get_graph(self, abitem, graphtype='link'): return '

Can not show visualization.

' \ 'Current issue: ' + str(ex) + '
' - def _test_visualization(self): if not VIS_ENABLED: return False img_path = self.path_join(self.get_plugin_dir(), 'webif/static/img/visualisations/se_test') graph = pydotplus.Dot('StateEngine', graph_type='digraph', splines='false', - overlap='scale', compound='false', imagepath=img_path) + overlap='scale', compound='false', imagepath=img_path) graph.set_node_defaults(color='lightgray', style='filled', shape='box', - fontname='Helvetica', fontsize='10') + fontname='Helvetica', fontsize='10') graph.set_edge_defaults(color='darkgray', style='filled', shape='box', - fontname='Helvetica', fontsize='10') + fontname='Helvetica', fontsize='10') try: result = graph.write_svg(img_path, prog='fdp') except pydotplus.graphviz.InvocationException: - return False - return True + result = False + return result diff --git a/stateengine/plugin.yaml b/stateengine/plugin.yaml index c08e5f59b..5897f722a 100755 --- a/stateengine/plugin.yaml +++ b/stateengine/plugin.yaml @@ -39,7 +39,7 @@ plugin: state: ready support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1303071-stateengine-plugin-support - version: 2.0.0 + version: '2.1.0' sh_minversion: '1.6' multi_instance: False classname: StateEngine @@ -344,6 +344,20 @@ item_attributes: parameter ``startup_delay_default``. ' + se_name: + type: str + description: + de: 'Zustandsname, überschreibt den im Attribut "name" angegebene Wert' + en: 'Name of state, overwriting the value defined by "name" attribute' + description_long: + de: '**Zustandsname, überschreibt den im Attribut "name" angegebene Wert**\n + Dies kann beispielsweise nützlich sein, um den Namen abhängig + von einer Bedingungsgruppe zu ändern. + ' + en: '**Name of state, overwriting the value defined by "name" attribute**\n + Could be useful to change the state name based on a condition group + ' + se_laststate_item_name: type: str description: diff --git a/stateengine/user_doc/06_aktionen.rst b/stateengine/user_doc/06_aktionen.rst index 55d6b187c..339427c70 100755 --- a/stateengine/user_doc/06_aktionen.rst +++ b/stateengine/user_doc/06_aktionen.rst @@ -348,11 +348,11 @@ Die einzelnen Angaben einer Liste werden als ``OR`` evaluiert. 
screens: conditionset_to_check: type: str - value: "screens.osten_s1.automatik.rules.abend.enter_abend" + initial_value: "screens.osten_s1.automatik.rules.abend.enter_abend" conditionset: - regex:enter_(.*)_test - - eval:sh.screens.conditionset_to_check.property.name + - eval:sh.screens.conditionset_to_check.property.value Der gesamte Pfad könnte wie folgt evaluiert werden: diff --git a/stateengine/webif/__init__.py b/stateengine/webif/__init__.py index b53855a6e..809f8084b 100755 --- a/stateengine/webif/__init__.py +++ b/stateengine/webif/__init__.py @@ -25,13 +25,8 @@ # ######################################################################### -import datetime -import time -import os -import logging import json -from lib.item import Items from lib.model.smartplugin import SmartPluginWebIf @@ -40,8 +35,6 @@ # ------------------------------------------ import cherrypy -import csv -from jinja2 import Environment, FileSystemLoader class WebInterface(SmartPluginWebIf): @@ -54,6 +47,7 @@ def __init__(self, webif_dir, plugin): :type webif_dir: str :type plugin: object """ + super().__init__() self.logger = plugin.logger self.webif_dir = webif_dir self.plugin = plugin @@ -69,7 +63,7 @@ def index(self, action=None, item_id=None, item_path=None, reload=None, abitem=N :return: contents of the template after beeing rendered """ - item = self.plugin.itemsApi.return_item(item_path) + item = self.plugin.get_sh().return_item(item_path) tmpl = self.tplenv.get_template('{}.html'.format(page)) pagelength = self.plugin.get_parameter_value('webif_pagelength') diff --git a/stateengine/webif/templates/index.html b/stateengine/webif/templates/index.html index e52c915f4..16aabcd6d 100755 --- a/stateengine/webif/templates/index.html +++ b/stateengine/webif/templates/index.html @@ -142,7 +142,7 @@ {% endif %} {{ item.logger.log_level_as_num }} - {% for cond in item.webif_infos.keys() %}{% if not p.itemsApi.return_item(cond) == None %}{% if loop.index > 1 %},{% endif %}{{ p.itemsApi.return_item(cond)._name.split('.')[-1] }}{% endif %}{% endfor %} + {% for cond in item.webif_infos.keys() %}{% if not p.get_sh().return_item(cond) == None %}{% if loop.index > 1 %},{% endif %}{{ p.get_sh().return_item(cond)._name.split('.')[-1] }}{% endif %}{% endfor %} diff --git a/zigbee2mqtt/__init__.py b/zigbee2mqtt/__init__.py index b8cd20b02..2256f9df9 100755 --- a/zigbee2mqtt/__init__.py +++ b/zigbee2mqtt/__init__.py @@ -25,6 +25,7 @@ from datetime import datetime import json +from logging import DEBUG from lib.model.mqttplugin import MqttPlugin @@ -47,7 +48,7 @@ class Zigbee2Mqtt(MqttPlugin): """ Main class of the Plugin. Does all plugin specific stuff and provides the update functions for the items """ - PLUGIN_VERSION = '2.0.1' + PLUGIN_VERSION = '2.0.2' def __init__(self, sh, **kwargs): """ Initializes the plugin. 
""" @@ -55,6 +56,8 @@ def __init__(self, sh, **kwargs): # Call init code of parent class (MqttPlugin) super().__init__() + # self.logger = logging.getLogger(__name__) + self.logger.info(f'Init {self.get_shortname()} plugin {self.PLUGIN_VERSION}') # get the parameters for the plugin (as defined in metadata plugin.yaml): @@ -62,8 +65,9 @@ def __init__(self, sh, **kwargs): self.cycle = self.get_parameter_value('poll_period') self.read_at_init = self.get_parameter_value('read_at_init') self._z2m_gui = self.get_parameter_value('z2m_gui') + self._pause_item_path = self.get_parameter_value('pause_item') - # bool_values is only good if used internally, because MQTT data is + # bool_values is only good if used internally, because MQTT data is # usually sent in JSON. So just make this easy... self.bool_values = [False, True] @@ -115,6 +119,8 @@ def run(self): self.logger.debug("Run method called") self.alive = True + if self._pause_item: + self._pause_item(False, self.get_fullname()) # start subscription to all topics self.start_subscriptions() @@ -134,8 +140,10 @@ def stop(self): """ Stop method for the plugin """ self.alive = False + if self._pause_item: + self._pause_item(True, self.get_fullname()) self.logger.debug("Stop method called") - self.scheduler_remove('z2m_c') + self.scheduler_remove('z2m_cycle') # stop subscription to all topics self.stop_subscriptions() @@ -154,15 +162,12 @@ def parse_item(self, item): can be sent to the knx with a knx write function within the knx plugin. """ - # remove this block when its included in smartplugin.py, - # replace with super().parse_item(item) - # check for suspend item - if item.property.path == self._suspend_item_path: - self.logger.debug(f'suspend item {item.property.path} registered') - self._suspend_item = item + # check for pause item + if item.property.path == self._pause_item_path: + self.logger.debug(f'pause item {item.property.path} registered') + self._pause_item = item self.add_item(item, updating=True) return self.update_item - # end block if self.has_iattr(item.conf, Z2M_ATTR): self.logger.debug(f"parsing item: {item}") @@ -174,13 +179,6 @@ def parse_item(self, item): attr = self.get_iattr_value(item.conf, Z2M_ATTR) - if item.type() == 'bool': - bval = self.get_iattr_value(item.conf, Z2M_BVAL) - if bval == []: - bval = None - if bval is None or type(bval) is not list: - bval = self.bool_values - # invert read-only/write-only logic to allow read/write write = not self.get_iattr_value(item.conf, Z2M_RO, False) read = not self.get_iattr_value(item.conf, Z2M_WO, False) or not write @@ -198,6 +196,9 @@ def parse_item(self, item): 'write': write, } if item.type() == 'bool': + bval = self.get_iattr_value(item.conf, Z2M_BVAL) + if bval is None or bval == [] or type(bval) is not list: + bval = self.bool_values data['bool_values'] = bval self._devices[device][attr].update(data) @@ -216,7 +217,7 @@ def parse_item(self, item): def remove_item(self, item): if item not in self._plg_item_dict: - return + return False mapping = self.get_item_mapping(item) if mapping: @@ -239,9 +240,9 @@ def remove_item(self, item): except ValueError: pass - super().remove_item(item) + return super().remove_item(item) - def update_item(self, item, caller='', source=None, dest=None): + def update_item(self, item, caller=None, source=None, dest=None): """ Item has been updated @@ -252,7 +253,17 @@ def update_item(self, item, caller='', source=None, dest=None): """ self.logger.debug(f"update_item: {item} called by {caller} and source {source}") - if self.alive and not 
self.suspended and not caller.startswith(self.get_shortname()): + # check for pause item + if item is self._pause_item: + if caller != self.get_shortname(): + self.logger.debug(f'pause item changed to {item()}') + if item() and self.alive: + self.stop() + elif not item() and not self.alive: + self.run() + return + + if self.alive and caller and not caller.startswith(self.get_shortname()): if item in self._items_write: @@ -311,7 +322,7 @@ def update_item(self, item, caller='', source=None, dest=None): attr: value }) else: - payload = None + payload = '' self.publish_z2m_topic(device, topic_3, topic_4, topic_5, payload, item, bool_values=bool_values) else: @@ -417,7 +428,13 @@ def on_mqtt_msg(self, topic: str, payload, qos=None, retain=None): if item is not None: item(value, src) - self.logger.info(f"{device}: Item '{item}' set to value {value}") + if device == 'bridge' and (isinstance(value, list) or isinstance(value, dict)): + if self.logger.isEnabledFor(DEBUG): + self.logger.debug(f"{device}: Item '{item}' set to value {value}") + else: + self.logger.info(f"{device}: Item '{item}' set to value {str(value)[:80]}[...] (enable debug log for full output)") + else: + self.logger.info(f"{device}: Item '{item}' set to value {value}") else: self.logger.info(f"{device}: No item for attribute '{attr}' defined to set to {value}") diff --git a/zigbee2mqtt/plugin.yaml b/zigbee2mqtt/plugin.yaml index d9de7f569..3f9d56658 100755 --- a/zigbee2mqtt/plugin.yaml +++ b/zigbee2mqtt/plugin.yaml @@ -12,8 +12,8 @@ plugin: documentation: '' support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1856775-support-thread-f%C3%BCr-das-zigbee2mqtt-plugin - version: 2.0.1 # Plugin version - sh_minversion: '1.9.5.6' # minimum shNG version to use this plugin + version: 2.0.2 # Plugin version + sh_minversion: '1.10.0' # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: '3.8' # minimum Python version to use for this plugin multi_instance: true # plugin supports multi instance @@ -45,12 +45,12 @@ parameters: de: Einlesen aller Werte beim Start en: Read all values at init - suspend_item: + pause_item: type: str default: '' description: - de: Pfad zum Suspend-Item - en: Path to suspend item + de: Pfad zum Pause-Item + en: Path to pause item z2m_gui: type: str
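The zigbee2mqtt changes above replace the suspend item with a `pause_item`: when the configured boolean item turns on, the plugin stops itself, and when it turns off again, it restarts. A reduced sketch of that toggle logic; the `PausablePlugin` class and its methods are stand-ins, not the real SmartHomeNG/MqttPlugin API:

```python
# Stand-in sketch of the pause-item handling added above: the pause item's
# boolean value drives stop()/run() of the plugin.
class PausablePlugin:
    def __init__(self):
        self.alive = False

    def run(self):
        self.alive = True
        print("plugin started")

    def stop(self):
        self.alive = False
        print("plugin stopped")

    def on_pause_item_update(self, paused: bool, caller: str = "item"):
        # ignore updates triggered by the plugin itself to avoid loops
        if caller == "zigbee2mqtt":
            return
        if paused and self.alive:
            self.stop()
        elif not paused and not self.alive:
            self.run()

plugin = PausablePlugin()
plugin.run()
plugin.on_pause_item_update(True)   # -> plugin stopped
plugin.on_pause_item_update(False)  # -> plugin started
```

In an items configuration, the pause item would simply be a bool item whose path is given in the plugin's `pause_item` parameter shown above.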